summaryrefslogtreecommitdiff
path: root/test
diff options
context:
space:
mode:
Diffstat (limited to 'test')
-rw-r--r--test/aaa_profiling/test_compiler.py4
-rw-r--r--test/base/test_tutorials.py144
-rw-r--r--test/base/test_utils.py104
-rw-r--r--test/dialect/mssql/test_compiler.py2
-rw-r--r--test/dialect/mssql/test_query.py16
-rw-r--r--test/dialect/mssql/test_reflection.py38
-rw-r--r--test/dialect/mssql/test_types.py126
-rw-r--r--test/dialect/mysql/test_compiler.py8
-rw-r--r--test/dialect/mysql/test_query.py55
-rw-r--r--test/dialect/mysql/test_reflection.py328
-rw-r--r--test/dialect/postgresql/test_compiler.py81
-rw-r--r--test/dialect/postgresql/test_query.py620
-rw-r--r--test/dialect/postgresql/test_reflection.py11
-rw-r--r--test/dialect/postgresql/test_types.py574
-rw-r--r--test/dialect/test_oracle.py26
-rw-r--r--test/dialect/test_sqlite.py85
-rw-r--r--test/engine/test_pool.py95
-rw-r--r--test/engine/test_reflection.py19
-rw-r--r--test/ext/declarative/test_basic.py26
-rw-r--r--test/ext/declarative/test_inheritance.py30
-rw-r--r--test/ext/declarative/test_mixin.py2
-rw-r--r--test/ext/test_associationproxy.py20
-rw-r--r--test/ext/test_baked.py142
-rw-r--r--test/ext/test_mutable.py32
-rw-r--r--test/orm/inheritance/test_poly_persistence.py38
-rw-r--r--test/orm/inheritance/test_relationship.py212
-rw-r--r--test/orm/inheritance/test_single.py15
-rw-r--r--test/orm/test_bulk.py55
-rw-r--r--test/orm/test_composites.py3
-rw-r--r--test/orm/test_cycles.py49
-rw-r--r--test/orm/test_eager_relations.py113
-rw-r--r--test/orm/test_events.py537
-rw-r--r--test/orm/test_hasparent.py4
-rw-r--r--test/orm/test_lazy_relations.py75
-rw-r--r--test/orm/test_load_on_fks.py3
-rw-r--r--test/orm/test_mapper.py1248
-rw-r--r--test/orm/test_merge.py95
-rw-r--r--test/orm/test_options.py12
-rw-r--r--test/orm/test_query.py189
-rw-r--r--test/orm/test_relationships.py11
-rw-r--r--test/orm/test_session.py108
-rw-r--r--test/orm/test_transaction.py36
-rw-r--r--test/orm/test_unitofwork.py2
-rw-r--r--test/orm/test_unitofworkv2.py659
-rw-r--r--test/orm/test_update_delete.py39
-rw-r--r--test/orm/test_versioning.py144
-rw-r--r--test/profiles.txt58
-rw-r--r--test/requirements.py33
-rw-r--r--test/sql/test_compiler.py116
-rw-r--r--test/sql/test_defaults.py91
-rw-r--r--test/sql/test_functions.py145
-rw-r--r--test/sql/test_insert.py157
-rw-r--r--test/sql/test_insert_exec.py445
-rw-r--r--test/sql/test_metadata.py133
-rw-r--r--test/sql/test_operators.py467
-rw-r--r--test/sql/test_query.py1326
-rw-r--r--test/sql/test_resultset.py1136
-rw-r--r--test/sql/test_returning.py27
-rw-r--r--test/sql/test_selectable.py24
-rw-r--r--test/sql/test_types.py192
-rw-r--r--test/sql/test_update.py124
61 files changed, 8188 insertions, 2521 deletions
diff --git a/test/aaa_profiling/test_compiler.py b/test/aaa_profiling/test_compiler.py
index 5eece4602..5095be103 100644
--- a/test/aaa_profiling/test_compiler.py
+++ b/test/aaa_profiling/test_compiler.py
@@ -32,8 +32,8 @@ class CompileTest(fixtures.TestBase, AssertsExecutionResults):
for t in (t1, t2):
for c in t.c:
c.type._type_affinity
- from sqlalchemy import types
- for t in list(types._type_map.values()):
+ from sqlalchemy.sql import sqltypes
+ for t in list(sqltypes._type_map.values()):
t._type_affinity
cls.dialect = default.DefaultDialect()
diff --git a/test/base/test_tutorials.py b/test/base/test_tutorials.py
new file mode 100644
index 000000000..73dcbb524
--- /dev/null
+++ b/test/base/test_tutorials.py
@@ -0,0 +1,144 @@
+from __future__ import print_function
+from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import config
+import doctest
+import logging
+import sys
+import re
+import os
+
+
+class DocTest(fixtures.TestBase):
+ def _setup_logger(self):
+ rootlogger = logging.getLogger('sqlalchemy.engine.base.Engine')
+
+ class MyStream(object):
+ def write(self, string):
+ sys.stdout.write(string)
+ sys.stdout.flush()
+
+ def flush(self):
+ pass
+
+ self._handler = handler = logging.StreamHandler(MyStream())
+ handler.setFormatter(logging.Formatter('%(message)s'))
+ rootlogger.addHandler(handler)
+
+ def _teardown_logger(self):
+ rootlogger = logging.getLogger('sqlalchemy.engine.base.Engine')
+ rootlogger.removeHandler(self._handler)
+
+ def _setup_create_table_patcher(self):
+ from sqlalchemy.sql import ddl
+ self.orig_sort = ddl.sort_tables_and_constraints
+
+ def our_sort(tables, **kw):
+ return self.orig_sort(
+ sorted(tables, key=lambda t: t.key), **kw
+ )
+ ddl.sort_tables_and_constraints = our_sort
+
+ def _teardown_create_table_patcher(self):
+ from sqlalchemy.sql import ddl
+ ddl.sort_tables_and_constraints = self.orig_sort
+
+ def setup(self):
+ self._setup_logger()
+ self._setup_create_table_patcher()
+
+ def teardown(self):
+ self._teardown_create_table_patcher()
+ self._teardown_logger()
+
+
+ def _run_doctest_for_content(self, name, content):
+ optionflags = (
+ doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE |
+ _get_allow_unicode_flag()
+ )
+ runner = doctest.DocTestRunner(
+ verbose=None, optionflags=optionflags,
+ checker=_get_unicode_checker())
+ globs = {
+ 'print_function': print_function}
+ parser = doctest.DocTestParser()
+ test = parser.get_doctest(content, globs, name, name, 0)
+ runner.run(test)
+ runner.summarize()
+ assert not runner.failures
+
+ def _run_doctest(self, fname):
+ here = os.path.dirname(__file__)
+ sqla_base = os.path.normpath(os.path.join(here, "..", ".."))
+ path = os.path.join(sqla_base, "doc/build", fname)
+ if not os.path.exists(path):
+ config.skip_test("Can't find documentation file %r" % path)
+ with open(path) as file_:
+ content = file_.read()
+ content = re.sub(r'{(?:stop|sql|opensql)}', '', content)
+ self._run_doctest_for_content(fname, content)
+
+ def test_orm(self):
+ self._run_doctest("orm/tutorial.rst")
+
+ def test_core(self):
+ self._run_doctest("core/tutorial.rst")
+
+
+# unicode checker courtesy py.test
+
+
+def _get_unicode_checker():
+ """
+ Returns a doctest.OutputChecker subclass that takes in account the
+ ALLOW_UNICODE option to ignore u'' prefixes in strings. Useful
+ when the same doctest should run in Python 2 and Python 3.
+
+ An inner class is used to avoid importing "doctest" at the module
+ level.
+ """
+ if hasattr(_get_unicode_checker, 'UnicodeOutputChecker'):
+ return _get_unicode_checker.UnicodeOutputChecker()
+
+ import doctest
+ import re
+
+ class UnicodeOutputChecker(doctest.OutputChecker):
+ """
+ Copied from doctest_nose_plugin.py from the nltk project:
+ https://github.com/nltk/nltk
+ """
+
+ _literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE)
+
+ def check_output(self, want, got, optionflags):
+ res = doctest.OutputChecker.check_output(self, want, got,
+ optionflags)
+ if res:
+ return True
+
+ if not (optionflags & _get_allow_unicode_flag()):
+ return False
+
+ else: # pragma: no cover
+ # the code below will end up executed only in Python 2 in
+ # our tests, and our coverage check runs in Python 3 only
+ def remove_u_prefixes(txt):
+ return re.sub(self._literal_re, r'\1\2', txt)
+
+ want = remove_u_prefixes(want)
+ got = remove_u_prefixes(got)
+ res = doctest.OutputChecker.check_output(self, want, got,
+ optionflags)
+ return res
+
+ _get_unicode_checker.UnicodeOutputChecker = UnicodeOutputChecker
+ return _get_unicode_checker.UnicodeOutputChecker()
+
+
+def _get_allow_unicode_flag():
+ """
+ Registers and returns the ALLOW_UNICODE flag.
+ """
+ import doctest
+ return doctest.register_optionflag('ALLOW_UNICODE')
diff --git a/test/base/test_utils.py b/test/base/test_utils.py
index 256f52850..4370d612b 100644
--- a/test/base/test_utils.py
+++ b/test/base/test_utils.py
@@ -2,13 +2,14 @@ import copy
from sqlalchemy import util, sql, exc, testing
from sqlalchemy.testing import assert_raises, assert_raises_message, fixtures
-from sqlalchemy.testing import eq_, is_, ne_, fails_if
+from sqlalchemy.testing import eq_, is_, ne_, fails_if, mock
from sqlalchemy.testing.util import picklers, gc_collect
from sqlalchemy.util import classproperty, WeakSequence, get_callable_argspec
from sqlalchemy.sql import column
from sqlalchemy.util import langhelpers
import inspect
+
class _KeyedTupleTest(object):
def _fixture(self, values, labels):
@@ -284,6 +285,102 @@ class MemoizedAttrTest(fixtures.TestBase):
eq_(f1.bar(), 20)
eq_(val[0], 21)
+ def test_memoized_slots(self):
+ canary = mock.Mock()
+
+ class Foob(util.MemoizedSlots):
+ __slots__ = ('foo_bar', 'gogo')
+
+ def _memoized_method_gogo(self):
+ canary.method()
+ return "gogo"
+
+ def _memoized_attr_foo_bar(self):
+ canary.attr()
+ return "foobar"
+
+ f1 = Foob()
+ assert_raises(AttributeError, setattr, f1, "bar", "bat")
+
+ eq_(f1.foo_bar, "foobar")
+
+ eq_(f1.foo_bar, "foobar")
+
+ eq_(f1.gogo(), "gogo")
+
+ eq_(f1.gogo(), "gogo")
+
+ eq_(canary.mock_calls, [mock.call.attr(), mock.call.method()])
+
+
+class WrapCallableTest(fixtures.TestBase):
+ def test_wrapping_update_wrapper_fn(self):
+ def my_fancy_default():
+ """run the fancy default"""
+ return 10
+
+ c = util.wrap_callable(lambda: my_fancy_default, my_fancy_default)
+
+ eq_(c.__name__, "my_fancy_default")
+ eq_(c.__doc__, "run the fancy default")
+
+ def test_wrapping_update_wrapper_fn_nodocstring(self):
+ def my_fancy_default():
+ return 10
+
+ c = util.wrap_callable(lambda: my_fancy_default, my_fancy_default)
+ eq_(c.__name__, "my_fancy_default")
+ eq_(c.__doc__, None)
+
+ def test_wrapping_update_wrapper_cls(self):
+ class MyFancyDefault(object):
+ """a fancy default"""
+
+ def __call__(self):
+ """run the fancy default"""
+ return 10
+
+ def_ = MyFancyDefault()
+ c = util.wrap_callable(lambda: def_(), def_)
+
+ eq_(c.__name__, "MyFancyDefault")
+ eq_(c.__doc__, "run the fancy default")
+
+ def test_wrapping_update_wrapper_cls_noclsdocstring(self):
+ class MyFancyDefault(object):
+
+ def __call__(self):
+ """run the fancy default"""
+ return 10
+
+ def_ = MyFancyDefault()
+ c = util.wrap_callable(lambda: def_(), def_)
+ eq_(c.__name__, "MyFancyDefault")
+ eq_(c.__doc__, "run the fancy default")
+
+ def test_wrapping_update_wrapper_cls_nomethdocstring(self):
+ class MyFancyDefault(object):
+ """a fancy default"""
+
+ def __call__(self):
+ return 10
+
+ def_ = MyFancyDefault()
+ c = util.wrap_callable(lambda: def_(), def_)
+ eq_(c.__name__, "MyFancyDefault")
+ eq_(c.__doc__, "a fancy default")
+
+ def test_wrapping_update_wrapper_cls_noclsdocstring_nomethdocstring(self):
+ class MyFancyDefault(object):
+
+ def __call__(self):
+ return 10
+
+ def_ = MyFancyDefault()
+ c = util.wrap_callable(lambda: def_(), def_)
+ eq_(c.__name__, "MyFancyDefault")
+ eq_(c.__doc__, None)
+
class ToListTest(fixtures.TestBase):
def test_from_string(self):
@@ -1103,7 +1200,10 @@ class IdentitySetTest(fixtures.TestBase):
return super_, sub_, twin1, twin2, unique1, unique2
def _assert_unorderable_types(self, callable_):
- if util.py3k:
+ if util.py36:
+ assert_raises_message(
+ TypeError, 'not supported between instances of', callable_)
+ elif util.py3k:
assert_raises_message(
TypeError, 'unorderable types', callable_)
else:
diff --git a/test/dialect/mssql/test_compiler.py b/test/dialect/mssql/test_compiler.py
index 9d89f040b..80be9f67d 100644
--- a/test/dialect/mssql/test_compiler.py
+++ b/test/dialect/mssql/test_compiler.py
@@ -12,7 +12,7 @@ from sqlalchemy import Integer, String, Table, Column, select, MetaData,\
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
- __dialect__ = mssql.dialect(legacy_schema_aliasing=False)
+ __dialect__ = mssql.dialect()
def test_true_false(self):
self.assert_compile(
diff --git a/test/dialect/mssql/test_query.py b/test/dialect/mssql/test_query.py
index 61ae32ef4..32edfd7eb 100644
--- a/test/dialect/mssql/test_query.py
+++ b/test/dialect/mssql/test_query.py
@@ -41,17 +41,15 @@ class LegacySchemaAliasingTest(fixtures.TestBase, AssertsCompiledSQL):
)
def _assert_sql(self, element, legacy_sql, modern_sql=None):
- dialect = mssql.dialect()
+ dialect = mssql.dialect(legacy_schema_aliasing=True)
- with assertions.expect_warnings(
- "legacy_schema_aliasing flag is defaulted to True.*"):
- self.assert_compile(
- element,
- legacy_sql,
- dialect=dialect
- )
+ self.assert_compile(
+ element,
+ legacy_sql,
+ dialect=dialect
+ )
- dialect = mssql.dialect(legacy_schema_aliasing=False)
+ dialect = mssql.dialect()
self.assert_compile(
element,
modern_sql or "foob",
diff --git a/test/dialect/mssql/test_reflection.py b/test/dialect/mssql/test_reflection.py
index bee441586..e016a6e41 100644
--- a/test/dialect/mssql/test_reflection.py
+++ b/test/dialect/mssql/test_reflection.py
@@ -1,5 +1,5 @@
# -*- encoding: utf-8
-from sqlalchemy.testing import eq_
+from sqlalchemy.testing import eq_, is_, in_
from sqlalchemy import *
from sqlalchemy import types, schema, event
from sqlalchemy.databases import mssql
@@ -24,14 +24,14 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
Column('user_name', types.VARCHAR(20), nullable=False),
Column('test1', types.CHAR(5), nullable=False),
Column('test2', types.Float(5), nullable=False),
- Column('test3', types.Text('max')),
+ Column('test3', types.Text()),
Column('test4', types.Numeric, nullable=False),
Column('test5', types.DateTime),
Column('parent_user_id', types.Integer,
ForeignKey('engine_users.user_id')),
Column('test6', types.DateTime, nullable=False),
- Column('test7', types.Text('max')),
- Column('test8', types.LargeBinary('max')),
+ Column('test7', types.Text()),
+ Column('test8', types.LargeBinary()),
Column('test_passivedefault2', types.Integer,
server_default='5'),
Column('test9', types.BINARY(100)),
@@ -171,6 +171,32 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
set([t2.c['x col'], t2.c.y])
)
+ @testing.provide_metadata
+ def test_max_ident_in_varchar_not_present(self):
+ """test [ticket:3504].
+
+ Here we are testing not just that the "max" token comes back
+ as None, but also that these types accept "max" as the value
+ of "length" on construction, which isn't a directly documented
+ pattern however is likely in common use.
+
+ """
+ metadata = self.metadata
+
+ Table(
+ 't', metadata,
+ Column('t1', types.String),
+ Column('t2', types.Text('max')),
+ Column('t3', types.Text('max')),
+ Column('t4', types.LargeBinary('max')),
+ Column('t5', types.VARBINARY('max')),
+ )
+ metadata.create_all()
+ for col in inspect(testing.db).get_columns('t'):
+ is_(col['type'].length, None)
+ in_('max', str(col['type'].compile(dialect=testing.db.dialect)))
+
+
from sqlalchemy.dialects.mssql.information_schema import CoerceUnicode, tables
from sqlalchemy.dialects.mssql import base
@@ -187,7 +213,7 @@ class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
stmt = tables.c.table_name == 'somename'
self.assert_compile(
stmt,
- "[TABLES_1].[TABLE_NAME] = :table_name_1",
+ "[INFORMATION_SCHEMA].[TABLES].[TABLE_NAME] = :table_name_1",
dialect=dialect
)
@@ -197,7 +223,7 @@ class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
stmt = tables.c.table_name == 'somename'
self.assert_compile(
stmt,
- "[TABLES_1].[TABLE_NAME] = CAST(:table_name_1 AS NVARCHAR(max))",
+ "[INFORMATION_SCHEMA].[TABLES].[TABLE_NAME] = CAST(:table_name_1 AS NVARCHAR(max))",
dialect=dialect
)
diff --git a/test/dialect/mssql/test_types.py b/test/dialect/mssql/test_types.py
index 17ceb6b61..dad86c60a 100644
--- a/test/dialect/mssql/test_types.py
+++ b/test/dialect/mssql/test_types.py
@@ -1,5 +1,5 @@
# -*- encoding: utf-8
-from sqlalchemy.testing import eq_, engines, pickleable
+from sqlalchemy.testing import eq_, engines, pickleable, assert_raises_message
import datetime
import os
from sqlalchemy import Table, Column, MetaData, Float, \
@@ -8,7 +8,8 @@ from sqlalchemy import Table, Column, MetaData, Float, \
UnicodeText, LargeBinary
from sqlalchemy import types, schema
from sqlalchemy.databases import mssql
-from sqlalchemy.dialects.mssql.base import TIME
+from sqlalchemy.dialects.mssql.base import TIME, _MSDate
+from sqlalchemy.dialects.mssql.base import MS_2005_VERSION, MS_2008_VERSION
from sqlalchemy.testing import fixtures, \
AssertsExecutionResults, ComparesTables
from sqlalchemy import testing
@@ -33,6 +34,36 @@ class TimeTypeTest(fixtures.TestBase):
result_processor = mssql_time_type.result_processor(None, None)
eq_(expected, result_processor(value))
+ def test_result_processor_invalid(self):
+ mssql_time_type = TIME()
+ result_processor = mssql_time_type.result_processor(None, None)
+ assert_raises_message(
+ ValueError,
+ "could not parse 'abc' as a time value",
+ result_processor, 'abc'
+ )
+
+
+class MSDateTypeTest(fixtures.TestBase):
+
+ def test_result_processor(self):
+ expected = datetime.date(2000, 1, 2)
+ self._assert_result_processor(expected, '2000-01-02')
+
+ def _assert_result_processor(self, expected, value):
+ mssql_date_type = _MSDate()
+ result_processor = mssql_date_type.result_processor(None, None)
+ eq_(expected, result_processor(value))
+
+ def test_result_processor_invalid(self):
+ mssql_date_type = _MSDate()
+ result_processor = mssql_date_type.result_processor(None, None)
+ assert_raises_message(
+ ValueError,
+ "could not parse 'abc' as a date value",
+ result_processor, 'abc'
+ )
+
class TypeDDLTest(fixtures.TestBase):
@@ -173,6 +204,91 @@ class TypeDDLTest(fixtures.TestBase):
"%s %s" % (col.name, columns[index][3]))
self.assert_(repr(col))
+ def test_dates(self):
+ "Exercise type specification for date types."
+
+ columns = [
+ # column type, args, kwargs, expected ddl
+ (mssql.MSDateTime, [], {},
+ 'DATETIME', None),
+
+ (types.DATE, [], {},
+ 'DATE', None),
+ (types.Date, [], {},
+ 'DATE', None),
+ (types.Date, [], {},
+ 'DATETIME', MS_2005_VERSION),
+ (mssql.MSDate, [], {},
+ 'DATE', None),
+ (mssql.MSDate, [], {},
+ 'DATETIME', MS_2005_VERSION),
+
+ (types.TIME, [], {},
+ 'TIME', None),
+ (types.Time, [], {},
+ 'TIME', None),
+ (mssql.MSTime, [], {},
+ 'TIME', None),
+ (mssql.MSTime, [1], {},
+ 'TIME(1)', None),
+ (types.Time, [], {},
+ 'DATETIME', MS_2005_VERSION),
+ (mssql.MSTime, [], {},
+ 'TIME', None),
+
+ (mssql.MSSmallDateTime, [], {},
+ 'SMALLDATETIME', None),
+
+ (mssql.MSDateTimeOffset, [], {},
+ 'DATETIMEOFFSET', None),
+ (mssql.MSDateTimeOffset, [1], {},
+ 'DATETIMEOFFSET(1)', None),
+
+ (mssql.MSDateTime2, [], {},
+ 'DATETIME2', None),
+ (mssql.MSDateTime2, [0], {},
+ 'DATETIME2(0)', None),
+ (mssql.MSDateTime2, [1], {},
+ 'DATETIME2(1)', None),
+
+ (mssql.MSTime, [0], {},
+ 'TIME(0)', None),
+
+ (mssql.MSDateTimeOffset, [0], {},
+ 'DATETIMEOFFSET(0)', None),
+
+ ]
+
+ metadata = MetaData()
+ table_args = ['test_mssql_dates', metadata]
+ for index, spec in enumerate(columns):
+ type_, args, kw, res, server_version = spec
+ table_args.append(
+ Column('c%s' % index, type_(*args, **kw), nullable=None))
+
+ date_table = Table(*table_args)
+ dialect = mssql.dialect()
+ dialect.server_version_info = MS_2008_VERSION
+ ms_2005_dialect = mssql.dialect()
+ ms_2005_dialect.server_version_info = MS_2005_VERSION
+ gen = dialect.ddl_compiler(dialect, schema.CreateTable(date_table))
+ gen2005 = ms_2005_dialect.ddl_compiler(
+ ms_2005_dialect, schema.CreateTable(date_table))
+
+ for col in date_table.c:
+ index = int(col.name[1:])
+ server_version = columns[index][4]
+ if not server_version:
+ testing.eq_(
+ gen.get_column_specification(col),
+ "%s %s" % (col.name, columns[index][3]))
+ else:
+ testing.eq_(
+ gen2005.get_column_specification(col),
+ "%s %s" % (col.name, columns[index][3]))
+
+ self.assert_(repr(col))
+
def test_large_type_deprecation(self):
d1 = mssql.dialect(deprecate_large_types=True)
d2 = mssql.dialect(deprecate_large_types=False)
@@ -313,9 +429,7 @@ class TypeRoundTripTest(
def teardown(self):
metadata.drop_all()
- @testing.fails_on_everything_except(
- 'mssql+pyodbc',
- 'this is some pyodbc-specific feature')
+ @testing.fails_on_everything_except('mssql+pyodbc')
def test_decimal_notation(self):
numeric_table = Table(
'numeric_table', metadata,
@@ -466,6 +580,8 @@ class TypeRoundTripTest(
(mssql.MSDateTime2, [], {},
'DATETIME2', ['>=', (10,)]),
+ (mssql.MSDateTime2, [0], {},
+ 'DATETIME2(0)', ['>=', (10,)]),
(mssql.MSDateTime2, [1], {},
'DATETIME2(1)', ['>=', (10,)]),
diff --git a/test/dialect/mysql/test_compiler.py b/test/dialect/mysql/test_compiler.py
index 304c31012..60af82bab 100644
--- a/test/dialect/mysql/test_compiler.py
+++ b/test/dialect/mysql/test_compiler.py
@@ -511,9 +511,8 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
self.assert_compile(schema.CreateTable(t1),
'CREATE TABLE sometable (assigned_id '
'INTEGER NOT NULL, id INTEGER NOT NULL '
- 'AUTO_INCREMENT, PRIMARY KEY (assigned_id, '
- 'id), KEY idx_autoinc_id (id))ENGINE=Inn'
- 'oDB')
+ 'AUTO_INCREMENT, PRIMARY KEY (id, assigned_id)'
+ ')ENGINE=InnoDB')
t1 = Table('sometable', MetaData(),
Column('assigned_id', Integer(), primary_key=True,
@@ -537,8 +536,7 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
'CREATE TABLE sometable ('
'id INTEGER NOT NULL, '
'`order` INTEGER NOT NULL AUTO_INCREMENT, '
- 'PRIMARY KEY (id, `order`), '
- 'KEY idx_autoinc_order (`order`)'
+ 'PRIMARY KEY (`order`, id)'
')ENGINE=InnoDB')
def test_create_table_with_partition(self):
diff --git a/test/dialect/mysql/test_query.py b/test/dialect/mysql/test_query.py
index f19177c2a..85513167c 100644
--- a/test/dialect/mysql/test_query.py
+++ b/test/dialect/mysql/test_query.py
@@ -5,7 +5,6 @@ from sqlalchemy import *
from sqlalchemy.testing import fixtures, AssertsCompiledSQL
from sqlalchemy import testing
-
class IdiosyncrasyTest(fixtures.TestBase, AssertsCompiledSQL):
__only_on__ = 'mysql'
__backend__ = True
@@ -177,3 +176,57 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
eq_([1, 3, 5], [r.id for r in results])
+class AnyAllTest(fixtures.TablesTest, AssertsCompiledSQL):
+ __only_on__ = 'mysql'
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'stuff', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('value', Integer)
+ )
+
+ @classmethod
+ def insert_data(cls):
+ stuff = cls.tables.stuff
+ testing.db.execute(
+ stuff.insert(),
+ [
+ {'id': 1, 'value': 1},
+ {'id': 2, 'value': 2},
+ {'id': 3, 'value': 3},
+ {'id': 4, 'value': 4},
+ {'id': 5, 'value': 5},
+ ]
+ )
+
+ def test_any_w_comparator(self):
+ stuff = self.tables.stuff
+ stmt = select([stuff.c.id]).where(
+ stuff.c.value > any_(select([stuff.c.value])))
+
+ eq_(
+ testing.db.execute(stmt).fetchall(),
+ [(2,), (3,), (4,), (5,)]
+ )
+
+ def test_all_w_comparator(self):
+ stuff = self.tables.stuff
+ stmt = select([stuff.c.id]).where(
+ stuff.c.value >= all_(select([stuff.c.value])))
+
+ eq_(
+ testing.db.execute(stmt).fetchall(),
+ [(5,)]
+ )
+
+ def test_any_literal(self):
+ stuff = self.tables.stuff
+ stmt = select([4 == any_(select([stuff.c.value]))])
+
+ is_(
+ testing.db.execute(stmt).scalar(), True
+ )
+
diff --git a/test/dialect/mysql/test_reflection.py b/test/dialect/mysql/test_reflection.py
index 39b39e006..a28876262 100644
--- a/test/dialect/mysql/test_reflection.py
+++ b/test/dialect/mysql/test_reflection.py
@@ -1,13 +1,195 @@
# coding: utf-8
-from sqlalchemy.testing import eq_
-from sqlalchemy import *
+from sqlalchemy.testing import eq_, is_
+from sqlalchemy import Column, Table, DDL, MetaData, TIMESTAMP, \
+ DefaultClause, String, Integer, Text, UnicodeText, SmallInteger,\
+ NCHAR, LargeBinary, DateTime, select, UniqueConstraint, Unicode,\
+ BigInteger
+from sqlalchemy import event
from sqlalchemy import sql
+from sqlalchemy import inspect
from sqlalchemy.dialects.mysql import base as mysql
from sqlalchemy.testing import fixtures, AssertsExecutionResults
from sqlalchemy import testing
+class TypeReflectionTest(fixtures.TestBase):
+ __only_on__ = 'mysql'
+ __backend__ = True
+
+ @testing.provide_metadata
+ def _run_test(self, specs, attributes):
+ columns = [Column('c%i' % (i + 1), t[0]) for i, t in enumerate(specs)]
+
+ # Early 5.0 releases seem to report more "general" for columns
+ # in a view, e.g. char -> varchar, tinyblob -> mediumblob
+ use_views = testing.db.dialect.server_version_info > (5, 0, 10)
+
+ m = self.metadata
+ Table('mysql_types', m, *columns)
+
+ if use_views:
+ event.listen(
+ m, 'after_create',
+ DDL(
+ 'CREATE OR REPLACE VIEW mysql_types_v '
+ 'AS SELECT * from mysql_types')
+ )
+ event.listen(
+ m, 'before_drop',
+ DDL("DROP VIEW IF EXISTS mysql_types_v")
+ )
+ m.create_all()
+
+ m2 = MetaData(testing.db)
+ tables = [
+ Table('mysql_types', m2, autoload=True)
+ ]
+ if use_views:
+ tables.append(Table('mysql_types_v', m2, autoload=True))
+
+ for table in tables:
+ for i, (reflected_col, spec) in enumerate(zip(table.c, specs)):
+ expected_spec = spec[1]
+ reflected_type = reflected_col.type
+ is_(type(reflected_type), type(expected_spec))
+
+ for attr in attributes:
+ eq_(
+ getattr(reflected_type, attr),
+ getattr(expected_spec, attr),
+ "Column %s: Attribute %s value of %s does not "
+ "match %s for type %s" % (
+ "c%i" % (i + 1),
+ attr,
+ getattr(reflected_type, attr),
+ getattr(expected_spec, attr),
+ spec[0]
+ )
+ )
+
+ def test_time_types(self):
+ specs = []
+
+ if testing.requires.mysql_fsp.enabled:
+ fsps = [None, 0, 5]
+ else:
+ fsps = [None]
+
+ for type_ in (mysql.TIMESTAMP, mysql.DATETIME, mysql.TIME):
+ # MySQL defaults fsp to 0, and if 0 does not report it.
+ # we don't actually render 0 right now in DDL but even if we do,
+ # it comes back blank
+ for fsp in fsps:
+ if fsp:
+ specs.append((type_(fsp=fsp), type_(fsp=fsp)))
+ else:
+ specs.append((type_(), type_()))
+
+ specs.extend([
+ (TIMESTAMP(), mysql.TIMESTAMP()),
+ (DateTime(), mysql.DATETIME()),
+ ])
+
+ # note 'timezone' should always be None on both
+ self._run_test(specs, ['fsp', 'timezone'])
+
+ def test_year_types(self):
+ specs = [
+ (mysql.YEAR(), mysql.YEAR(display_width=4)),
+ (mysql.YEAR(display_width=2), mysql.YEAR(display_width=2)),
+ (mysql.YEAR(display_width=4), mysql.YEAR(display_width=4)),
+ ]
+
+ self._run_test(specs, ['display_width'])
+
+ def test_string_types(self):
+ specs = [
+ (String(1), mysql.MSString(1)),
+ (String(3), mysql.MSString(3)),
+ (Text(), mysql.MSText()),
+ (Unicode(1), mysql.MSString(1)),
+ (Unicode(3), mysql.MSString(3)),
+ (UnicodeText(), mysql.MSText()),
+ (mysql.MSChar(1), mysql.MSChar(1)),
+ (mysql.MSChar(3), mysql.MSChar(3)),
+ (NCHAR(2), mysql.MSChar(2)),
+ (mysql.MSNChar(2), mysql.MSChar(2)),
+ (mysql.MSNVarChar(22), mysql.MSString(22),),
+ ]
+ self._run_test(specs, ['length'])
+
+ def test_integer_types(self):
+ specs = []
+ for type_ in [
+ mysql.TINYINT, mysql.SMALLINT,
+ mysql.MEDIUMINT, mysql.INTEGER, mysql.BIGINT]:
+ for display_width in [None, 4, 7]:
+ for unsigned in [False, True]:
+ for zerofill in [None, True]:
+ kw = {}
+ if display_width:
+ kw['display_width'] = display_width
+ if unsigned is not None:
+ kw['unsigned'] = unsigned
+ if zerofill is not None:
+ kw['zerofill'] = zerofill
+
+ zerofill = bool(zerofill)
+ source_type = type_(**kw)
+
+ if display_width is None:
+ display_width = {
+ mysql.MEDIUMINT: 9,
+ mysql.SMALLINT: 6,
+ mysql.TINYINT: 4,
+ mysql.INTEGER: 11,
+ mysql.BIGINT: 20
+ }[type_]
+
+ if zerofill:
+ unsigned = True
+
+ expected_type = type_(
+ display_width=display_width,
+ unsigned=unsigned,
+ zerofill=zerofill
+ )
+ specs.append(
+ (source_type, expected_type)
+ )
+
+ specs.extend([
+ (SmallInteger(), mysql.SMALLINT(display_width=6)),
+ (Integer(), mysql.INTEGER(display_width=11)),
+ (BigInteger, mysql.BIGINT(display_width=20))
+ ])
+ self._run_test(specs, ['display_width', 'unsigned', 'zerofill'])
+
+ def test_binary_types(self):
+ specs = [
+ (LargeBinary(3), mysql.TINYBLOB(), ),
+ (LargeBinary(), mysql.BLOB()),
+ (mysql.MSBinary(3), mysql.MSBinary(3), ),
+ (mysql.MSVarBinary(3), mysql.MSVarBinary(3)),
+ (mysql.MSTinyBlob(), mysql.MSTinyBlob()),
+ (mysql.MSBlob(), mysql.MSBlob()),
+ (mysql.MSBlob(1234), mysql.MSBlob()),
+ (mysql.MSMediumBlob(), mysql.MSMediumBlob()),
+ (mysql.MSLongBlob(), mysql.MSLongBlob()),
+ ]
+ self._run_test(specs, [])
+
+ @testing.uses_deprecated('Manually quoting ENUM value literals')
+ def test_legacy_enum_types(self):
+
+ specs = [
+ (mysql.ENUM("''","'fleem'"), mysql.ENUM("''","'fleem'")), # noqa
+ ]
+
+ self._run_test(specs, ['enums'])
+
+
class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
__only_on__ = 'mysql'
@@ -75,7 +257,8 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
def test_reflection_with_table_options(self):
comment = r"""Comment types type speedily ' " \ '' Fun!"""
- def_table = Table('mysql_def', MetaData(testing.db),
+ def_table = Table(
+ 'mysql_def', MetaData(testing.db),
Column('c1', Integer()),
mysql_engine='MEMORY',
mysql_comment=comment,
@@ -88,8 +271,9 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
def_table.create()
try:
- reflected = Table('mysql_def', MetaData(testing.db),
- autoload=True)
+ reflected = Table(
+ 'mysql_def', MetaData(testing.db),
+ autoload=True)
finally:
def_table.drop()
@@ -108,15 +292,16 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
assert reflected.kwargs['mysql_connection'] == 'fish'
# This field doesn't seem to be returned by mysql itself.
- #assert reflected.kwargs['mysql_password'] == 'secret'
+ # assert reflected.kwargs['mysql_password'] == 'secret'
# This is explicitly ignored when reflecting schema.
- #assert reflected.kwargs['mysql_auto_increment'] == '5'
+ # assert reflected.kwargs['mysql_auto_increment'] == '5'
def test_reflection_on_include_columns(self):
"""Test reflection of include_columns to be sure they respect case."""
- case_table = Table('mysql_case', MetaData(testing.db),
+ case_table = Table(
+ 'mysql_case', MetaData(testing.db),
Column('c1', String(10)),
Column('C2', String(10)),
Column('C3', String(10)))
@@ -128,132 +313,68 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
for t in case_table, reflected:
assert 'c1' in t.c.keys()
assert 'C2' in t.c.keys()
- reflected2 = Table('mysql_case', MetaData(testing.db),
- autoload=True, include_columns=['c1', 'c2'])
+ reflected2 = Table(
+ 'mysql_case', MetaData(testing.db),
+ autoload=True, include_columns=['c1', 'c2'])
assert 'c1' in reflected2.c.keys()
for c in ['c2', 'C2', 'C3']:
assert c not in reflected2.c.keys()
finally:
case_table.drop()
- @testing.exclude('mysql', '<', (5, 0, 0), 'early types are squirrely')
- @testing.uses_deprecated('Using String type with no length')
- @testing.uses_deprecated('Manually quoting ENUM value literals')
- def test_type_reflection(self):
- # (ask_for, roundtripped_as_if_different)
- specs = [(String(1), mysql.MSString(1), ),
- (String(3), mysql.MSString(3), ),
- (Text(), mysql.MSText(), ),
- (Unicode(1), mysql.MSString(1), ),
- (Unicode(3), mysql.MSString(3), ),
- (UnicodeText(), mysql.MSText(), ),
- (mysql.MSChar(1), ),
- (mysql.MSChar(3), ),
- (NCHAR(2), mysql.MSChar(2), ),
- (mysql.MSNChar(2), mysql.MSChar(2), ), # N is CREATE only
- (mysql.MSNVarChar(22), mysql.MSString(22), ),
- (SmallInteger(), mysql.MSSmallInteger(), ),
- (SmallInteger(), mysql.MSSmallInteger(4), ),
- (mysql.MSSmallInteger(), ),
- (mysql.MSSmallInteger(4), mysql.MSSmallInteger(4), ),
- (mysql.MSMediumInteger(), mysql.MSMediumInteger(), ),
- (mysql.MSMediumInteger(8), mysql.MSMediumInteger(8), ),
- (LargeBinary(3), mysql.TINYBLOB(), ),
- (LargeBinary(), mysql.BLOB() ),
- (mysql.MSBinary(3), mysql.MSBinary(3), ),
- (mysql.MSVarBinary(3),),
- (mysql.MSTinyBlob(),),
- (mysql.MSBlob(),),
- (mysql.MSBlob(1234), mysql.MSBlob()),
- (mysql.MSMediumBlob(),),
- (mysql.MSLongBlob(),),
- (mysql.ENUM("''","'fleem'"), ),
- ]
-
- columns = [Column('c%i' % (i + 1), t[0]) for i, t in enumerate(specs)]
-
- db = testing.db
- m = MetaData(db)
- t_table = Table('mysql_types', m, *columns)
- try:
- m.create_all()
-
- m2 = MetaData(db)
- rt = Table('mysql_types', m2, autoload=True)
- try:
- db.execute('CREATE OR REPLACE VIEW mysql_types_v '
- 'AS SELECT * from mysql_types')
- rv = Table('mysql_types_v', m2, autoload=True)
-
- expected = [len(c) > 1 and c[1] or c[0] for c in specs]
-
- # Early 5.0 releases seem to report more "general" for columns
- # in a view, e.g. char -> varchar, tinyblob -> mediumblob
- #
- # Not sure exactly which point version has the fix.
- if db.dialect.server_version_info < (5, 0, 11):
- tables = rt,
- else:
- tables = rt, rv
-
- for table in tables:
- for i, reflected in enumerate(table.c):
- assert isinstance(reflected.type,
- type(expected[i])), \
- 'element %d: %r not instance of %r' % (i,
- reflected.type, type(expected[i]))
- finally:
- db.execute('DROP VIEW mysql_types_v')
- finally:
- m.drop_all()
-
def test_autoincrement(self):
meta = MetaData(testing.db)
try:
Table('ai_1', meta,
- Column('int_y', Integer, primary_key=True),
+ Column('int_y', Integer, primary_key=True,
+ autoincrement=True),
Column('int_n', Integer, DefaultClause('0'),
primary_key=True),
- mysql_engine='MyISAM')
+ mysql_engine='MyISAM')
Table('ai_2', meta,
- Column('int_y', Integer, primary_key=True),
+ Column('int_y', Integer, primary_key=True,
+ autoincrement=True),
Column('int_n', Integer, DefaultClause('0'),
primary_key=True),
- mysql_engine='MyISAM')
+ mysql_engine='MyISAM')
Table('ai_3', meta,
Column('int_n', Integer, DefaultClause('0'),
primary_key=True, autoincrement=False),
- Column('int_y', Integer, primary_key=True),
- mysql_engine='MyISAM')
+ Column('int_y', Integer, primary_key=True,
+ autoincrement=True),
+ mysql_engine='MyISAM')
Table('ai_4', meta,
Column('int_n', Integer, DefaultClause('0'),
primary_key=True, autoincrement=False),
Column('int_n2', Integer, DefaultClause('0'),
primary_key=True, autoincrement=False),
- mysql_engine='MyISAM')
+ mysql_engine='MyISAM')
Table('ai_5', meta,
- Column('int_y', Integer, primary_key=True),
+ Column('int_y', Integer, primary_key=True,
+ autoincrement=True),
Column('int_n', Integer, DefaultClause('0'),
primary_key=True, autoincrement=False),
- mysql_engine='MyISAM')
+ mysql_engine='MyISAM')
Table('ai_6', meta,
Column('o1', String(1), DefaultClause('x'),
primary_key=True),
- Column('int_y', Integer, primary_key=True),
- mysql_engine='MyISAM')
+ Column('int_y', Integer, primary_key=True,
+ autoincrement=True),
+ mysql_engine='MyISAM')
Table('ai_7', meta,
Column('o1', String(1), DefaultClause('x'),
primary_key=True),
Column('o2', String(1), DefaultClause('x'),
primary_key=True),
- Column('int_y', Integer, primary_key=True),
- mysql_engine='MyISAM')
+ Column('int_y', Integer, primary_key=True,
+ autoincrement=True),
+ mysql_engine='MyISAM')
Table('ai_8', meta,
Column('o1', String(1), DefaultClause('x'),
primary_key=True),
Column('o2', String(1), DefaultClause('x'),
primary_key=True),
- mysql_engine='MyISAM')
+ mysql_engine='MyISAM')
meta.create_all()
table_names = ['ai_1', 'ai_2', 'ai_3', 'ai_4',
@@ -309,7 +430,7 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
["t TIMESTAMP"],
["u TIMESTAMP DEFAULT CURRENT_TIMESTAMP"]
]):
- Table("nn_t%d" % idx, meta) # to allow DROP
+ Table("nn_t%d" % idx, meta) # to allow DROP
testing.db.execute("""
CREATE TABLE nn_t%d (
@@ -380,7 +501,8 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
class RawReflectionTest(fixtures.TestBase):
def setup(self):
dialect = mysql.dialect()
- self.parser = mysql.MySQLTableDefinitionParser(dialect, dialect.identifier_preparer)
+ self.parser = mysql.MySQLTableDefinitionParser(
+ dialect, dialect.identifier_preparer)
def test_key_reflection(self):
regex = self.parser._re_key
@@ -391,10 +513,14 @@ class RawReflectionTest(fixtures.TestBase):
assert regex.match(' PRIMARY KEY (`id`)')
assert regex.match(' PRIMARY KEY USING BTREE (`id`)')
assert regex.match(' PRIMARY KEY (`id`) USING BTREE')
- assert regex.match(' PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE 16')
- assert regex.match(' PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE=16')
- assert regex.match(' PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE = 16')
- assert not regex.match(' PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE = = 16')
+ assert regex.match(
+ ' PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE 16')
+ assert regex.match(
+ ' PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE=16')
+ assert regex.match(
+ ' PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE = 16')
+ assert not regex.match(
+ ' PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE = = 16')
def test_fk_reflection(self):
regex = self.parser._re_constraint
diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py
index 9fa5c9804..71d8fa3e5 100644
--- a/test/dialect/postgresql/test_compiler.py
+++ b/test/dialect/postgresql/test_compiler.py
@@ -9,11 +9,13 @@ from sqlalchemy import Sequence, Table, Column, Integer, update, String,\
Text
from sqlalchemy.dialects.postgresql import ExcludeConstraint, array
from sqlalchemy import exc, schema
-from sqlalchemy.dialects.postgresql import base as postgresql
+from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import TSRANGE
from sqlalchemy.orm import mapper, aliased, Session
-from sqlalchemy.sql import table, column, operators
+from sqlalchemy.sql import table, column, operators, literal_column
+from sqlalchemy.sql import util as sql_util
from sqlalchemy.util import u
+from sqlalchemy.dialects.postgresql import aggregate_order_by
class SequenceTest(fixtures.TestBase, AssertsCompiledSQL):
@@ -21,7 +23,7 @@ class SequenceTest(fixtures.TestBase, AssertsCompiledSQL):
def test_format(self):
seq = Sequence('my_seq_no_schema')
- dialect = postgresql.PGDialect()
+ dialect = postgresql.dialect()
assert dialect.identifier_preparer.format_sequence(seq) \
== 'my_seq_no_schema'
seq = Sequence('my_seq', schema='some_schema')
@@ -508,6 +510,19 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
'(CAST("Room" AS TEXT) WITH =)'
)
+ def test_exclude_constraint_when(self):
+ m = MetaData()
+ tbl = Table(
+ 'testtbl', m,
+ Column('room', String)
+ )
+ cons = ExcludeConstraint(('room', '='), where=tbl.c.room.in_(['12']))
+ tbl.append_constraint(cons)
+ self.assert_compile(schema.AddConstraint(cons),
+ 'ALTER TABLE testtbl ADD EXCLUDE USING gist '
+ '(room WITH =) WHERE (testtbl.room IN (\'12\'))',
+ dialect=postgresql.dialect())
+
def test_substring(self):
self.assert_compile(func.substring('abc', 1, 2),
'SUBSTRING(%(substring_1)s FROM %(substring_2)s '
@@ -578,6 +593,22 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"WHERE mytable_1.myid = %(myid_1)s FOR UPDATE OF mytable_1"
)
+ def test_for_update_with_schema(self):
+ m = MetaData()
+ table1 = Table(
+ 'mytable', m,
+ Column('myid'),
+ Column('name'),
+ schema='testschema'
+ )
+
+ self.assert_compile(
+ table1.select(table1.c.myid == 7).with_for_update(of=table1),
+ "SELECT testschema.mytable.myid, testschema.mytable.name "
+ "FROM testschema.mytable "
+ "WHERE testschema.mytable.myid = %(myid_1)s "
+ "FOR UPDATE OF mytable")
+
def test_reserved_words(self):
table = Table("pg_table", MetaData(),
Column("col1", Integer),
@@ -693,7 +724,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
self._test_array_zero_indexes(False)
def test_array_literal_type(self):
- is_(postgresql.array([1, 2]).type._type_affinity, postgresql.ARRAY)
+ isinstance(postgresql.array([1, 2]).type, postgresql.ARRAY)
is_(postgresql.array([1, 2]).type.item_type._type_affinity, Integer)
is_(postgresql.array([1, 2], type_=String).
@@ -800,6 +831,48 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
dialect=postgresql.dialect()
)
+ def test_aggregate_order_by_one(self):
+ m = MetaData()
+ table = Table('table1', m, Column('a', Integer), Column('b', Integer))
+ expr = func.array_agg(aggregate_order_by(table.c.a, table.c.b.desc()))
+ stmt = select([expr])
+
+ # note this tests that the object exports FROM objects
+ # correctly
+ self.assert_compile(
+ stmt,
+ "SELECT array_agg(table1.a ORDER BY table1.b DESC) "
+ "AS array_agg_1 FROM table1"
+ )
+
+ def test_aggregate_order_by_two(self):
+ m = MetaData()
+ table = Table('table1', m, Column('a', Integer), Column('b', Integer))
+ expr = func.string_agg(
+ table.c.a,
+ aggregate_order_by(literal_column("','"), table.c.a)
+ )
+ stmt = select([expr])
+
+ self.assert_compile(
+ stmt,
+ "SELECT string_agg(table1.a, ',' ORDER BY table1.a) "
+ "AS string_agg_1 FROM table1"
+ )
+
+ def test_aggregate_order_by_adapt(self):
+ m = MetaData()
+ table = Table('table1', m, Column('a', Integer), Column('b', Integer))
+ expr = func.array_agg(aggregate_order_by(table.c.a, table.c.b.desc()))
+ stmt = select([expr])
+
+ a1 = table.alias('foo')
+ stmt2 = sql_util.ClauseAdapter(a1).traverse(stmt)
+ self.assert_compile(
+ stmt2,
+ "SELECT array_agg(foo.a ORDER BY foo.b DESC) AS array_agg_1 FROM table1 AS foo"
+ )
+
class DistinctOnTest(fixtures.TestBase, AssertsCompiledSQL):
diff --git a/test/dialect/postgresql/test_query.py b/test/dialect/postgresql/test_query.py
index 4a33644e0..9f92a7830 100644
--- a/test/dialect/postgresql/test_query.py
+++ b/test/dialect/postgresql/test_query.py
@@ -12,7 +12,7 @@ from sqlalchemy import exc
from sqlalchemy.dialects import postgresql
import datetime
-metadata = matchtable = cattable = None
+matchtable = cattable = None
class InsertTest(fixtures.TestBase, AssertsExecutionResults):
@@ -22,23 +22,19 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
@classmethod
def setup_class(cls):
- global metadata
- cls.engine = testing.db
- metadata = MetaData(testing.db)
+ cls.metadata = MetaData(testing.db)
def teardown(self):
- metadata.drop_all()
- metadata.clear()
- if self.engine is not testing.db:
- self.engine.dispose()
+ self.metadata.drop_all()
+ self.metadata.clear()
def test_compiled_insert(self):
table = Table(
- 'testtable', metadata, Column(
+ 'testtable', self.metadata, Column(
'id', Integer, primary_key=True),
Column(
'data', String(30)))
- metadata.create_all()
+ self.metadata.create_all()
ins = table.insert(
inline=True,
values={'data': bindparam('x')}).compile()
@@ -49,17 +45,18 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
)
def test_foreignkey_missing_insert(self):
- t1 = Table('t1', metadata, Column('id', Integer,
- primary_key=True))
+ Table(
+ 't1', self.metadata,
+ Column('id', Integer, primary_key=True))
t2 = Table(
't2',
- metadata,
+ self.metadata,
Column(
'id',
Integer,
ForeignKey('t1.id'),
primary_key=True))
- metadata.create_all()
+ self.metadata.create_all()
# want to ensure that "null value in column "id" violates not-
# null constraint" is raised (IntegrityError on psycoopg2, but
@@ -72,14 +69,16 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
engines.testing_engine(options={'implicit_returning': False}),
engines.testing_engine(options={'implicit_returning': True})
]:
- assert_raises_message(exc.DBAPIError,
- 'violates not-null constraint',
- eng.execute, t2.insert())
+ assert_raises_message(
+ exc.CompileError,
+ ".*has no Python-side or server-side default.*",
+ eng.execute, t2.insert()
+ )
def test_sequence_insert(self):
table = Table(
'testtable',
- metadata,
+ self.metadata,
Column(
'id',
Integer,
@@ -88,14 +87,14 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
Column(
'data',
String(30)))
- metadata.create_all()
+ self.metadata.create_all()
self._assert_data_with_sequence(table, 'my_seq')
@testing.requires.returning
def test_sequence_returning_insert(self):
table = Table(
'testtable',
- metadata,
+ self.metadata,
Column(
'id',
Integer,
@@ -104,57 +103,57 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
Column(
'data',
String(30)))
- metadata.create_all()
+ self.metadata.create_all()
self._assert_data_with_sequence_returning(table, 'my_seq')
def test_opt_sequence_insert(self):
table = Table(
- 'testtable', metadata,
+ 'testtable', self.metadata,
Column(
'id', Integer, Sequence(
'my_seq', optional=True), primary_key=True),
Column(
'data', String(30)))
- metadata.create_all()
+ self.metadata.create_all()
self._assert_data_autoincrement(table)
@testing.requires.returning
def test_opt_sequence_returning_insert(self):
table = Table(
- 'testtable', metadata,
+ 'testtable', self.metadata,
Column(
'id', Integer, Sequence(
'my_seq', optional=True), primary_key=True),
Column(
'data', String(30)))
- metadata.create_all()
+ self.metadata.create_all()
self._assert_data_autoincrement_returning(table)
def test_autoincrement_insert(self):
table = Table(
- 'testtable', metadata,
+ 'testtable', self.metadata,
Column(
'id', Integer, primary_key=True),
Column(
'data', String(30)))
- metadata.create_all()
+ self.metadata.create_all()
self._assert_data_autoincrement(table)
@testing.requires.returning
def test_autoincrement_returning_insert(self):
table = Table(
- 'testtable', metadata,
+ 'testtable', self.metadata,
Column(
'id', Integer, primary_key=True),
Column(
'data', String(30)))
- metadata.create_all()
+ self.metadata.create_all()
self._assert_data_autoincrement_returning(table)
def test_noautoincrement_insert(self):
table = Table(
'testtable',
- metadata,
+ self.metadata,
Column(
'id',
Integer,
@@ -163,42 +162,45 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
Column(
'data',
String(30)))
- metadata.create_all()
+ self.metadata.create_all()
self._assert_data_noautoincrement(table)
def _assert_data_autoincrement(self, table):
- self.engine = \
+ engine = \
engines.testing_engine(options={'implicit_returning': False})
- metadata.bind = self.engine
- with self.sql_execution_asserter(self.engine) as asserter:
+ with self.sql_execution_asserter(engine) as asserter:
- # execute with explicit id
+ with engine.connect() as conn:
+ # execute with explicit id
- r = table.insert().execute({'id': 30, 'data': 'd1'})
- assert r.inserted_primary_key == [30]
+ r = conn.execute(table.insert(), {'id': 30, 'data': 'd1'})
+ eq_(r.inserted_primary_key, [30])
- # execute with prefetch id
+ # execute with prefetch id
- r = table.insert().execute({'data': 'd2'})
- assert r.inserted_primary_key == [1]
+ r = conn.execute(table.insert(), {'data': 'd2'})
+ eq_(r.inserted_primary_key, [1])
- # executemany with explicit ids
+ # executemany with explicit ids
- table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
- 'data': 'd4'})
+ conn.execute(
+ table.insert(),
+ {'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'})
- # executemany, uses SERIAL
+ # executemany, uses SERIAL
- table.insert().execute({'data': 'd5'}, {'data': 'd6'})
+ conn.execute(table.insert(), {'data': 'd5'}, {'data': 'd6'})
- # single execute, explicit id, inline
+ # single execute, explicit id, inline
- table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
+ conn.execute(
+ table.insert(inline=True),
+ {'id': 33, 'data': 'd7'})
- # single execute, inline, uses SERIAL
+ # single execute, inline, uses SERIAL
- table.insert(inline=True).execute({'data': 'd8'})
+ conn.execute(table.insert(inline=True), {'data': 'd8'})
asserter.assert_(
DialectSQL(
@@ -221,37 +223,41 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
[{'data': 'd8'}]),
)
- eq_(
- table.select().execute().fetchall(),
- [
- (30, 'd1'),
- (1, 'd2'),
- (31, 'd3'),
- (32, 'd4'),
- (2, 'd5'),
- (3, 'd6'),
- (33, 'd7'),
- (4, 'd8'),
- ]
- )
+ with engine.connect() as conn:
+ eq_(
+ conn.execute(table.select()).fetchall(),
+ [
+ (30, 'd1'),
+ (1, 'd2'),
+ (31, 'd3'),
+ (32, 'd4'),
+ (2, 'd5'),
+ (3, 'd6'),
+ (33, 'd7'),
+ (4, 'd8'),
+ ]
+ )
- table.delete().execute()
+ conn.execute(table.delete())
# test the same series of events using a reflected version of
# the table
- m2 = MetaData(self.engine)
+ m2 = MetaData(engine)
table = Table(table.name, m2, autoload=True)
- with self.sql_execution_asserter(self.engine) as asserter:
- table.insert().execute({'id': 30, 'data': 'd1'})
- r = table.insert().execute({'data': 'd2'})
- assert r.inserted_primary_key == [5]
- table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
- 'data': 'd4'})
- table.insert().execute({'data': 'd5'}, {'data': 'd6'})
- table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
- table.insert(inline=True).execute({'data': 'd8'})
+ with self.sql_execution_asserter(engine) as asserter:
+ with engine.connect() as conn:
+ conn.execute(table.insert(), {'id': 30, 'data': 'd1'})
+ r = conn.execute(table.insert(), {'data': 'd2'})
+ eq_(r.inserted_primary_key, [5])
+ conn.execute(
+ table.insert(),
+ {'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'})
+ conn.execute(table.insert(), {'data': 'd5'}, {'data': 'd6'})
+ conn.execute(
+ table.insert(inline=True), {'id': 33, 'data': 'd7'})
+ conn.execute(table.insert(inline=True), {'data': 'd8'})
asserter.assert_(
DialectSQL(
@@ -273,278 +279,305 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
'INSERT INTO testtable (data) VALUES (:data)',
[{'data': 'd8'}]),
)
- eq_(
- table.select().execute().fetchall(),
- [
- (30, 'd1'),
- (5, 'd2'),
- (31, 'd3'),
- (32, 'd4'),
- (6, 'd5'),
- (7, 'd6'),
- (33, 'd7'),
- (8, 'd8'),
- ]
- )
- table.delete().execute()
+ with engine.connect() as conn:
+ eq_(
+ conn.execute(table.select()).fetchall(),
+ [
+ (30, 'd1'),
+ (5, 'd2'),
+ (31, 'd3'),
+ (32, 'd4'),
+ (6, 'd5'),
+ (7, 'd6'),
+ (33, 'd7'),
+ (8, 'd8'),
+ ]
+ )
+ conn.execute(table.delete())
def _assert_data_autoincrement_returning(self, table):
- self.engine = \
+ engine = \
engines.testing_engine(options={'implicit_returning': True})
- metadata.bind = self.engine
- with self.sql_execution_asserter(self.engine) as asserter:
+ with self.sql_execution_asserter(engine) as asserter:
+ with engine.connect() as conn:
- # execute with explicit id
+ # execute with explicit id
- r = table.insert().execute({'id': 30, 'data': 'd1'})
- assert r.inserted_primary_key == [30]
+ r = conn.execute(table.insert(), {'id': 30, 'data': 'd1'})
+ eq_(r.inserted_primary_key, [30])
- # execute with prefetch id
+ # execute with prefetch id
- r = table.insert().execute({'data': 'd2'})
- assert r.inserted_primary_key == [1]
+ r = conn.execute(table.insert(), {'data': 'd2'})
+ eq_(r.inserted_primary_key, [1])
- # executemany with explicit ids
+ # executemany with explicit ids
- table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
- 'data': 'd4'})
+ conn.execute(
+ table.insert(),
+ {'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'})
- # executemany, uses SERIAL
+ # executemany, uses SERIAL
- table.insert().execute({'data': 'd5'}, {'data': 'd6'})
+ conn.execute(table.insert(), {'data': 'd5'}, {'data': 'd6'})
- # single execute, explicit id, inline
+ # single execute, explicit id, inline
- table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
+ conn.execute(
+ table.insert(inline=True), {'id': 33, 'data': 'd7'})
- # single execute, inline, uses SERIAL
+ # single execute, inline, uses SERIAL
- table.insert(inline=True).execute({'data': 'd8'})
+ conn.execute(table.insert(inline=True), {'data': 'd8'})
asserter.assert_(
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- {'id': 30, 'data': 'd1'}),
+ {'id': 30, 'data': 'd1'}),
DialectSQL('INSERT INTO testtable (data) VALUES (:data) RETURNING '
- 'testtable.id', {'data': 'd2'}),
+ 'testtable.id', {'data': 'd2'}),
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+ [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
DialectSQL('INSERT INTO testtable (data) VALUES (:data)',
- [{'data': 'd5'}, {'data': 'd6'}]),
+ [{'data': 'd5'}, {'data': 'd6'}]),
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- [{'id': 33, 'data': 'd7'}]),
+ [{'id': 33, 'data': 'd7'}]),
DialectSQL('INSERT INTO testtable (data) VALUES (:data)',
- [{'data': 'd8'}]),
+ [{'data': 'd8'}]),
)
- eq_(
- table.select().execute().fetchall(),
- [
- (30, 'd1'),
- (1, 'd2'),
- (31, 'd3'),
- (32, 'd4'),
- (2, 'd5'),
- (3, 'd6'),
- (33, 'd7'),
- (4, 'd8'),
- ]
- )
- table.delete().execute()
+ with engine.connect() as conn:
+ eq_(
+ conn.execute(table.select()).fetchall(),
+ [
+ (30, 'd1'),
+ (1, 'd2'),
+ (31, 'd3'),
+ (32, 'd4'),
+ (2, 'd5'),
+ (3, 'd6'),
+ (33, 'd7'),
+ (4, 'd8'),
+ ]
+ )
+ conn.execute(table.delete())
# test the same series of events using a reflected version of
# the table
- m2 = MetaData(self.engine)
+ m2 = MetaData(engine)
table = Table(table.name, m2, autoload=True)
- with self.sql_execution_asserter(self.engine) as asserter:
- table.insert().execute({'id': 30, 'data': 'd1'})
- r = table.insert().execute({'data': 'd2'})
- assert r.inserted_primary_key == [5]
- table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
- 'data': 'd4'})
- table.insert().execute({'data': 'd5'}, {'data': 'd6'})
- table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
- table.insert(inline=True).execute({'data': 'd8'})
+ with self.sql_execution_asserter(engine) as asserter:
+ with engine.connect() as conn:
+ conn.execute(table.insert(), {'id': 30, 'data': 'd1'})
+ r = conn.execute(table.insert(), {'data': 'd2'})
+ eq_(r.inserted_primary_key, [5])
+ conn.execute(
+ table.insert(),
+ {'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'})
+ conn.execute(table.insert(), {'data': 'd5'}, {'data': 'd6'})
+ conn.execute(
+ table.insert(inline=True), {'id': 33, 'data': 'd7'})
+ conn.execute(table.insert(inline=True), {'data': 'd8'})
asserter.assert_(
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- {'id': 30, 'data': 'd1'}),
+ {'id': 30, 'data': 'd1'}),
DialectSQL('INSERT INTO testtable (data) VALUES (:data) RETURNING '
- 'testtable.id', {'data': 'd2'}),
+ 'testtable.id', {'data': 'd2'}),
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+ [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
DialectSQL('INSERT INTO testtable (data) VALUES (:data)',
- [{'data': 'd5'}, {'data': 'd6'}]),
+ [{'data': 'd5'}, {'data': 'd6'}]),
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- [{'id': 33, 'data': 'd7'}]),
- DialectSQL('INSERT INTO testtable (data) VALUES (:data)', [{'data': 'd8'}]),
- )
- eq_(
- table.select().execute().fetchall(),
- [
- (30, 'd1'),
- (5, 'd2'),
- (31, 'd3'),
- (32, 'd4'),
- (6, 'd5'),
- (7, 'd6'),
- (33, 'd7'),
- (8, 'd8'),
- ]
+ [{'id': 33, 'data': 'd7'}]),
+ DialectSQL(
+ 'INSERT INTO testtable (data) VALUES (:data)',
+ [{'data': 'd8'}]),
)
- table.delete().execute()
+
+ with engine.connect() as conn:
+ eq_(
+ conn.execute(table.select()).fetchall(),
+ [
+ (30, 'd1'),
+ (5, 'd2'),
+ (31, 'd3'),
+ (32, 'd4'),
+ (6, 'd5'),
+ (7, 'd6'),
+ (33, 'd7'),
+ (8, 'd8'),
+ ]
+ )
+ conn.execute(table.delete())
def _assert_data_with_sequence(self, table, seqname):
- self.engine = \
+ engine = \
engines.testing_engine(options={'implicit_returning': False})
- metadata.bind = self.engine
- with self.sql_execution_asserter(self.engine) as asserter:
- table.insert().execute({'id': 30, 'data': 'd1'})
- table.insert().execute({'data': 'd2'})
- table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
- 'data': 'd4'})
- table.insert().execute({'data': 'd5'}, {'data': 'd6'})
- table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
- table.insert(inline=True).execute({'data': 'd8'})
+ with self.sql_execution_asserter(engine) as asserter:
+ with engine.connect() as conn:
+ conn.execute(table.insert(), {'id': 30, 'data': 'd1'})
+ conn.execute(table.insert(), {'data': 'd2'})
+ conn.execute(table.insert(),
+ {'id': 31, 'data': 'd3'},
+ {'id': 32, 'data': 'd4'})
+ conn.execute(table.insert(), {'data': 'd5'}, {'data': 'd6'})
+ conn.execute(table.insert(inline=True),
+ {'id': 33, 'data': 'd7'})
+ conn.execute(table.insert(inline=True), {'data': 'd8'})
asserter.assert_(
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- {'id': 30, 'data': 'd1'}),
+ {'id': 30, 'data': 'd1'}),
CursorSQL("select nextval('my_seq')"),
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- {'id': 1, 'data': 'd2'}),
+ {'id': 1, 'data': 'd2'}),
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
- DialectSQL("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
- ":data)" % seqname, [{'data': 'd5'}, {'data': 'd6'}]),
+ [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+ DialectSQL(
+ "INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
+ ":data)" % seqname, [{'data': 'd5'}, {'data': 'd6'}]),
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- [{'id': 33, 'data': 'd7'}]),
- DialectSQL("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
- ":data)" % seqname, [{'data': 'd8'}]),
- )
- eq_(
- table.select().execute().fetchall(),
- [
- (30, 'd1'),
- (1, 'd2'),
- (31, 'd3'),
- (32, 'd4'),
- (2, 'd5'),
- (3, 'd6'),
- (33, 'd7'),
- (4, 'd8'),
- ]
+ [{'id': 33, 'data': 'd7'}]),
+ DialectSQL(
+ "INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
+ ":data)" % seqname, [{'data': 'd8'}]),
)
+ with engine.connect() as conn:
+ eq_(
+ conn.execute(table.select()).fetchall(),
+ [
+ (30, 'd1'),
+ (1, 'd2'),
+ (31, 'd3'),
+ (32, 'd4'),
+ (2, 'd5'),
+ (3, 'd6'),
+ (33, 'd7'),
+ (4, 'd8'),
+ ]
+ )
# cant test reflection here since the Sequence must be
# explicitly specified
def _assert_data_with_sequence_returning(self, table, seqname):
- self.engine = \
+ engine = \
engines.testing_engine(options={'implicit_returning': True})
- metadata.bind = self.engine
- with self.sql_execution_asserter(self.engine) as asserter:
- table.insert().execute({'id': 30, 'data': 'd1'})
- table.insert().execute({'data': 'd2'})
- table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
- 'data': 'd4'})
- table.insert().execute({'data': 'd5'}, {'data': 'd6'})
- table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
- table.insert(inline=True).execute({'data': 'd8'})
+ with self.sql_execution_asserter(engine) as asserter:
+ with engine.connect() as conn:
+ conn.execute(table.insert(), {'id': 30, 'data': 'd1'})
+ conn.execute(table.insert(), {'data': 'd2'})
+ conn.execute(table.insert(),
+ {'id': 31, 'data': 'd3'},
+ {'id': 32, 'data': 'd4'})
+ conn.execute(table.insert(), {'data': 'd5'}, {'data': 'd6'})
+ conn.execute(
+ table.insert(inline=True), {'id': 33, 'data': 'd7'})
+ conn.execute(table.insert(inline=True), {'data': 'd8'})
asserter.assert_(
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- {'id': 30, 'data': 'd1'}),
+ {'id': 30, 'data': 'd1'}),
DialectSQL("INSERT INTO testtable (id, data) VALUES "
- "(nextval('my_seq'), :data) RETURNING testtable.id",
- {'data': 'd2'}),
+ "(nextval('my_seq'), :data) RETURNING testtable.id",
+ {'data': 'd2'}),
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
- DialectSQL("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
- ":data)" % seqname, [{'data': 'd5'}, {'data': 'd6'}]),
+ [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+ DialectSQL(
+ "INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
+ ":data)" % seqname, [{'data': 'd5'}, {'data': 'd6'}]),
DialectSQL('INSERT INTO testtable (id, data) VALUES (:id, :data)',
- [{'id': 33, 'data': 'd7'}]),
- DialectSQL("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
- ":data)" % seqname, [{'data': 'd8'}]),
+ [{'id': 33, 'data': 'd7'}]),
+ DialectSQL(
+ "INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
+ ":data)" % seqname, [{'data': 'd8'}]),
)
- eq_(
- table.select().execute().fetchall(),
- [
- (30, 'd1'),
- (1, 'd2'),
- (31, 'd3'),
- (32, 'd4'),
- (2, 'd5'),
- (3, 'd6'),
- (33, 'd7'),
- (4, 'd8'),
- ]
- )
+ with engine.connect() as conn:
+ eq_(
+ conn.execute(table.select()).fetchall(),
+ [
+ (30, 'd1'),
+ (1, 'd2'),
+ (31, 'd3'),
+ (32, 'd4'),
+ (2, 'd5'),
+ (3, 'd6'),
+ (33, 'd7'),
+ (4, 'd8'),
+ ]
+ )
- # cant test reflection here since the Sequence must be
- # explicitly specified
+ # cant test reflection here since the Sequence must be
+ # explicitly specified
def _assert_data_noautoincrement(self, table):
- self.engine = \
+ engine = \
engines.testing_engine(options={'implicit_returning': False})
- metadata.bind = self.engine
- table.insert().execute({'id': 30, 'data': 'd1'})
- if self.engine.driver == 'pg8000':
- exception_cls = exc.ProgrammingError
- elif self.engine.driver == 'pypostgresql':
- exception_cls = Exception
- else:
- exception_cls = exc.IntegrityError
- assert_raises_message(exception_cls,
- 'violates not-null constraint',
- table.insert().execute, {'data': 'd2'})
- assert_raises_message(exception_cls,
- 'violates not-null constraint',
- table.insert().execute, {'data': 'd2'},
- {'data': 'd3'})
- assert_raises_message(exception_cls,
- 'violates not-null constraint',
- table.insert().execute, {'data': 'd2'})
- assert_raises_message(exception_cls,
- 'violates not-null constraint',
- table.insert().execute, {'data': 'd2'},
- {'data': 'd3'})
- table.insert().execute({'id': 31, 'data': 'd2'}, {'id': 32,
- 'data': 'd3'})
- table.insert(inline=True).execute({'id': 33, 'data': 'd4'})
- assert table.select().execute().fetchall() == [
- (30, 'd1'),
- (31, 'd2'),
- (32, 'd3'),
- (33, 'd4')]
- table.delete().execute()
+
+ with engine.connect() as conn:
+ conn.execute(table.insert(), {'id': 30, 'data': 'd1'})
+
+ assert_raises_message(
+ exc.CompileError,
+ ".*has no Python-side or server-side default.*",
+ conn.execute, table.insert(), {'data': 'd2'})
+ assert_raises_message(
+ exc.CompileError,
+ ".*has no Python-side or server-side default.*",
+ conn.execute, table.insert(), {'data': 'd2'},
+ {'data': 'd3'})
+ assert_raises_message(
+ exc.CompileError,
+ ".*has no Python-side or server-side default.*",
+ conn.execute, table.insert(), {'data': 'd2'})
+ assert_raises_message(
+ exc.CompileError,
+ ".*has no Python-side or server-side default.*",
+ conn.execute, table.insert(), {'data': 'd2'},
+ {'data': 'd3'})
+
+ conn.execute(
+ table.insert(),
+ {'id': 31, 'data': 'd2'}, {'id': 32, 'data': 'd3'})
+ conn.execute(table.insert(inline=True), {'id': 33, 'data': 'd4'})
+ eq_(conn.execute(table.select()).fetchall(), [
+ (30, 'd1'),
+ (31, 'd2'),
+ (32, 'd3'),
+ (33, 'd4')])
+ conn.execute(table.delete())
# test the same series of events using a reflected version of
# the table
- m2 = MetaData(self.engine)
+ m2 = MetaData(engine)
table = Table(table.name, m2, autoload=True)
- table.insert().execute({'id': 30, 'data': 'd1'})
- assert_raises_message(exception_cls,
- 'violates not-null constraint',
- table.insert().execute, {'data': 'd2'})
- assert_raises_message(exception_cls,
- 'violates not-null constraint',
- table.insert().execute, {'data': 'd2'},
- {'data': 'd3'})
- table.insert().execute({'id': 31, 'data': 'd2'}, {'id': 32,
- 'data': 'd3'})
- table.insert(inline=True).execute({'id': 33, 'data': 'd4'})
- assert table.select().execute().fetchall() == [
- (30, 'd1'),
- (31, 'd2'),
- (32, 'd3'),
- (33, 'd4')]
+ with engine.connect() as conn:
+ conn.execute(table.insert(), {'id': 30, 'data': 'd1'})
+ assert_raises_message(
+ exc.CompileError,
+ ".*has no Python-side or server-side default.*",
+ conn.execute, table.insert(), {'data': 'd2'})
+ assert_raises_message(
+ exc.CompileError,
+ ".*has no Python-side or server-side default.*",
+ conn.execute, table.insert(), {'data': 'd2'},
+ {'data': 'd3'})
+ conn.execute(
+ table.insert(),
+ {'id': 31, 'data': 'd2'}, {'id': 32, 'data': 'd3'})
+ conn.execute(table.insert(inline=True), {'id': 33, 'data': 'd4'})
+ eq_(conn.execute(table.select()).fetchall(), [
+ (30, 'd1'),
+ (31, 'd2'),
+ (32, 'd3'),
+ (33, 'd4')])
class ServerSideCursorsTest(fixtures.TestBase, AssertsExecutionResults):
@@ -837,6 +870,19 @@ class ExtractTest(fixtures.TablesTest):
run_deletes = None
@classmethod
+ def setup_bind(cls):
+ from sqlalchemy import event
+ eng = engines.testing_engine()
+
+ @event.listens_for(eng, "connect")
+ def connect(dbapi_conn, rec):
+ cursor = dbapi_conn.cursor()
+ cursor.execute("SET SESSION TIME ZONE 0")
+ cursor.close()
+
+ return eng
+
+ @classmethod
def define_tables(cls, metadata):
Table('t', metadata,
Column('id', Integer, primary_key=True),
@@ -856,23 +902,17 @@ class ExtractTest(fixtures.TablesTest):
def utcoffset(self, dt):
return datetime.timedelta(hours=4)
- with testing.db.connect() as conn:
-
- # we aren't resetting this at the moment but we don't have
- # any other tests that are TZ specific
- conn.execute("SET SESSION TIME ZONE 0")
- conn.execute(
- cls.tables.t.insert(),
- {
- 'dtme': datetime.datetime(2012, 5, 10, 12, 15, 25),
- 'dt': datetime.date(2012, 5, 10),
- 'tm': datetime.time(12, 15, 25),
- 'intv': datetime.timedelta(seconds=570),
- 'dttz':
- datetime.datetime(2012, 5, 10, 12, 15, 25,
- tzinfo=TZ())
- },
- )
+ cls.bind.execute(
+ cls.tables.t.insert(),
+ {
+ 'dtme': datetime.datetime(2012, 5, 10, 12, 15, 25),
+ 'dt': datetime.date(2012, 5, 10),
+ 'tm': datetime.time(12, 15, 25),
+ 'intv': datetime.timedelta(seconds=570),
+ 'dttz': datetime.datetime(2012, 5, 10, 12, 15, 25,
+ tzinfo=TZ())
+ },
+ )
def _test(self, expr, field="all", overrides=None):
t = self.tables.t
@@ -898,7 +938,7 @@ class ExtractTest(fixtures.TablesTest):
fields.update(overrides)
for field in fields:
- result = testing.db.scalar(
+ result = self.bind.scalar(
select([extract(field, expr)]).select_from(t))
eq_(result, fields[field])
@@ -912,9 +952,9 @@ class ExtractTest(fixtures.TablesTest):
overrides={"epoch": 1336652695.0, "minute": 24})
def test_three(self):
- t = self.tables.t
+ self.tables.t
- actual_ts = testing.db.scalar(func.current_timestamp()) - \
+ actual_ts = self.bind.scalar(func.current_timestamp()) - \
datetime.timedelta(days=5)
self._test(func.current_timestamp() - datetime.timedelta(days=5),
{"hour": actual_ts.hour, "year": actual_ts.year,
@@ -963,7 +1003,7 @@ class ExtractTest(fixtures.TablesTest):
def test_twelve(self):
t = self.tables.t
- actual_ts = testing.db.scalar(
+ actual_ts = self.bind.scalar(
func.current_timestamp()).replace(tzinfo=None) - \
datetime.datetime(2012, 5, 10, 12, 15, 25)
diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py
index 0354fa436..851facd2a 100644
--- a/test/dialect/postgresql/test_reflection.py
+++ b/test/dialect/postgresql/test_reflection.py
@@ -13,6 +13,7 @@ from sqlalchemy import exc
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import base as postgresql
from sqlalchemy.dialects.postgresql import ARRAY
+import re
class ForeignTableReflectionTest(fixtures.TablesTest, AssertsExecutionResults):
@@ -130,6 +131,15 @@ class MaterializedViewReflectionTest(
insp = inspect(testing.db)
eq_(set(insp.get_view_names()), set(['test_mview', 'test_regview']))
+ def test_get_view_definition(self):
+ insp = inspect(testing.db)
+ eq_(
+ re.sub(
+ r'[\n\t ]+', ' ',
+ insp.get_view_definition("test_mview").strip()),
+ "SELECT testtable.id, testtable.data FROM testtable;"
+ )
+
class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):
"""Test PostgreSQL domains"""
@@ -673,6 +683,7 @@ class ReflectionTest(fixtures.TestBase):
eq_(ind, [{'unique': False, 'column_names': ['y'], 'name': 'idx1'}])
conn.close()
+ @testing.fails_if("postgresql < 8.2", "reloptions not supported")
@testing.provide_metadata
def test_index_reflection_with_storage_options(self):
"""reflect indexes with storage options set"""
diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py
index fac0f2df8..49a8cfabd 100644
--- a/test/dialect/postgresql/test_types.py
+++ b/test/dialect/postgresql/test_types.py
@@ -7,11 +7,11 @@ from sqlalchemy import testing
import datetime
from sqlalchemy import Table, MetaData, Column, Integer, Enum, Float, select, \
func, DateTime, Numeric, exc, String, cast, REAL, TypeDecorator, Unicode, \
- Text, null, text
+ Text, null, text, column, Array, any_, all_
from sqlalchemy.sql import operators
from sqlalchemy import types
import sqlalchemy as sa
-from sqlalchemy.dialects.postgresql import base as postgresql
+from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import HSTORE, hstore, array, \
INT4RANGE, INT8RANGE, NUMRANGE, DATERANGE, TSRANGE, TSTZRANGE, \
JSON, JSONB
@@ -20,6 +20,8 @@ from sqlalchemy import util
from sqlalchemy.testing.util import round_decimal
from sqlalchemy import inspect
from sqlalchemy import event
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import Session
tztable = notztable = metadata = table = None
@@ -497,6 +499,34 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
finally:
metadata.drop_all()
+ @testing.provide_metadata
+ def test_custom_subclass(self):
+ class MyEnum(TypeDecorator):
+ impl = Enum('oneHI', 'twoHI', 'threeHI', name='myenum')
+
+ def process_bind_param(self, value, dialect):
+ if value is not None:
+ value += "HI"
+ return value
+
+ def process_result_value(self, value, dialect):
+ if value is not None:
+ value += "THERE"
+ return value
+
+ t1 = Table(
+ 'table1', self.metadata,
+ Column('data', MyEnum())
+ )
+ self.metadata.create_all(testing.db)
+
+ with testing.db.connect() as conn:
+ conn.execute(t1.insert(), {"data": "two"})
+ eq_(
+ conn.scalar(select([t1.c.data])),
+ "twoHITHERE"
+ )
+
class OIDTest(fixtures.TestBase):
__only_on__ = 'postgresql'
@@ -559,6 +589,14 @@ class NumericInterpretationTest(fixtures.TestBase):
)
+class PythonTypeTest(fixtures.TestBase):
+ def test_interval(self):
+ is_(
+ postgresql.INTERVAL().python_type,
+ datetime.timedelta
+ )
+
+
class TimezoneTest(fixtures.TestBase):
__backend__ = True
@@ -698,7 +736,178 @@ class TimePrecisionTest(fixtures.TestBase, AssertsCompiledSQL):
eq_(t2.c.c6.type.timezone, True)
-class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
+class ArrayTest(AssertsCompiledSQL, fixtures.TestBase):
+ __dialect__ = 'postgresql'
+
+ def test_array_int_index(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col[3]]),
+ "SELECT x[%(x_1)s] AS anon_1",
+ checkparams={'x_1': 3}
+ )
+
+ def test_array_any(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col.any(7, operator=operators.lt)]),
+ "SELECT %(param_1)s < ANY (x) AS anon_1",
+ checkparams={'param_1': 7}
+ )
+
+ def test_array_all(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col.all(7, operator=operators.lt)]),
+ "SELECT %(param_1)s < ALL (x) AS anon_1",
+ checkparams={'param_1': 7}
+ )
+
+ def test_array_contains(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col.contains(array([4, 5, 6]))]),
+ "SELECT x @> ARRAY[%(param_1)s, %(param_2)s, %(param_3)s] "
+ "AS anon_1",
+ checkparams={'param_1': 4, 'param_3': 6, 'param_2': 5}
+ )
+
+ def test_array_contained_by(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col.contained_by(array([4, 5, 6]))]),
+ "SELECT x <@ ARRAY[%(param_1)s, %(param_2)s, %(param_3)s] "
+ "AS anon_1",
+ checkparams={'param_1': 4, 'param_3': 6, 'param_2': 5}
+ )
+
+ def test_array_overlap(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col.overlap(array([4, 5, 6]))]),
+ "SELECT x && ARRAY[%(param_1)s, %(param_2)s, %(param_3)s] "
+ "AS anon_1",
+ checkparams={'param_1': 4, 'param_3': 6, 'param_2': 5}
+ )
+
+ def test_array_slice_index(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col[5:10]]),
+ "SELECT x[%(x_1)s:%(x_2)s] AS anon_1",
+ checkparams={'x_2': 10, 'x_1': 5}
+ )
+
+ def test_array_dim_index(self):
+ col = column('x', postgresql.ARRAY(Integer, dimensions=2))
+ self.assert_compile(
+ select([col[3][5]]),
+ "SELECT x[%(x_1)s][%(param_1)s] AS anon_1",
+ checkparams={'x_1': 3, 'param_1': 5}
+ )
+
+ def test_array_concat(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ literal = array([4, 5])
+
+ self.assert_compile(
+ select([col + literal]),
+ "SELECT x || ARRAY[%(param_1)s, %(param_2)s] AS anon_1",
+ checkparams={'param_1': 4, 'param_2': 5}
+ )
+
+ def test_array_index_map_dimensions(self):
+ col = column('x', postgresql.ARRAY(Integer, dimensions=3))
+ is_(
+ col[5].type._type_affinity, Array
+ )
+ assert isinstance(
+ col[5].type, postgresql.ARRAY
+ )
+ eq_(
+ col[5].type.dimensions, 2
+ )
+ is_(
+ col[5][6].type._type_affinity, Array
+ )
+ assert isinstance(
+ col[5][6].type, postgresql.ARRAY
+ )
+ eq_(
+ col[5][6].type.dimensions, 1
+ )
+ is_(
+ col[5][6][7].type._type_affinity, Integer
+ )
+
+ def test_array_getitem_single_type(self):
+ m = MetaData()
+ arrtable = Table(
+ 'arrtable', m,
+ Column('intarr', postgresql.ARRAY(Integer)),
+ Column('strarr', postgresql.ARRAY(String)),
+ )
+ is_(arrtable.c.intarr[1].type._type_affinity, Integer)
+ is_(arrtable.c.strarr[1].type._type_affinity, String)
+
+ def test_array_getitem_slice_type(self):
+ m = MetaData()
+ arrtable = Table(
+ 'arrtable', m,
+ Column('intarr', postgresql.ARRAY(Integer)),
+ Column('strarr', postgresql.ARRAY(String)),
+ )
+
+ # type affinity is Array...
+ is_(arrtable.c.intarr[1:3].type._type_affinity, Array)
+ is_(arrtable.c.strarr[1:3].type._type_affinity, Array)
+
+ # but the slice returns the actual type
+ assert isinstance(arrtable.c.intarr[1:3].type, postgresql.ARRAY)
+ assert isinstance(arrtable.c.strarr[1:3].type, postgresql.ARRAY)
+
+ def test_array_functions_plus_getitem(self):
+ """test parenthesizing of functions plus indexing, which seems
+ to be required by Postgresql.
+
+ """
+ stmt = select([
+ func.array_cat(
+ array([1, 2, 3]),
+ array([4, 5, 6]),
+ type_=postgresql.ARRAY(Integer)
+ )[2:5]
+ ])
+ self.assert_compile(
+ stmt,
+ "SELECT (array_cat(ARRAY[%(param_1)s, %(param_2)s, %(param_3)s], "
+ "ARRAY[%(param_4)s, %(param_5)s, %(param_6)s]))"
+ "[%(param_7)s:%(param_8)s] AS anon_1"
+ )
+
+ self.assert_compile(
+ func.array_cat(
+ array([1, 2, 3]),
+ array([4, 5, 6]),
+ type_=postgresql.ARRAY(Integer)
+ )[3],
+ "(array_cat(ARRAY[%(param_1)s, %(param_2)s, %(param_3)s], "
+ "ARRAY[%(param_4)s, %(param_5)s, %(param_6)s]))[%(param_7)s]"
+ )
+
+ def test_array_agg_generic(self):
+ expr = func.array_agg(column('q', Integer))
+ is_(expr.type.__class__, types.Array)
+ is_(expr.type.item_type.__class__, Integer)
+
+ def test_array_agg_specific(self):
+ from sqlalchemy.dialects.postgresql import array_agg
+ expr = array_agg(column('q', Integer))
+ is_(expr.type.__class__, postgresql.ARRAY)
+ is_(expr.type.item_type.__class__, Integer)
+
+
+class ArrayRoundTripTest(fixtures.TablesTest, AssertsExecutionResults):
__only_on__ = 'postgresql'
__backend__ = True
@@ -754,6 +963,89 @@ class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
assert isinstance(tbl.c.intarr.type.item_type, Integer)
assert isinstance(tbl.c.strarr.type.item_type, String)
+ @testing.provide_metadata
+ def test_array_agg(self):
+ values_table = Table('values', self.metadata, Column('value', Integer))
+ self.metadata.create_all(testing.db)
+ testing.db.execute(
+ values_table.insert(),
+ [{'value': i} for i in range(1, 10)]
+ )
+
+ stmt = select([func.array_agg(values_table.c.value)])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ list(range(1, 10))
+ )
+
+ stmt = select([func.array_agg(values_table.c.value)[3]])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ 3
+ )
+
+ stmt = select([func.array_agg(values_table.c.value)[2:4]])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ [2, 3, 4]
+ )
+
+ def test_array_index_slice_exprs(self):
+ """test a variety of expressions that sometimes need parenthesizing"""
+
+ stmt = select([array([1, 2, 3, 4])[2:3]])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ [2, 3]
+ )
+
+ stmt = select([array([1, 2, 3, 4])[2]])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ 2
+ )
+
+ stmt = select([(array([1, 2]) + array([3, 4]))[2:3]])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ [2, 3]
+ )
+
+ stmt = select([array([1, 2]) + array([3, 4])[2:3]])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ [1, 2, 4]
+ )
+
+ stmt = select([array([1, 2])[2:3] + array([3, 4])])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ [2, 3, 4]
+ )
+
+ stmt = select([
+ func.array_cat(
+ array([1, 2, 3]),
+ array([4, 5, 6]),
+ type_=postgresql.ARRAY(Integer)
+ )[2:5]
+ ])
+ eq_(
+ testing.db.execute(stmt).scalar(), [2, 3, 4, 5]
+ )
+
+ def test_any_all_exprs(self):
+ stmt = select([
+ 3 == any_(func.array_cat(
+ array([1, 2, 3]),
+ array([4, 5, 6]),
+ type_=postgresql.ARRAY(Integer)
+ ))
+ ])
+ eq_(
+ testing.db.execute(stmt).scalar(), True
+ )
+
def test_insert_array(self):
arrtable = self.tables.arrtable
arrtable.insert().execute(intarr=[1, 2, 3], strarr=[util.u('abc'),
@@ -828,16 +1120,6 @@ class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
), True
)
- def test_array_getitem_single_type(self):
- arrtable = self.tables.arrtable
- is_(arrtable.c.intarr[1].type._type_affinity, Integer)
- is_(arrtable.c.strarr[1].type._type_affinity, String)
-
- def test_array_getitem_slice_type(self):
- arrtable = self.tables.arrtable
- is_(arrtable.c.intarr[1:3].type._type_affinity, postgresql.ARRAY)
- is_(arrtable.c.strarr[1:3].type._type_affinity, postgresql.ARRAY)
-
def test_array_getitem_single_exec(self):
arrtable = self.tables.arrtable
self._fixture_456(arrtable)
@@ -926,6 +1208,14 @@ class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
lambda elem: (
x for x in elem))
+ def test_multi_dim_roundtrip(self):
+ arrtable = self.tables.arrtable
+ testing.db.execute(arrtable.insert(), dimarr=[[1, 2, 3], [4, 5, 6]])
+ eq_(
+ testing.db.scalar(select([arrtable.c.dimarr])),
+ [[-1, 0, 1], [2, 3, 4]]
+ )
+
def test_array_contained_by_exec(self):
arrtable = self.tables.arrtable
with testing.db.connect() as conn:
@@ -1030,12 +1320,98 @@ class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
set([('1', '2', '3'), ('4', '5', '6'), (('4', '5'), ('6', '7'))])
)
- def test_dimension(self):
- arrtable = self.tables.arrtable
- testing.db.execute(arrtable.insert(), dimarr=[[1, 2, 3], [4, 5, 6]])
+ def test_array_plus_native_enum_create(self):
+ m = MetaData()
+ t = Table(
+ 't', m,
+ Column(
+ 'data_1',
+ postgresql.ARRAY(
+ postgresql.ENUM('a', 'b', 'c', name='my_enum_1')
+ )
+ ),
+ Column(
+ 'data_2',
+ postgresql.ARRAY(
+ types.Enum('a', 'b', 'c', name='my_enum_2')
+ )
+ )
+ )
+
+ t.create(testing.db)
eq_(
- testing.db.scalar(select([arrtable.c.dimarr])),
- [[-1, 0, 1], [2, 3, 4]]
+ set(e['name'] for e in inspect(testing.db).get_enums()),
+ set(['my_enum_1', 'my_enum_2'])
+ )
+ t.drop(testing.db)
+ eq_(inspect(testing.db).get_enums(), [])
+
+
+class HashableFlagORMTest(fixtures.TestBase):
+ """test the various 'collection' types that they flip the 'hashable' flag
+ appropriately. [ticket:3499]"""
+
+ __only_on__ = 'postgresql'
+
+ def _test(self, type_, data):
+ Base = declarative_base(metadata=self.metadata)
+
+ class A(Base):
+ __tablename__ = 'a1'
+ id = Column(Integer, primary_key=True)
+ data = Column(type_)
+ Base.metadata.create_all(testing.db)
+ s = Session(testing.db)
+ s.add_all([
+ A(data=elem) for elem in data
+ ])
+ s.commit()
+
+ eq_(
+ [(obj.A.id, obj.data) for obj in
+ s.query(A, A.data).order_by(A.id)],
+ list(enumerate(data, 1))
+ )
+
+ @testing.provide_metadata
+ def test_array(self):
+ self._test(
+ postgresql.ARRAY(Text()),
+ [['a', 'b', 'c'], ['d', 'e', 'f']]
+ )
+
+ @testing.requires.hstore
+ @testing.provide_metadata
+ def test_hstore(self):
+ self._test(
+ postgresql.HSTORE(),
+ [
+ {'a': '1', 'b': '2', 'c': '3'},
+ {'d': '4', 'e': '5', 'f': '6'}
+ ]
+ )
+
+ @testing.provide_metadata
+ def test_json(self):
+ self._test(
+ postgresql.JSON(),
+ [
+ {'a': '1', 'b': '2', 'c': '3'},
+ {'d': '4', 'e': {'e1': '5', 'e2': '6'},
+ 'f': {'f1': [9, 10, 11]}}
+ ]
+ )
+
+ @testing.requires.postgresql_jsonb
+ @testing.provide_metadata
+ def test_jsonb(self):
+ self._test(
+ postgresql.JSONB(),
+ [
+ {'a': '1', 'b': '2', 'c': '3'},
+ {'d': '4', 'e': {'e1': '5', 'e2': '6'},
+ 'f': {'f1': [9, 10, 11]}}
+ ]
)
@@ -1051,6 +1427,16 @@ class TimestampTest(fixtures.TestBase, AssertsExecutionResults):
result = connection.execute(s).first()
eq_(result[0], datetime.datetime(2007, 12, 25, 0, 0))
+ def test_interval_arithmetic(self):
+ # basically testing that we get timedelta back for an INTERVAL
+ # result. more of a driver assertion.
+ engine = testing.db
+ connection = engine.connect()
+
+ s = select([text("timestamp '2007-12-25' - timestamp '2007-11-15'")])
+ result = connection.execute(s).first()
+ eq_(result[0], datetime.timedelta(40))
+
class SpecialTypesTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
@@ -1372,6 +1758,19 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
{"key1": "value1", "key2": "value2"}
)
+ def test_ret_type_text(self):
+ col = column('x', HSTORE())
+
+ is_(col['foo'].type.__class__, Text)
+
+ def test_ret_type_custom(self):
+ class MyType(types.UserDefinedType):
+ pass
+
+ col = column('x', HSTORE(text_type=MyType))
+
+ is_(col['foo'].type.__class__, MyType)
+
def test_where_has_key(self):
self._test_where(
# hide from 2to3
@@ -1394,7 +1793,7 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
def test_where_defined(self):
self._test_where(
self.hashcol.defined('foo'),
- "defined(test_table.hash, %(param_1)s)"
+ "defined(test_table.hash, %(defined_1)s)"
)
def test_where_contains(self):
@@ -1425,7 +1824,7 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
def test_cols_delete_single_key(self):
self._test_cols(
self.hashcol.delete('foo'),
- "delete(test_table.hash, %(param_1)s) AS delete_1",
+ "delete(test_table.hash, %(delete_2)s) AS delete_1",
True
)
@@ -1440,7 +1839,7 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
def test_cols_delete_matching_pairs(self):
self._test_cols(
self.hashcol.delete(hstore('1', '2')),
- ("delete(test_table.hash, hstore(%(param_1)s, %(param_2)s)) "
+ ("delete(test_table.hash, hstore(%(hstore_1)s, %(hstore_2)s)) "
"AS delete_1"),
True
)
@@ -1456,7 +1855,7 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
def test_cols_hstore_pair_text(self):
self._test_cols(
hstore('foo', '3')['foo'],
- "hstore(%(param_1)s, %(param_2)s) -> %(hstore_1)s AS anon_1",
+ "hstore(%(hstore_1)s, %(hstore_2)s) -> %(hstore_3)s AS anon_1",
False
)
@@ -1481,14 +1880,14 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
self._test_cols(
self.hashcol.concat(hstore(cast(self.test_table.c.id, Text), '3')),
("test_table.hash || hstore(CAST(test_table.id AS TEXT), "
- "%(param_1)s) AS anon_1"),
+ "%(hstore_1)s) AS anon_1"),
True
)
def test_cols_concat_op(self):
self._test_cols(
hstore('foo', 'bar') + self.hashcol,
- "hstore(%(param_1)s, %(param_2)s) || test_table.hash AS anon_1",
+ "hstore(%(hstore_1)s, %(hstore_2)s) || test_table.hash AS anon_1",
True
)
@@ -2093,19 +2492,59 @@ class JSONTest(AssertsCompiledSQL, fixtures.TestBase):
"(test_table.test_column #> %(test_column_1)s) IS NULL"
)
+ def test_path_typing(self):
+ col = column('x', JSON())
+ is_(
+ col['q'].type._type_affinity, JSON
+ )
+ is_(
+ col[('q', )].type._type_affinity, JSON
+ )
+ is_(
+ col['q']['p'].type._type_affinity, JSON
+ )
+ is_(
+ col[('q', 'p')].type._type_affinity, JSON
+ )
+
+ def test_custom_astext_type(self):
+ class MyType(types.UserDefinedType):
+ pass
+
+ col = column('x', JSON(astext_type=MyType))
+
+ is_(
+ col['q'].astext.type.__class__, MyType
+ )
+
+ is_(
+ col[('q', 'p')].astext.type.__class__, MyType
+ )
+
+ is_(
+ col['q']['p'].astext.type.__class__, MyType
+ )
+
def test_where_getitem_as_text(self):
self._test_where(
self.jsoncol['bar'].astext == None,
"(test_table.test_column ->> %(test_column_1)s) IS NULL"
)
- def test_where_getitem_as_cast(self):
+ def test_where_getitem_astext_cast(self):
self._test_where(
- self.jsoncol['bar'].cast(Integer) == 5,
+ self.jsoncol['bar'].astext.cast(Integer) == 5,
"CAST(test_table.test_column ->> %(test_column_1)s AS INTEGER) "
"= %(param_1)s"
)
+ def test_where_getitem_json_cast(self):
+ self._test_where(
+ self.jsoncol['bar'].cast(Integer) == 5,
+ "CAST(test_table.test_column -> %(test_column_1)s AS INTEGER) "
+ "= %(param_1)s"
+ )
+
def test_where_path_as_text(self):
self._test_where(
self.jsoncol[("foo", 1)].astext == None,
@@ -2144,6 +2583,7 @@ class JSONRoundTripTest(fixtures.TablesTest):
{'name': 'r3', 'data': {"k1": "r3v1", "k2": "r3v2"}},
{'name': 'r4', 'data': {"k1": "r4v1", "k2": "r4v2"}},
{'name': 'r5', 'data': {"k1": "r5v1", "k2": "r5v2", "k3": 5}},
+ {'name': 'r6', 'data': {"k1": {"r6v1": {'subr': [1, 2, 3]}}}},
)
def _assert_data(self, compare, column='data'):
@@ -2164,6 +2604,15 @@ class JSONRoundTripTest(fixtures.TablesTest):
).fetchall()
eq_([d for d, in data], [None])
+ def _assert_column_is_JSON_NULL(self, column='data'):
+ col = self.tables.data_table.c[column]
+
+ data = testing.db.execute(
+ select([col]).
+ where(cast(col, String) == "null")
+ ).fetchall()
+ eq_([d for d, in data], [None])
+
def _test_insert(self, engine):
engine.execute(
self.tables.data_table.insert(),
@@ -2185,6 +2634,13 @@ class JSONRoundTripTest(fixtures.TablesTest):
)
self._assert_column_is_NULL(column='nulldata')
+ def _test_insert_nulljson_into_none_as_null(self, engine):
+ engine.execute(
+ self.tables.data_table.insert(),
+ {'name': 'r1', 'nulldata': JSON.NULL}
+ )
+ self._assert_column_is_JSON_NULL(column='nulldata')
+
def _non_native_engine(self, json_serializer=None, json_deserializer=None):
if json_serializer is not None or json_deserializer is not None:
options = {
@@ -2233,6 +2689,11 @@ class JSONRoundTripTest(fixtures.TablesTest):
engine = testing.db
self._test_insert_none_as_null(engine)
+ @testing.requires.psycopg2_native_json
+ def test_insert_native_nulljson_into_none_as_null(self):
+ engine = testing.db
+ self._test_insert_nulljson_into_none_as_null(engine)
+
def test_insert_python(self):
engine = self._non_native_engine()
self._test_insert(engine)
@@ -2245,6 +2706,10 @@ class JSONRoundTripTest(fixtures.TablesTest):
engine = self._non_native_engine()
self._test_insert_none_as_null(engine)
+ def test_insert_python_nulljson_into_none_as_null(self):
+ engine = self._non_native_engine()
+ self._test_insert_nulljson_into_none_as_null(engine)
+
def _test_custom_serialize_deserialize(self, native):
import json
@@ -2309,12 +2774,28 @@ class JSONRoundTripTest(fixtures.TablesTest):
engine = testing.db
self._fixture_data(engine)
data_table = self.tables.data_table
+
result = engine.execute(
- select([data_table.c.data]).where(
- data_table.c.data[('k1',)].astext == 'r3v1'
+ select([data_table.c.name]).where(
+ data_table.c.data[('k1', 'r6v1', 'subr')].astext == "[1, 2, 3]"
)
- ).first()
- eq_(result, ({'k1': 'r3v1', 'k2': 'r3v2'},))
+ )
+ eq_(result.scalar(), 'r6')
+
+ @testing.fails_on(
+ "postgresql < 9.4",
+ "Improvement in Postgresql behavior?")
+ def test_multi_index_query(self):
+ engine = testing.db
+ self._fixture_data(engine)
+ data_table = self.tables.data_table
+
+ result = engine.execute(
+ select([data_table.c.name]).where(
+ data_table.c.data['k1']['r6v1']['subr'].astext == "[1, 2, 3]"
+ )
+ )
+ eq_(result.scalar(), 'r6')
def test_query_returned_as_text(self):
engine = testing.db
@@ -2330,7 +2811,7 @@ class JSONRoundTripTest(fixtures.TablesTest):
self._fixture_data(engine)
data_table = self.tables.data_table
result = engine.execute(
- select([data_table.c.data['k3'].cast(Integer)]).where(
+ select([data_table.c.data['k3'].astext.cast(Integer)]).where(
data_table.c.name == 'r5')
).first()
assert isinstance(result[0], int)
@@ -2398,6 +2879,36 @@ class JSONRoundTripTest(fixtures.TablesTest):
engine = testing.db
self._test_unicode_round_trip(engine)
+ def test_eval_none_flag_orm(self):
+ Base = declarative_base()
+
+ class Data(Base):
+ __table__ = self.tables.data_table
+
+ s = Session(testing.db)
+
+ d1 = Data(name='d1', data=None, nulldata=None)
+ s.add(d1)
+ s.commit()
+
+ s.bulk_insert_mappings(
+ Data, [{"name": "d2", "data": None, "nulldata": None}]
+ )
+ eq_(
+ s.query(
+ cast(self.tables.data_table.c.data, String),
+ cast(self.tables.data_table.c.nulldata, String)
+ ).filter(self.tables.data_table.c.name == 'd1').first(),
+ ("null", None)
+ )
+ eq_(
+ s.query(
+ cast(self.tables.data_table.c.data, String),
+ cast(self.tables.data_table.c.nulldata, String)
+ ).filter(self.tables.data_table.c.name == 'd2').first(),
+ ("null", None)
+ )
+
class JSONBTest(JSONTest):
@@ -2444,7 +2955,6 @@ class JSONBTest(JSONTest):
class JSONBRoundTripTest(JSONRoundTripTest):
- __only_on__ = ('postgresql >= 9.4',)
__requires__ = ('postgresql_jsonb', )
test_type = JSONB
diff --git a/test/dialect/test_oracle.py b/test/dialect/test_oracle.py
index e080568cf..dd4a888ff 100644
--- a/test/dialect/test_oracle.py
+++ b/test/dialect/test_oracle.py
@@ -5,6 +5,7 @@ from sqlalchemy.testing import eq_
from sqlalchemy import *
from sqlalchemy import types as sqltypes, exc, schema
from sqlalchemy.sql import table, column
+from sqlalchemy.sql.elements import quoted_name
from sqlalchemy.testing import fixtures, AssertsExecutionResults, AssertsCompiledSQL
from sqlalchemy import testing
from sqlalchemy.util import u, b
@@ -1859,6 +1860,31 @@ class TableReflectionTest(fixtures.TestBase):
tbl = Table('test_compress', m2, autoload=True)
assert tbl.dialect_options['oracle']['compress'] == "OLTP"
+ @testing.provide_metadata
+ def test_reflect_lowercase_forced_tables(self):
+ metadata = self.metadata
+
+ Table(
+ quoted_name('t1', quote=True), metadata,
+ Column('id', Integer, primary_key=True),
+ )
+ Table(
+ quoted_name('t2', quote=True), metadata,
+ Column('id', Integer, primary_key=True),
+ Column('t1id', ForeignKey('t1.id'))
+ )
+ metadata.create_all()
+
+ m2 = MetaData(testing.db)
+ t2_ref = Table(quoted_name('t2', quote=True), m2, autoload=True)
+ t1_ref = m2.tables['t1']
+ assert t2_ref.c.t1id.references(t1_ref.c.id)
+
+ m3 = MetaData(testing.db)
+ m3.reflect(only=lambda name, m: name.lower() in ('t1', 't2'))
+ assert m3.tables['t2'].c.t1id.references(m3.tables['t1'].c.id)
+
+
class RoundTripIndexTest(fixtures.TestBase):
__only_on__ = 'oracle'
diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py
index 17920c127..33903ff89 100644
--- a/test/dialect/test_sqlite.py
+++ b/test/dialect/test_sqlite.py
@@ -20,7 +20,7 @@ from sqlalchemy.engine.url import make_url
from sqlalchemy.testing import fixtures, AssertsCompiledSQL, \
AssertsExecutionResults, engines
from sqlalchemy import testing
-from sqlalchemy.schema import CreateTable
+from sqlalchemy.schema import CreateTable, FetchedValue
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.testing import mock
@@ -535,29 +535,12 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
assert e.pool.__class__ is pool.NullPool
-
-class AttachedMemoryDBTest(fixtures.TestBase):
+class AttachedDBTest(fixtures.TestBase):
__only_on__ = 'sqlite'
- dbname = None
-
- def setUp(self):
- self.conn = conn = testing.db.connect()
- if self.dbname is None:
- dbname = ':memory:'
- else:
- dbname = self.dbname
- conn.execute('ATTACH DATABASE "%s" AS test_schema' % dbname)
- self.metadata = MetaData()
-
- def tearDown(self):
- self.metadata.drop_all(self.conn)
- self.conn.execute('DETACH DATABASE test_schema')
- if self.dbname:
- os.remove(self.dbname)
-
def _fixture(self):
meta = self.metadata
+ self.conn = testing.db.connect()
ct = Table(
'created', meta,
Column('id', Integer),
@@ -567,6 +550,14 @@ class AttachedMemoryDBTest(fixtures.TestBase):
meta.create_all(self.conn)
return ct
+ def setup(self):
+ self.conn = testing.db.connect()
+ self.metadata = MetaData()
+
+ def teardown(self):
+ self.metadata.drop_all(self.conn)
+ self.conn.close()
+
def test_no_tables(self):
insp = inspect(self.conn)
eq_(insp.get_table_names("test_schema"), [])
@@ -581,6 +572,18 @@ class AttachedMemoryDBTest(fixtures.TestBase):
insp = inspect(self.conn)
eq_(insp.get_table_names("test_schema"), ["created"])
+ def test_schema_names(self):
+ self._fixture()
+ insp = inspect(self.conn)
+ eq_(insp.get_schema_names(), ["main", "test_schema"])
+
+ # implicitly creates a "temp" schema
+ self.conn.execute("select * from sqlite_temp_master")
+
+ # we're not including it
+ insp = inspect(self.conn)
+ eq_(insp.get_schema_names(), ["main", "test_schema"])
+
def test_reflect_system_table(self):
meta = MetaData(self.conn)
alt_master = Table(
@@ -633,10 +636,6 @@ class AttachedMemoryDBTest(fixtures.TestBase):
eq_(row['name'], 'foo')
-class AttachedFileDBTest(AttachedMemoryDBTest):
- dbname = 'attached_db.db'
-
-
class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
"""Tests SQLite-dialect specific compilation."""
@@ -752,6 +751,17 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
"WHERE data > 'a' AND data < 'b''s'",
dialect=sqlite.dialect())
+ def test_no_autoinc_on_composite_pk(self):
+ m = MetaData()
+ t = Table(
+ 't', m,
+ Column('x', Integer, primary_key=True, autoincrement=True),
+ Column('y', Integer, primary_key=True))
+ assert_raises_message(
+ exc.CompileError,
+ "SQLite does not support autoincrement for composite",
+ CreateTable(t).compile, dialect=sqlite.dialect()
+ )
class InsertTest(fixtures.TestBase, AssertsExecutionResults):
@@ -782,23 +792,46 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
@testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
def test_empty_insert_pk2(self):
+ # now raises CompileError due to [ticket:3216]
assert_raises(
- exc.DBAPIError, self._test_empty_insert,
+ exc.CompileError, self._test_empty_insert,
Table(
'b', MetaData(testing.db),
Column('x', Integer, primary_key=True),
Column('y', Integer, primary_key=True)))
@testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
- def test_empty_insert_pk3(self):
+ def test_empty_insert_pk2_fv(self):
assert_raises(
exc.DBAPIError, self._test_empty_insert,
Table(
+ 'b', MetaData(testing.db),
+ Column('x', Integer, primary_key=True,
+ server_default=FetchedValue()),
+ Column('y', Integer, primary_key=True,
+ server_default=FetchedValue())))
+
+ @testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
+ def test_empty_insert_pk3(self):
+ # now raises CompileError due to [ticket:3216]
+ assert_raises(
+ exc.CompileError, self._test_empty_insert,
+ Table(
'c', MetaData(testing.db),
Column('x', Integer, primary_key=True),
Column('y', Integer, DefaultClause('123'), primary_key=True)))
@testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
+ def test_empty_insert_pk3_fv(self):
+ assert_raises(
+ exc.DBAPIError, self._test_empty_insert,
+ Table(
+ 'c', MetaData(testing.db),
+ Column('x', Integer, primary_key=True,
+ server_default=FetchedValue()),
+ Column('y', Integer, DefaultClause('123'), primary_key=True)))
+
+ @testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
def test_empty_insert_pk4(self):
self._test_empty_insert(
Table(
diff --git a/test/engine/test_pool.py b/test/engine/test_pool.py
index 451cb8b0e..8551e1fcb 100644
--- a/test/engine/test_pool.py
+++ b/test/engine/test_pool.py
@@ -8,8 +8,9 @@ from sqlalchemy.testing import eq_, assert_raises, is_not_, is_
from sqlalchemy.testing.engines import testing_engine
from sqlalchemy.testing import fixtures
import random
-from sqlalchemy.testing.mock import Mock, call, patch
+from sqlalchemy.testing.mock import Mock, call, patch, ANY
import weakref
+import collections
join_timeout = 10
@@ -1480,6 +1481,98 @@ class QueuePoolTest(PoolTestBase):
time.sleep(1.5)
self._assert_cleanup_on_pooled_reconnect(dbapi, p)
+ def test_connect_handler_not_called_for_recycled(self):
+ """test [ticket:3497]"""
+
+ dbapi, p = self._queuepool_dbapi_fixture(
+ pool_size=2, max_overflow=2)
+
+ canary = Mock()
+
+ c1 = p.connect()
+ c2 = p.connect()
+
+ c1.close()
+ c2.close()
+
+ dbapi.shutdown(True)
+
+ bad = p.connect()
+ p._invalidate(bad)
+ bad.close()
+ assert p._invalidate_time
+
+ event.listen(p, "connect", canary.connect)
+ event.listen(p, "checkout", canary.checkout)
+
+ assert_raises(
+ Exception,
+ p.connect
+ )
+
+ p._pool.queue = collections.deque(
+ [
+ c for c in p._pool.queue
+ if c.connection is not None
+ ]
+ )
+
+ dbapi.shutdown(False)
+ c = p.connect()
+ c.close()
+
+ eq_(
+ canary.mock_calls,
+ [
+ call.connect(ANY, ANY),
+ call.checkout(ANY, ANY, ANY)
+ ]
+ )
+
+ def test_connect_checkout_handler_always_gets_info(self):
+ """test [ticket:3497]"""
+
+ dbapi, p = self._queuepool_dbapi_fixture(
+ pool_size=2, max_overflow=2)
+
+ c1 = p.connect()
+ c2 = p.connect()
+
+ c1.close()
+ c2.close()
+
+ dbapi.shutdown(True)
+
+ bad = p.connect()
+ p._invalidate(bad)
+ bad.close()
+ assert p._invalidate_time
+
+ @event.listens_for(p, "connect")
+ def connect(conn, conn_rec):
+ conn_rec.info['x'] = True
+
+ @event.listens_for(p, "checkout")
+ def checkout(conn, conn_rec, conn_f):
+ assert 'x' in conn_rec.info
+
+ assert_raises(
+ Exception,
+ p.connect
+ )
+
+ p._pool.queue = collections.deque(
+ [
+ c for c in p._pool.queue
+ if c.connection is not None
+ ]
+ )
+
+ dbapi.shutdown(False)
+ c = p.connect()
+ c.close()
+
+
def test_error_on_pooled_reconnect_cleanup_wcheckout_event(self):
dbapi, p = self._queuepool_dbapi_fixture(pool_size=1,
max_overflow=2)
diff --git a/test/engine/test_reflection.py b/test/engine/test_reflection.py
index 83650609d..b7bf87d63 100644
--- a/test/engine/test_reflection.py
+++ b/test/engine/test_reflection.py
@@ -311,22 +311,22 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
Don't mark this test as unsupported for any backend !
- (technically it fails with MySQL InnoDB since "id" comes before "id2")
-
"""
meta = self.metadata
- Table('test', meta,
+ Table(
+ 'test', meta,
Column('id', sa.Integer, primary_key=True),
Column('data', sa.String(50)),
- mysql_engine='MyISAM'
+ mysql_engine='InnoDB'
)
- Table('test2', meta,
- Column('id', sa.Integer, sa.ForeignKey('test.id'),
- primary_key=True),
+ Table(
+ 'test2', meta,
+ Column(
+ 'id', sa.Integer, sa.ForeignKey('test.id'), primary_key=True),
Column('id2', sa.Integer, primary_key=True),
Column('data', sa.String(50)),
- mysql_engine='MyISAM'
+ mysql_engine='InnoDB'
)
meta.create_all()
m2 = MetaData(testing.db)
@@ -334,7 +334,8 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
assert t1a._autoincrement_column is t1a.c.id
t2a = Table('test2', m2, autoload=True)
- assert t2a._autoincrement_column is t2a.c.id2
+ assert t2a._autoincrement_column is None
+
@skip('sqlite')
@testing.provide_metadata
diff --git a/test/ext/declarative/test_basic.py b/test/ext/declarative/test_basic.py
index ab0de801c..ae1a85f8b 100644
--- a/test/ext/declarative/test_basic.py
+++ b/test/ext/declarative/test_basic.py
@@ -102,6 +102,29 @@ class DeclarativeTest(DeclarativeTestBase):
assert User.addresses.property.mapper.class_ is Address
+ def test_unicode_string_resolve_backref(self):
+ class User(Base, fixtures.ComparableEntity):
+ __tablename__ = 'users'
+
+ id = Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True)
+ name = Column('name', String(50))
+
+ class Address(Base, fixtures.ComparableEntity):
+ __tablename__ = 'addresses'
+
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
+ email = Column(String(50), key='_email')
+ user_id = Column('user_id', Integer, ForeignKey('users.id'),
+ key='_user_id')
+ user = relationship(
+ User,
+ backref=backref("addresses",
+ order_by=util.u("Address.email")))
+
+ assert Address.user.property.mapper.class_ is User
+
def test_no_table(self):
def go():
class User(Base):
@@ -1570,8 +1593,7 @@ class DeclarativeTest(DeclarativeTestBase):
meta = MetaData(testing.db)
t1 = Table(
't1', meta,
- Column('id', String(50),
- primary_key=True, test_needs_autoincrement=True),
+ Column('id', String(50), primary_key=True),
Column('data', String(50)))
meta.create_all()
try:
diff --git a/test/ext/declarative/test_inheritance.py b/test/ext/declarative/test_inheritance.py
index 3e6980190..274a6aa28 100644
--- a/test/ext/declarative/test_inheritance.py
+++ b/test/ext/declarative/test_inheritance.py
@@ -1453,3 +1453,33 @@ class ConcreteExtensionConfigTest(
"FROM actual_documents) AS pjoin"
)
+ def test_column_attr_names(self):
+ """test #3480"""
+
+ class Document(Base, AbstractConcreteBase):
+ documentType = Column('documenttype', String)
+
+ class Offer(Document):
+ __tablename__ = 'offers'
+
+ id = Column(Integer, primary_key=True)
+ __mapper_args__ = {
+ 'polymorphic_identity': 'offer'
+ }
+
+ configure_mappers()
+ session = Session()
+ self.assert_compile(
+ session.query(Document),
+ "SELECT pjoin.documenttype AS pjoin_documenttype, "
+ "pjoin.id AS pjoin_id, pjoin.type AS pjoin_type FROM "
+ "(SELECT offers.documenttype AS documenttype, offers.id AS id, "
+ "'offer' AS type FROM offers) AS pjoin"
+ )
+
+ self.assert_compile(
+ session.query(Document.documentType),
+ "SELECT pjoin.documenttype AS pjoin_documenttype FROM "
+ "(SELECT offers.documenttype AS documenttype, offers.id AS id, "
+ "'offer' AS type FROM offers) AS pjoin"
+ )
diff --git a/test/ext/declarative/test_mixin.py b/test/ext/declarative/test_mixin.py
index b9e40421c..1f9fa1dfa 100644
--- a/test/ext/declarative/test_mixin.py
+++ b/test/ext/declarative/test_mixin.py
@@ -1441,7 +1441,7 @@ class DeclaredAttrTest(DeclarativeTestBase, testing.AssertsCompiledSQL):
"SELECT b.x AS b_x, b.x + :x_1 AS anon_1, b.id AS b_id FROM b"
)
-
+ @testing.requires.predictable_gc
def test_singleton_gc(self):
counter = mock.Mock()
diff --git a/test/ext/test_associationproxy.py b/test/ext/test_associationproxy.py
index 8fb335b06..98e40b11e 100644
--- a/test/ext/test_associationproxy.py
+++ b/test/ext/test_associationproxy.py
@@ -1593,3 +1593,23 @@ class DictOfTupleUpdateTest(fixtures.TestBase):
a1.elements.update,
(("B", 3), 'elem2'), (("C", 4), "elem3")
)
+
+
+class InfoTest(fixtures.TestBase):
+ def test_constructor(self):
+ assoc = association_proxy('a', 'b', info={'some_assoc': 'some_value'})
+ eq_(assoc.info, {"some_assoc": "some_value"})
+
+ def test_empty(self):
+ assoc = association_proxy('a', 'b')
+ eq_(assoc.info, {})
+
+ def test_via_cls(self):
+ class Foob(object):
+ assoc = association_proxy('a', 'b')
+
+ eq_(Foob.assoc.info, {})
+
+ Foob.assoc.info["foo"] = 'bar'
+
+ eq_(Foob.assoc.info, {'foo': 'bar'})
diff --git a/test/ext/test_baked.py b/test/ext/test_baked.py
index 78c43fc7e..8bfa58403 100644
--- a/test/ext/test_baked.py
+++ b/test/ext/test_baked.py
@@ -1,6 +1,7 @@
from sqlalchemy.orm import Session, subqueryload, \
mapper, relationship, lazyload, clear_mappers
-from sqlalchemy.testing import eq_, is_, is_not_, assert_raises
+from sqlalchemy.testing import eq_, is_, is_not_
+from sqlalchemy.testing import assert_raises, assert_raises_message
from sqlalchemy import testing
from test.orm import _fixtures
from sqlalchemy.ext.baked import BakedQuery, baked_lazyload, BakedLazyLoader
@@ -151,25 +152,68 @@ class LikeQueryTest(BakedTest):
(8, )
)
+ def test_one_or_none_no_result(self):
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User))
+ bq += lambda q: q.filter(User.name == 'asdf')
+
+ eq_(
+ bq(Session()).one_or_none(),
+ None
+ )
+
+ def test_one_or_none_result(self):
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User))
+ bq += lambda q: q.filter(User.name == 'ed')
+
+ u1 = bq(Session()).one_or_none()
+ eq_(u1.name, 'ed')
+
+ def test_one_or_none_multiple_result(self):
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User))
+ bq += lambda q: q.filter(User.name.like('%ed%'))
+
+ assert_raises_message(
+ orm_exc.MultipleResultsFound,
+ "Multiple rows were found for one_or_none()",
+ bq(Session()).one_or_none
+ )
+
def test_one_no_result(self):
User = self.classes.User
bq = self.bakery(lambda s: s.query(User))
bq += lambda q: q.filter(User.name == 'asdf')
- assert_raises(
+ assert_raises_message(
orm_exc.NoResultFound,
+ "No row was found for one()",
bq(Session()).one
)
+ def test_one_result(self):
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User))
+ bq += lambda q: q.filter(User.name == 'ed')
+
+ u1 = bq(Session()).one()
+ eq_(u1.name, 'ed')
+
def test_one_multiple_result(self):
User = self.classes.User
bq = self.bakery(lambda s: s.query(User))
bq += lambda q: q.filter(User.name.like('%ed%'))
- assert_raises(
+ assert_raises_message(
orm_exc.MultipleResultsFound,
+ "Multiple rows were found for one()",
bq(Session()).one
)
@@ -227,6 +271,32 @@ class LikeQueryTest(BakedTest):
eq_(u2.name, 'chuck')
self.assert_sql_count(testing.db, go, 0)
+ def test_get_includes_getclause(self):
+ # test issue #3597
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User))
+
+ for i in range(5):
+ sess = Session()
+ u1 = bq(sess).get(7)
+ eq_(u1.name, 'jack')
+ sess.close()
+
+ eq_(len(bq._bakery), 2)
+
+ # simulate race where mapper._get_clause
+ # may be generated more than once
+ from sqlalchemy import inspect
+ del inspect(User).__dict__['_get_clause']
+
+ for i in range(5):
+ sess = Session()
+ u1 = bq(sess).get(7)
+ eq_(u1.name, 'jack')
+ sess.close()
+ eq_(len(bq._bakery), 4)
+
class ResultTest(BakedTest):
__backend__ = True
@@ -552,14 +622,14 @@ class ResultTest(BakedTest):
class LazyLoaderTest(BakedTest):
run_setup_mappers = 'each'
- def _o2m_fixture(self, lazy="select"):
+ def _o2m_fixture(self, lazy="select", **kw):
User = self.classes.User
Address = self.classes.Address
mapper(User, self.tables.users, properties={
'addresses': relationship(
Address, order_by=self.tables.addresses.c.id,
- lazy=lazy)
+ lazy=lazy, **kw)
})
mapper(Address, self.tables.addresses)
return User, Address
@@ -647,6 +717,24 @@ class LazyLoaderTest(BakedTest):
u1._sa_instance_state
)
+ def test_systemwide_loaders_loadable_via_lazyloader(self):
+ from sqlalchemy.orm import configure_mappers
+ from sqlalchemy.orm.strategies import LazyLoader
+
+ baked.bake_lazy_loaders()
+ try:
+ User, Address = self._o2m_fixture(lazy='joined')
+
+ configure_mappers()
+
+ is_(
+ User.addresses.property.
+ _get_strategy_by_cls(LazyLoader).__class__,
+ BakedLazyLoader
+ )
+ finally:
+ baked.unbake_lazy_loaders()
+
def test_invocation_systemwide_loaders(self):
baked.bake_lazy_loaders()
try:
@@ -676,6 +764,50 @@ class LazyLoaderTest(BakedTest):
# not invoked
eq_(el.mock_calls, [])
+ def test_baked_lazy_loading_relationship_flag_true(self):
+ self._test_baked_lazy_loading_relationship_flag(True)
+
+ def test_baked_lazy_loading_relationship_flag_false(self):
+ self._test_baked_lazy_loading_relationship_flag(False)
+
+ def _test_baked_lazy_loading_relationship_flag(self, flag):
+ baked.bake_lazy_loaders()
+ try:
+ User, Address = self._o2m_fixture(bake_queries=flag)
+
+ sess = Session()
+ u1 = sess.query(User).first()
+
+ from sqlalchemy.orm import Query
+
+ canary = mock.Mock()
+
+ # I would think Mock can do this but apparently
+ # it cannot (wrap / autospec don't work together)
+ real_compile_context = Query._compile_context
+
+ def _my_compile_context(*arg, **kw):
+ if arg[0].column_descriptions[0]['entity'] is Address:
+ canary()
+ return real_compile_context(*arg, **kw)
+
+ with mock.patch.object(
+ Query,
+ "_compile_context",
+ _my_compile_context
+ ):
+ u1.addresses
+
+ sess.expire(u1)
+ u1.addresses
+ finally:
+ baked.unbake_lazy_loaders()
+
+ if flag:
+ eq_(canary.call_count, 1)
+ else:
+ eq_(canary.call_count, 2)
+
def test_baked_lazy_loading_option_o2m(self):
User, Address = self._o2m_fixture()
self._test_baked_lazy_loading(set_option=True)
diff --git a/test/ext/test_mutable.py b/test/ext/test_mutable.py
index a6bcdc47f..ed97a0d92 100644
--- a/test/ext/test_mutable.py
+++ b/test/ext/test_mutable.py
@@ -136,6 +136,38 @@ class _MutableDictTestBase(_MutableDictTestFixture):
eq_(f1.data, {'a': 'z'})
+ def test_pop(self):
+ sess = Session()
+
+ f1 = Foo(data={'a': 'b', 'c': 'd'})
+ sess.add(f1)
+ sess.commit()
+
+ eq_(f1.data.pop('a'), 'b')
+ sess.commit()
+
+ eq_(f1.data, {'c': 'd'})
+
+ def test_popitem(self):
+ sess = Session()
+
+ orig = {'a': 'b', 'c': 'd'}
+
+ # the orig dict remains unchanged when we assign,
+ # but just making this future-proof
+ data = dict(orig)
+ f1 = Foo(data=data)
+ sess.add(f1)
+ sess.commit()
+
+ k, v = f1.data.popitem()
+ assert k in ('a', 'c')
+ orig.pop(k)
+
+ sess.commit()
+
+ eq_(f1.data, orig)
+
def test_setdefault(self):
sess = Session()
diff --git a/test/orm/inheritance/test_poly_persistence.py b/test/orm/inheritance/test_poly_persistence.py
index c6a54c0b5..361377de8 100644
--- a/test/orm/inheritance/test_poly_persistence.py
+++ b/test/orm/inheritance/test_poly_persistence.py
@@ -1,6 +1,6 @@
"""tests basic polymorphic mapper loading/saving, minimal relationships"""
-from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
+from sqlalchemy.testing import eq_, is_, assert_raises, assert_raises_message
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.orm import exc as orm_exc
@@ -271,18 +271,30 @@ def _generate_round_trip_test(include_base, lazy_relationship,
# into the "person_join" conversion.
palias = people.alias("palias")
dilbert = session.query(Person).get(dilbert.person_id)
- assert dilbert is session.query(Person).filter(
- (palias.c.name=='dilbert') & \
- (palias.c.person_id==Person.person_id)).first()
- assert dilbert is session.query(Engineer).filter(
- (palias.c.name=='dilbert') & \
- (palias.c.person_id==Person.person_id)).first()
- assert dilbert is session.query(Person).filter(
- (Engineer.engineer_name=="engineer1") & \
- (engineers.c.person_id==people.c.person_id)
- ).first()
- assert dilbert is session.query(Engineer).\
- filter(Engineer.engineer_name=="engineer1")[0]
+ is_(
+ dilbert,
+ session.query(Person).filter(
+ (palias.c.name == 'dilbert') &
+ (palias.c.person_id == Person.person_id)).first()
+ )
+ is_(
+ dilbert,
+ session.query(Engineer).filter(
+ (palias.c.name == 'dilbert') &
+ (palias.c.person_id == Person.person_id)).first()
+ )
+ is_(
+ dilbert,
+ session.query(Person).filter(
+ (Engineer.engineer_name == "engineer1") &
+ (engineers.c.person_id == people.c.person_id)
+ ).first()
+ )
+ is_(
+ dilbert,
+ session.query(Engineer).
+ filter(Engineer.engineer_name == "engineer1")[0]
+ )
session.flush()
session.expunge_all()
diff --git a/test/orm/inheritance/test_relationship.py b/test/orm/inheritance/test_relationship.py
index b1d99415d..e75d974d4 100644
--- a/test/orm/inheritance/test_relationship.py
+++ b/test/orm/inheritance/test_relationship.py
@@ -1,6 +1,6 @@
from sqlalchemy.orm import create_session, relationship, mapper, \
contains_eager, joinedload, subqueryload, subqueryload_all,\
- Session, aliased, with_polymorphic
+ Session, aliased, with_polymorphic, joinedload_all
from sqlalchemy import Integer, String, ForeignKey
from sqlalchemy.engine import default
@@ -1360,6 +1360,216 @@ class SubClassToSubClassMultiTest(AssertsCompiledSQL, fixtures.MappedTest):
"JOIN ep2 ON anon_1.base2_id = ep2.base2_id"
)
+
+class JoinedloadOverWPolyAliased(
+ fixtures.DeclarativeMappedTest,
+ testing.AssertsCompiledSQL):
+ """exercise issues in #3593 and #3611"""
+
+ run_setup_mappers = 'each'
+ run_setup_classes = 'each'
+ run_define_tables = 'each'
+ __dialect__ = 'default'
+
+ @classmethod
+ def setup_classes(cls):
+ Base = cls.DeclarativeBasic
+
+ class Owner(Base):
+ __tablename__ = 'owner'
+
+ id = Column(Integer, primary_key=True)
+ type = Column(String(20))
+
+ __mapper_args__ = {
+ 'polymorphic_on': type,
+ 'with_polymorphic': ('*', None),
+ }
+
+ class SubOwner(Owner):
+ __mapper_args__ = {'polymorphic_identity': 'so'}
+
+ class Parent(Base):
+ __tablename__ = 'parent'
+
+ id = Column(Integer, primary_key=True)
+ type = Column(String(20))
+
+ __mapper_args__ = {
+ 'polymorphic_on': type,
+ 'with_polymorphic': ('*', None),
+ }
+
+ class Sub1(Parent):
+ __mapper_args__ = {'polymorphic_identity': 's1'}
+
+ class Link(Base):
+ __tablename__ = 'link'
+
+ parent_id = Column(
+ Integer, ForeignKey('parent.id'), primary_key=True)
+ child_id = Column(
+ Integer, ForeignKey('parent.id'), primary_key=True)
+
+ def _fixture_from_base(self):
+ Parent = self.classes.Parent
+ Link = self.classes.Link
+ Link.child = relationship(
+ Parent, primaryjoin=Link.child_id == Parent.id)
+
+ Parent.links = relationship(
+ Link,
+ primaryjoin=Parent.id == Link.parent_id,
+ )
+ return Parent
+
+ def _fixture_from_subclass(self):
+ Sub1 = self.classes.Sub1
+ Link = self.classes.Link
+ Parent = self.classes.Parent
+ Link.child = relationship(
+ Parent, primaryjoin=Link.child_id == Parent.id)
+
+ Sub1.links = relationship(
+ Link,
+ primaryjoin=Sub1.id == Link.parent_id,
+ )
+ return Sub1
+
+ def _fixture_to_subclass_to_base(self):
+ Owner = self.classes.Owner
+ Parent = self.classes.Parent
+ Sub1 = self.classes.Sub1
+ Link = self.classes.Link
+
+ # Link -> Sub1 -> Owner
+
+ Link.child = relationship(
+ Sub1, primaryjoin=Link.child_id == Sub1.id)
+
+ Parent.owner_id = Column(ForeignKey('owner.id'))
+
+ Parent.owner = relationship(Owner)
+ return Parent
+
+ def _fixture_to_base_to_base(self):
+ Owner = self.classes.Owner
+ Parent = self.classes.Parent
+ Link = self.classes.Link
+
+ # Link -> Parent -> Owner
+
+ Link.child = relationship(
+ Parent, primaryjoin=Link.child_id == Parent.id)
+
+ Parent.owner_id = Column(ForeignKey('owner.id'))
+
+ Parent.owner = relationship(Owner)
+ return Parent
+
+ def test_from_base(self):
+ self._test_poly_single_poly(self._fixture_from_base)
+
+ def test_from_sub(self):
+ self._test_poly_single_poly(self._fixture_from_subclass)
+
+ def test_to_sub_to_base(self):
+ self._test_single_poly_poly(self._fixture_to_subclass_to_base)
+
+ def test_to_base_to_base(self):
+ self._test_single_poly_poly(self._fixture_to_base_to_base)
+
+ def _test_poly_single_poly(self, fn):
+ cls = fn()
+ Link = self.classes.Link
+
+ session = Session()
+ q = session.query(cls).options(
+ joinedload_all(
+ cls.links,
+ Link.child,
+ cls.links
+ )
+ )
+ if cls is self.classes.Sub1:
+ extra = " WHERE parent.type IN (:type_1)"
+ else:
+ extra = ""
+
+ self.assert_compile(
+ q,
+ "SELECT parent.id AS parent_id, parent.type AS parent_type, "
+ "link_1.parent_id AS link_1_parent_id, "
+ "link_1.child_id AS link_1_child_id, "
+ "parent_1.id AS parent_1_id, parent_1.type AS parent_1_type, "
+ "link_2.parent_id AS link_2_parent_id, "
+ "link_2.child_id AS link_2_child_id "
+ "FROM parent "
+ "LEFT OUTER JOIN link AS link_1 ON parent.id = link_1.parent_id "
+ "LEFT OUTER JOIN parent "
+ "AS parent_1 ON link_1.child_id = parent_1.id "
+ "LEFT OUTER JOIN link AS link_2 "
+ "ON parent_1.id = link_2.parent_id" + extra
+ )
+
+ def _test_single_poly_poly(self, fn):
+ parent_cls = fn()
+ Link = self.classes.Link
+
+ session = Session()
+ q = session.query(Link).options(
+ joinedload_all(
+ Link.child,
+ parent_cls.owner
+ )
+ )
+
+ if Link.child.property.mapper.class_ is self.classes.Sub1:
+ extra = "AND parent_1.type IN (:type_1) "
+ else:
+ extra = ""
+
+ self.assert_compile(
+ q,
+ "SELECT link.parent_id AS link_parent_id, "
+ "link.child_id AS link_child_id, parent_1.id AS parent_1_id, "
+ "parent_1.type AS parent_1_type, "
+ "parent_1.owner_id AS parent_1_owner_id, "
+ "owner_1.id AS owner_1_id, owner_1.type AS owner_1_type "
+ "FROM link LEFT OUTER JOIN parent AS parent_1 "
+ "ON link.child_id = parent_1.id " + extra +
+ "LEFT OUTER JOIN owner AS owner_1 "
+ "ON owner_1.id = parent_1.owner_id"
+ )
+
+ def test_local_wpoly(self):
+ Sub1 = self._fixture_from_subclass()
+ Parent = self.classes.Parent
+ Link = self.classes.Link
+
+ poly = with_polymorphic(Parent, [Sub1])
+
+ session = Session()
+ q = session.query(poly).options(
+ joinedload(poly.Sub1.links).
+ joinedload(Link.child.of_type(Sub1)).
+ joinedload(poly.Sub1.links)
+ )
+ self.assert_compile(
+ q,
+ "SELECT parent.id AS parent_id, parent.type AS parent_type, "
+ "link_1.parent_id AS link_1_parent_id, "
+ "link_1.child_id AS link_1_child_id, "
+ "parent_1.id AS parent_1_id, parent_1.type AS parent_1_type, "
+ "link_2.parent_id AS link_2_parent_id, "
+ "link_2.child_id AS link_2_child_id FROM parent "
+ "LEFT OUTER JOIN link AS link_1 ON parent.id = link_1.parent_id "
+ "LEFT OUTER JOIN parent AS parent_1 "
+ "ON link_1.child_id = parent_1.id "
+ "LEFT OUTER JOIN link AS link_2 ON parent_1.id = link_2.parent_id"
+ )
+
+
class JoinAcrossJoinedInhMultiPath(fixtures.DeclarativeMappedTest,
testing.AssertsCompiledSQL):
"""test long join paths with a joined-inh in the middle, where we go multiple
diff --git a/test/orm/inheritance/test_single.py b/test/orm/inheritance/test_single.py
index 9f5d21a43..0d102c065 100644
--- a/test/orm/inheritance/test_single.py
+++ b/test/orm/inheritance/test_single.py
@@ -9,6 +9,8 @@ from sqlalchemy.testing.schema import Table, Column
class SingleInheritanceTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
+ __dialect__ = 'default'
+
@classmethod
def define_tables(cls, metadata):
Table('employees', metadata,
@@ -208,6 +210,19 @@ class SingleInheritanceTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
eq_(sess.query(Manager).filter(Manager.name.like('%m%')).count(), 2)
eq_(sess.query(Employee).filter(Employee.name.like('%m%')).count(), 3)
+ def test_exists_standalone(self):
+ Engineer = self.classes.Engineer
+
+ sess = create_session()
+
+ self.assert_compile(
+ sess.query(
+ sess.query(Engineer).filter(Engineer.name == 'foo').exists()),
+ "SELECT EXISTS (SELECT 1 FROM employees WHERE "
+ "employees.name = :name_1 AND employees.type "
+ "IN (:type_1, :type_2)) AS anon_1"
+ )
+
def test_type_filtering(self):
Employee, Manager, reports, Engineer = (self.classes.Employee,
self.classes.Manager,
diff --git a/test/orm/test_bulk.py b/test/orm/test_bulk.py
index e2a1464a6..878560cf6 100644
--- a/test/orm/test_bulk.py
+++ b/test/orm/test_bulk.py
@@ -2,7 +2,7 @@ from sqlalchemy import testing
from sqlalchemy.testing import eq_
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.testing import fixtures
-from sqlalchemy import Integer, String, ForeignKey
+from sqlalchemy import Integer, String, ForeignKey, FetchedValue
from sqlalchemy.orm import mapper, Session
from sqlalchemy.testing.assertsql import CompiledSQL
from test.orm import _fixtures
@@ -156,6 +156,59 @@ class BulkInsertUpdateTest(BulkTest, _fixtures.FixtureTest):
)
+class BulkUDPostfetchTest(BulkTest, fixtures.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'a', metadata,
+ Column(
+ 'id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('x', Integer),
+ Column('y', Integer,
+ server_default=FetchedValue(),
+ server_onupdate=FetchedValue()))
+
+ @classmethod
+ def setup_classes(cls):
+ class A(cls.Comparable):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ A = cls.classes.A
+ a = cls.tables.a
+
+ mapper(A, a)
+
+ def test_insert_w_fetch(self):
+ A = self.classes.A
+
+ s = Session()
+ a1 = A(x=1)
+ s.bulk_save_objects([a1])
+ s.commit()
+
+ def test_update_w_fetch(self):
+ A = self.classes.A
+
+ s = Session()
+ a1 = A(x=1, y=2)
+ s.add(a1)
+ s.commit()
+
+ eq_(a1.id, 1) # force a load
+ a1.x = 5
+ s.expire(a1, ['y'])
+ assert 'y' not in a1.__dict__
+ s.bulk_save_objects([a1])
+ s.commit()
+
+ eq_(a1.x, 5)
+ eq_(a1.y, 2)
+
+
class BulkInheritanceTest(BulkTest, fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
diff --git a/test/orm/test_composites.py b/test/orm/test_composites.py
index 8b777dcdf..48027ec2d 100644
--- a/test/orm/test_composites.py
+++ b/test/orm/test_composites.py
@@ -313,8 +313,7 @@ class PrimaryKeyTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('graphs', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True),
Column('version_id', Integer, primary_key=True,
nullable=True),
Column('name', String(30)))
diff --git a/test/orm/test_cycles.py b/test/orm/test_cycles.py
index c95b8d152..b5c1b6467 100644
--- a/test/orm/test_cycles.py
+++ b/test/orm/test_cycles.py
@@ -10,7 +10,7 @@ from sqlalchemy import Integer, String, ForeignKey
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import mapper, relationship, backref, \
create_session, sessionmaker
-from sqlalchemy.testing import eq_
+from sqlalchemy.testing import eq_, is_
from sqlalchemy.testing.assertsql import RegexSQL, CompiledSQL, AllOf
from sqlalchemy.testing import fixtures
@@ -816,6 +816,39 @@ class OneToManyManyToOneTest(fixtures.MappedTest):
{'id': b4.id}])
)
+ def test_post_update_m2o_detect_none(self):
+ person, ball, Ball, Person = (
+ self.tables.person,
+ self.tables.ball,
+ self.classes.Ball,
+ self.classes.Person)
+
+ mapper(Ball, ball, properties={
+ 'person': relationship(
+ Person, post_update=True,
+ primaryjoin=person.c.id == ball.c.person_id)
+ })
+ mapper(Person, person)
+
+ sess = create_session(autocommit=False, expire_on_commit=True)
+ sess.add(Ball(person=Person()))
+ sess.commit()
+ b1 = sess.query(Ball).first()
+
+ # needs to be unloaded
+ assert 'person' not in b1.__dict__
+ b1.person = None
+
+ self.assert_sql_execution(
+ testing.db,
+ sess.flush,
+ CompiledSQL(
+ "UPDATE ball SET person_id=:person_id WHERE ball.id = :ball_id",
+ lambda ctx: {'person_id': None, 'ball_id': b1.id})
+ )
+
+ is_(b1.person, None)
+
class SelfReferentialPostUpdateTest(fixtures.MappedTest):
"""Post_update on a single self-referential mapper.
@@ -1181,9 +1214,10 @@ class PostUpdateBatchingTest(fixtures.MappedTest):
testing.db,
sess.flush,
CompiledSQL(
- "UPDATE parent SET c1_id=:c1_id, c2_id=:c2_id, "
- "c3_id=:c3_id WHERE parent.id = :parent_id",
- lambda ctx: {'c2_id': c23.id, 'parent_id': p1.id, 'c1_id': c12.id, 'c3_id': c31.id}
+ "UPDATE parent SET c1_id=:c1_id, c2_id=:c2_id, c3_id=:c3_id "
+ "WHERE parent.id = :parent_id",
+ lambda ctx: {'c2_id': c23.id, 'parent_id': p1.id,
+ 'c1_id': c12.id, 'c3_id': c31.id}
)
)
@@ -1193,8 +1227,9 @@ class PostUpdateBatchingTest(fixtures.MappedTest):
testing.db,
sess.flush,
CompiledSQL(
- "UPDATE parent SET c1_id=:c1_id, c2_id=:c2_id, "
- "c3_id=:c3_id WHERE parent.id = :parent_id",
- lambda ctx: {'c2_id': None, 'parent_id': p1.id, 'c1_id': None, 'c3_id': None}
+ "UPDATE parent SET c1_id=:c1_id, c2_id=:c2_id, c3_id=:c3_id "
+ "WHERE parent.id = :parent_id",
+ lambda ctx: {'c2_id': None, 'parent_id': p1.id,
+ 'c1_id': None, 'c3_id': None}
)
)
diff --git a/test/orm/test_eager_relations.py b/test/orm/test_eager_relations.py
index 6d9d9ec4b..1c3b57690 100644
--- a/test/orm/test_eager_relations.py
+++ b/test/orm/test_eager_relations.py
@@ -5,7 +5,7 @@ import sqlalchemy as sa
from sqlalchemy import testing
from sqlalchemy.orm import joinedload, deferred, undefer, \
joinedload_all, backref, Session,\
- defaultload, Load
+ defaultload, Load, load_only
from sqlalchemy import Integer, String, Date, ForeignKey, and_, select, \
func, text
from sqlalchemy.testing.schema import Table, Column
@@ -2442,6 +2442,7 @@ class SubqueryAliasingTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
"""test #2188"""
__dialect__ = 'default'
+ run_create_tables = None
@classmethod
def define_tables(cls, metadata):
@@ -4013,6 +4014,7 @@ class CyclicalInheritingEagerTestTwo(fixtures.DeclarativeMappedTest,
class CyclicalInheritingEagerTestThree(fixtures.DeclarativeMappedTest,
testing.AssertsCompiledSQL):
__dialect__ = 'default'
+ run_create_tables = None
@classmethod
def setup_classes(cls):
@@ -4067,3 +4069,112 @@ class CyclicalInheritingEagerTestThree(fixtures.DeclarativeMappedTest,
"director_1.id = persistent_1.id) "
"ON director.other_id = persistent_1.id"
)
+
+
+class EnsureColumnsAddedTest(
+ fixtures.DeclarativeMappedTest, testing.AssertsCompiledSQL):
+ __dialect__ = 'default'
+ run_create_tables = None
+
+ @classmethod
+ def setup_classes(cls):
+ Base = cls.DeclarativeBasic
+
+ class Parent(Base):
+ __tablename__ = 'parent'
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
+ arb = Column(Integer, unique=True)
+ data = Column(Integer)
+ o2mchild = relationship("O2MChild")
+ m2mchild = relationship("M2MChild", secondary=Table(
+ 'parent_to_m2m', Base.metadata,
+ Column('parent_id', ForeignKey('parent.arb')),
+ Column('child_id', ForeignKey('m2mchild.id'))
+ ))
+
+ class O2MChild(Base):
+ __tablename__ = 'o2mchild'
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
+ parent_id = Column(ForeignKey('parent.arb'))
+
+ class M2MChild(Base):
+ __tablename__ = 'm2mchild'
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
+
+ def test_joinedload_defered_pk_limit_o2m(self):
+ Parent = self.classes.Parent
+
+ s = Session()
+
+ self.assert_compile(
+ s.query(Parent).options(
+ load_only('data'),
+ joinedload(Parent.o2mchild)).limit(10),
+ "SELECT anon_1.parent_id AS anon_1_parent_id, "
+ "anon_1.parent_data AS anon_1_parent_data, "
+ "anon_1.parent_arb AS anon_1_parent_arb, "
+ "o2mchild_1.id AS o2mchild_1_id, "
+ "o2mchild_1.parent_id AS o2mchild_1_parent_id "
+ "FROM (SELECT parent.id AS parent_id, parent.data AS parent_data, "
+ "parent.arb AS parent_arb FROM parent LIMIT :param_1) AS anon_1 "
+ "LEFT OUTER JOIN o2mchild AS o2mchild_1 "
+ "ON anon_1.parent_arb = o2mchild_1.parent_id"
+ )
+
+ def test_joinedload_defered_pk_limit_m2m(self):
+ Parent = self.classes.Parent
+
+ s = Session()
+
+ self.assert_compile(
+ s.query(Parent).options(
+ load_only('data'),
+ joinedload(Parent.m2mchild)).limit(10),
+ "SELECT anon_1.parent_id AS anon_1_parent_id, "
+ "anon_1.parent_data AS anon_1_parent_data, "
+ "anon_1.parent_arb AS anon_1_parent_arb, "
+ "m2mchild_1.id AS m2mchild_1_id "
+ "FROM (SELECT parent.id AS parent_id, "
+ "parent.data AS parent_data, parent.arb AS parent_arb "
+ "FROM parent LIMIT :param_1) AS anon_1 "
+ "LEFT OUTER JOIN (parent_to_m2m AS parent_to_m2m_1 "
+ "JOIN m2mchild AS m2mchild_1 "
+ "ON m2mchild_1.id = parent_to_m2m_1.child_id) "
+ "ON anon_1.parent_arb = parent_to_m2m_1.parent_id"
+ )
+
+ def test_joinedload_defered_pk_o2m(self):
+ Parent = self.classes.Parent
+
+ s = Session()
+
+ self.assert_compile(
+ s.query(Parent).options(
+ load_only('data'),
+ joinedload(Parent.o2mchild)),
+ "SELECT parent.id AS parent_id, parent.data AS parent_data, "
+ "parent.arb AS parent_arb, o2mchild_1.id AS o2mchild_1_id, "
+ "o2mchild_1.parent_id AS o2mchild_1_parent_id "
+ "FROM parent LEFT OUTER JOIN o2mchild AS o2mchild_1 "
+ "ON parent.arb = o2mchild_1.parent_id"
+ )
+
+ def test_joinedload_defered_pk_m2m(self):
+ Parent = self.classes.Parent
+
+ s = Session()
+
+ self.assert_compile(
+ s.query(Parent).options(
+ load_only('data'),
+ joinedload(Parent.m2mchild)),
+ "SELECT parent.id AS parent_id, parent.data AS parent_data, "
+ "parent.arb AS parent_arb, m2mchild_1.id AS m2mchild_1_id "
+ "FROM parent LEFT OUTER JOIN (parent_to_m2m AS parent_to_m2m_1 "
+ "JOIN m2mchild AS m2mchild_1 "
+ "ON m2mchild_1.id = parent_to_m2m_1.child_id) "
+ "ON parent.arb = parent_to_m2m_1.parent_id"
+ )
diff --git a/test/orm/test_events.py b/test/orm/test_events.py
index ae7ba98c1..ab61077ae 100644
--- a/test/orm/test_events.py
+++ b/test/orm/test_events.py
@@ -111,6 +111,43 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
event.listen(mapper, meth, evt(meth), **kw)
return canary
+ def test_init_allow_kw_modify(self):
+ User, users = self.classes.User, self.tables.users
+ mapper(User, users)
+
+ @event.listens_for(User, 'init')
+ def add_name(obj, args, kwargs):
+ kwargs['name'] = 'ed'
+
+ u1 = User()
+ eq_(u1.name, 'ed')
+
+ def test_init_failure_hook(self):
+ users = self.tables.users
+
+ class Thing(object):
+ def __init__(self, **kw):
+ if kw.get('fail'):
+ raise Exception("failure")
+
+ mapper(Thing, users)
+
+ canary = Mock()
+ event.listen(Thing, 'init_failure', canary)
+
+ Thing()
+ eq_(canary.mock_calls, [])
+
+ assert_raises_message(
+ Exception,
+ "failure",
+ Thing, fail=True
+ )
+ eq_(
+ canary.mock_calls,
+ [call(ANY, (), {'fail': True})]
+ )
+
def test_listen_doesnt_force_compile(self):
User, users = self.classes.User, self.tables.users
m = mapper(User, users, properties={
@@ -1580,6 +1617,506 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
)
+class SessionLifecycleEventsTest(_RemoveListeners, _fixtures.FixtureTest):
+ run_inserts = None
+
+ def _fixture(self, include_address=False):
+ users, User = self.tables.users, self.classes.User
+
+ if include_address:
+ addresses, Address = self.tables.addresses, self.classes.Address
+ mapper(User, users, properties={
+ "addresses": relationship(
+ Address, cascade="all, delete-orphan")
+ })
+ mapper(Address, addresses)
+ else:
+ mapper(User, users)
+
+ listener = Mock()
+
+ sess = Session()
+
+ def start_events():
+ event.listen(
+ sess, "transient_to_pending", listener.transient_to_pending)
+ event.listen(
+ sess, "pending_to_transient", listener.pending_to_transient)
+ event.listen(
+ sess, "persistent_to_transient",
+ listener.persistent_to_transient)
+ event.listen(
+ sess, "pending_to_persistent", listener.pending_to_persistent)
+ event.listen(
+ sess, "detached_to_persistent",
+ listener.detached_to_persistent)
+ event.listen(
+ sess, "loaded_as_persistent", listener.loaded_as_persistent)
+
+ event.listen(
+ sess, "persistent_to_detached",
+ listener.persistent_to_detached)
+ event.listen(
+ sess, "deleted_to_detached", listener.deleted_to_detached)
+
+ event.listen(
+ sess, "persistent_to_deleted", listener.persistent_to_deleted)
+ event.listen(
+ sess, "deleted_to_persistent", listener.deleted_to_persistent)
+ return listener
+
+ if include_address:
+ return sess, User, Address, start_events
+ else:
+ return sess, User, start_events
+
+ def test_transient_to_pending(self):
+ sess, User, start_events = self._fixture()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "transient_to_pending")
+ def trans_to_pending(session, instance):
+ assert instance in session
+ listener.flag_checked(instance)
+
+ u1 = User(name='u1')
+ sess.add(u1)
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.transient_to_pending(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_pending_to_transient_via_rollback(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+
+ listener = start_events()
+
+ @event.listens_for(sess, "pending_to_transient")
+ def test_deleted_flag(session, instance):
+ assert instance not in session
+ listener.flag_checked(instance)
+
+ sess.rollback()
+ assert u1 not in sess
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.pending_to_transient(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_pending_to_transient_via_expunge(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+
+ listener = start_events()
+
+ @event.listens_for(sess, "pending_to_transient")
+ def test_deleted_flag(session, instance):
+ assert instance not in session
+ listener.flag_checked(instance)
+
+ sess.expunge(u1)
+ assert u1 not in sess
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.pending_to_transient(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_pending_to_persistent(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+
+ listener = start_events()
+
+ @event.listens_for(sess, "pending_to_persistent")
+ def test_flag(session, instance):
+ assert instance in session
+ assert instance._sa_instance_state.persistent
+ assert instance._sa_instance_state.key in session.identity_map
+ listener.flag_checked(instance)
+
+ sess.flush()
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.pending_to_persistent(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_detached_to_persistent(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.flush()
+
+ sess.expunge(u1)
+
+ listener = start_events()
+
+ @event.listens_for(sess, "detached_to_persistent")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance in session
+ listener.flag_checked()
+
+ sess.add(u1)
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.detached_to_persistent(sess, u1),
+ call.flag_checked()
+ ]
+ )
+
+ def test_loaded_as_persistent(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.commit()
+ sess.close()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "loaded_as_persistent")
+ def test_identity_flag(session, instance):
+ assert instance in session
+ assert instance._sa_instance_state.persistent
+ assert instance._sa_instance_state.key in session.identity_map
+ assert not instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ assert instance._sa_instance_state.persistent
+ listener.flag_checked(instance)
+
+ u1 = sess.query(User).filter_by(name='u1').one()
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.loaded_as_persistent(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_detached_to_persistent_via_deleted(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.commit()
+ sess.close()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "detached_to_persistent")
+ def test_deleted_flag_persistent(session, instance):
+ assert instance not in session.deleted
+ assert instance in session
+ assert not instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ assert instance._sa_instance_state.persistent
+ listener.dtp_flag_checked(instance)
+
+ @event.listens_for(sess, "persistent_to_deleted")
+ def test_deleted_flag_detached(session, instance):
+ assert instance not in session.deleted
+ assert instance not in session
+ assert not instance._sa_instance_state.persistent
+ assert instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ listener.ptd_flag_checked(instance)
+
+ sess.delete(u1)
+ assert u1 in sess.deleted
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.detached_to_persistent(sess, u1),
+ call.dtp_flag_checked(u1)
+ ]
+ )
+
+ sess.flush()
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.detached_to_persistent(sess, u1),
+ call.dtp_flag_checked(u1),
+ call.persistent_to_deleted(sess, u1),
+ call.ptd_flag_checked(u1),
+ ]
+ )
+
+ def test_detached_to_persistent_via_cascaded_delete(self):
+ sess, User, Address, start_events = self._fixture(include_address=True)
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ a1 = Address(email_address='e1')
+ u1.addresses.append(a1)
+ sess.commit()
+ u1.addresses # ensure u1.addresses refers to a1 before detachment
+ sess.close()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "detached_to_persistent")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance in session
+ assert not instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ assert instance._sa_instance_state.persistent
+ listener.flag_checked(instance)
+
+ sess.delete(u1)
+ assert u1 in sess.deleted
+ assert a1 in sess.deleted
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.detached_to_persistent(sess, u1),
+ call.flag_checked(u1),
+ call.detached_to_persistent(sess, a1),
+ call.flag_checked(a1),
+ ]
+ )
+
+ sess.flush()
+
+ def test_persistent_to_deleted(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.commit()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "persistent_to_deleted")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance not in session
+ assert instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ assert not instance._sa_instance_state.persistent
+ listener.flag_checked(instance)
+
+ sess.delete(u1)
+ assert u1 in sess.deleted
+
+ eq_(
+ listener.mock_calls,
+ []
+ )
+
+ sess.flush()
+ assert u1 not in sess
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.persistent_to_deleted(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_persistent_to_detached_via_expunge(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.flush()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "persistent_to_detached")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance not in session
+ assert not instance._sa_instance_state.deleted
+ assert instance._sa_instance_state.detached
+ assert not instance._sa_instance_state.persistent
+ listener.flag_checked(instance)
+
+ assert u1 in sess
+ sess.expunge(u1)
+ assert u1 not in sess
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.persistent_to_detached(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_persistent_to_detached_via_expunge_all(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.flush()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "persistent_to_detached")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance not in session
+ assert not instance._sa_instance_state.deleted
+ assert instance._sa_instance_state.detached
+ assert not instance._sa_instance_state.persistent
+ listener.flag_checked(instance)
+
+ assert u1 in sess
+ sess.expunge_all()
+ assert u1 not in sess
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.persistent_to_detached(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_persistent_to_transient_via_rollback(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.flush()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "persistent_to_transient")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance not in session
+ assert not instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ assert not instance._sa_instance_state.persistent
+ assert instance._sa_instance_state.transient
+ listener.flag_checked(instance)
+
+ sess.rollback()
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.persistent_to_transient(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_deleted_to_persistent_via_rollback(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.commit()
+
+ sess.delete(u1)
+ sess.flush()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "deleted_to_persistent")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance in session
+ assert not instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ assert instance._sa_instance_state.persistent
+ listener.flag_checked(instance)
+
+ assert u1 not in sess
+ assert u1._sa_instance_state.deleted
+ assert not u1._sa_instance_state.persistent
+ assert not u1._sa_instance_state.detached
+
+ sess.rollback()
+
+ assert u1 in sess
+ assert u1._sa_instance_state.persistent
+ assert not u1._sa_instance_state.deleted
+ assert not u1._sa_instance_state.detached
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.deleted_to_persistent(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_deleted_to_detached_via_commit(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.commit()
+
+ sess.delete(u1)
+ sess.flush()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "deleted_to_detached")
+ def test_detached_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance not in session
+ assert not instance._sa_instance_state.deleted
+ assert instance._sa_instance_state.detached
+ listener.flag_checked(instance)
+
+ assert u1 not in sess
+ assert u1._sa_instance_state.deleted
+ assert not u1._sa_instance_state.persistent
+ assert not u1._sa_instance_state.detached
+
+ sess.commit()
+
+ assert u1 not in sess
+ assert not u1._sa_instance_state.deleted
+ assert u1._sa_instance_state.detached
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.deleted_to_detached(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+
class MapperExtensionTest(_fixtures.FixtureTest):
"""Superseded by MapperEventsTest - test backwards
diff --git a/test/orm/test_hasparent.py b/test/orm/test_hasparent.py
index fd246b527..df4b05980 100644
--- a/test/orm/test_hasparent.py
+++ b/test/orm/test_hasparent.py
@@ -116,7 +116,7 @@ class ParentRemovalTest(fixtures.MappedTest):
User = self.classes.User
s, u1, a1 = self._fixture()
- s._expunge_state(attributes.instance_state(u1))
+ s._expunge_states([attributes.instance_state(u1)])
del u1
gc_collect()
@@ -178,7 +178,7 @@ class ParentRemovalTest(fixtures.MappedTest):
u2 = User(addresses=[a1])
s.add(u2)
s.flush()
- s._expunge_state(attributes.instance_state(u2))
+ s._expunge_states([attributes.instance_state(u2)])
del u2
gc_collect()
diff --git a/test/orm/test_lazy_relations.py b/test/orm/test_lazy_relations.py
index ea39753b4..f2e1db2da 100644
--- a/test/orm/test_lazy_relations.py
+++ b/test/orm/test_lazy_relations.py
@@ -1073,3 +1073,78 @@ class RefersToSelfLazyLoadInterferenceTest(fixtures.MappedTest):
session.query(B).options(
sa.orm.joinedload('parent').joinedload('zc')).all()
+
+class TypeCoerceTest(fixtures.MappedTest, testing.AssertsExecutionResults,):
+ """ORM-level test for [ticket:3531]"""
+
+ # mysql is having a recursion issue in the bind_expression
+ __only_on__ = ('sqlite', 'postgresql')
+
+ class StringAsInt(TypeDecorator):
+ impl = String(50)
+
+ def column_expression(self, col):
+ return sa.cast(col, Integer)
+
+ def bind_expression(self, col):
+ return sa.cast(col, String)
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'person', metadata,
+ Column("id", cls.StringAsInt, primary_key=True),
+ )
+ Table(
+ "pets", metadata,
+ Column("id", Integer, primary_key=True),
+ Column("person_id", Integer),
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class Person(cls.Basic):
+ pass
+
+ class Pet(cls.Basic):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ mapper(cls.classes.Person, cls.tables.person, properties=dict(
+ pets=relationship(
+ cls.classes.Pet, primaryjoin=(
+ orm.foreign(cls.tables.pets.c.person_id) ==
+ sa.cast(
+ sa.type_coerce(cls.tables.person.c.id, Integer),
+ Integer
+ )
+ )
+ )
+ ))
+
+ mapper(cls.classes.Pet, cls.tables.pets)
+
+ def test_lazyload_singlecast(self):
+ Person = self.classes.Person
+ Pet = self.classes.Pet
+
+ s = Session()
+ s.add_all([
+ Person(id=5), Pet(id=1, person_id=5)
+ ])
+ s.commit()
+
+ p1 = s.query(Person).first()
+
+ with self.sql_execution_asserter() as asserter:
+ p1.pets
+
+ asserter.assert_(
+ CompiledSQL(
+ "SELECT pets.id AS pets_id, pets.person_id "
+ "AS pets_person_id FROM pets "
+ "WHERE pets.person_id = CAST(:param_1 AS INTEGER)",
+ [{'param_1': 5}]
+ )
+ )
diff --git a/test/orm/test_load_on_fks.py b/test/orm/test_load_on_fks.py
index 813d8d17a..471c8665a 100644
--- a/test/orm/test_load_on_fks.py
+++ b/test/orm/test_load_on_fks.py
@@ -301,7 +301,8 @@ class LoadOnFKsTest(AssertsExecutionResults, fixtures.TestBase):
c2 = Child()
if attach:
- sess._attach(instance_state(c2))
+ state = instance_state(c2)
+ state.session_id = sess.hash_key
if enable_relationship_rel:
sess.enable_relationship_loading(c2)
diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py
index 264b386d4..6845ababb 100644
--- a/test/orm/test_mapper.py
+++ b/test/orm/test_mapper.py
@@ -8,7 +8,7 @@ from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.engine import default
from sqlalchemy.orm import mapper, relationship, backref, \
create_session, class_mapper, configure_mappers, reconstructor, \
- validates, aliased, defer, deferred, synonym, attributes, \
+ aliased, deferred, synonym, attributes, \
column_property, composite, dynamic_loader, \
comparable_property, Session
from sqlalchemy.orm.persistence import _sort_states
@@ -19,6 +19,7 @@ from sqlalchemy.testing.assertsql import CompiledSQL
import logging
import logging.handlers
+
class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -26,33 +27,34 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""A backref name may not shadow an existing property name."""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(Address, addresses)
mapper(User, users,
- properties={
- 'addresses':relationship(Address, backref='email_address')
- })
+ properties={
+ 'addresses': relationship(Address, backref='email_address')
+ })
assert_raises(sa.exc.ArgumentError, sa.orm.configure_mappers)
def test_update_attr_keys(self):
- """test that update()/insert() use the correct key when given InstrumentedAttributes."""
+ """test that update()/insert() use the correct key when given
+ InstrumentedAttributes."""
User, users = self.classes.User, self.tables.users
-
mapper(User, users, properties={
- 'foobar':users.c.name
+ 'foobar': users.c.name
})
- users.insert().values({User.foobar:'name1'}).execute()
- eq_(sa.select([User.foobar]).where(User.foobar=='name1').execute().fetchall(), [('name1',)])
+ users.insert().values({User.foobar: 'name1'}).execute()
+ eq_(sa.select([User.foobar]).where(User.foobar == 'name1').
+ execute().fetchall(), [('name1',)])
- users.update().values({User.foobar:User.foobar + 'foo'}).execute()
- eq_(sa.select([User.foobar]).where(User.foobar=='name1foo').execute().fetchall(), [('name1foo',)])
+ users.update().values({User.foobar: User.foobar + 'foo'}).execute()
+ eq_(sa.select([User.foobar]).where(User.foobar == 'name1foo').
+ execute().fetchall(), [('name1foo',)])
def test_utils(self):
users = self.tables.users
@@ -63,12 +65,12 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class Foo(object):
x = "something"
+
@property
def y(self):
return "something else"
-
- m = mapper(Foo, users, properties={"addresses":relationship(Address)})
+ m = mapper(Foo, users, properties={"addresses": relationship(Address)})
mapper(Address, addresses)
a1 = aliased(Foo)
@@ -100,14 +102,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class Foo(object):
x = "something"
+
@property
def y(self):
return "something else"
m = mapper(Foo, users)
a1 = aliased(Foo)
- f = Foo()
-
for arg, key, ret in [
(m, "x", Foo.x),
(Foo, "x", Foo.x),
@@ -122,7 +123,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def boom():
raise Exception("it broke")
mapper(User, users, properties={
- 'addresses':relationship(boom)
+ 'addresses': relationship(boom)
})
# test that QueryableAttribute.__str__() doesn't
@@ -137,12 +138,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""
Address, addresses, User = (self.classes.Address,
- self.tables.addresses,
- self.classes.User)
-
+ self.tables.addresses,
+ self.classes.User)
mapper(Address, addresses, properties={
- 'user':relationship(User)
+ 'user': relationship(User)
})
try:
@@ -156,8 +156,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"initialize - can't proceed with "
"initialization of other mappers. "
"Original exception was: Class "
- "'test.orm._fixtures.User' is not mapped$"
- , configure_mappers)
+ "'test.orm._fixtures.User' is not mapped$",
+ configure_mappers)
def test_column_prefix(self):
users, User = self.tables.users, self.classes.User
@@ -169,7 +169,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
s = create_session()
u = s.query(User).get(7)
eq_(u._name, 'jack')
- eq_(u._id,7)
+ eq_(u._id, 7)
u2 = s.query(User).filter_by(user_name='jack').one()
assert u is u2
@@ -190,16 +190,16 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
still triggers a check against all mappers."""
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users)
sa.orm.configure_mappers()
assert sa.orm.mapperlib.Mapper._new_mappers is False
m = mapper(Address, addresses, properties={
- 'user': relationship(User, backref="addresses")})
+ 'user': relationship(User, backref="addresses")})
assert m.configured is False
assert sa.orm.mapperlib.Mapper._new_mappers is True
@@ -232,13 +232,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_column_not_present(self):
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
+ self.tables.addresses,
+ self.classes.User)
assert_raises_message(sa.exc.ArgumentError,
"not represented in the mapper's table",
- mapper, User, users, properties={'foo'
- : addresses.c.user_id})
+ mapper, User, users,
+ properties={'foo': addresses.c.user_id})
def test_constructor_exc(self):
"""TypeError is raised for illegal constructor args,
@@ -246,10 +246,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
users, addresses = self.tables.users, self.tables.addresses
-
class Foo(object):
+
def __init__(self):
pass
+
class Bar(object):
pass
@@ -266,13 +267,15 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""
class Foo(object):
+
def __init__(self, id):
self.id = id
m = MetaData()
foo_t = Table('foo', m,
- Column('id', String, primary_key=True)
- )
+ Column('id', String, primary_key=True)
+ )
m = mapper(Foo, foo_t)
+
class DontCompareMeToString(int):
if util.py2k:
def __lt__(self, other):
@@ -292,24 +295,23 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
[states[4], states[3], states[0], states[1], states[2]]
)
-
def test_props(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
- m = mapper(User, users, properties = {
- 'addresses' : relationship(mapper(Address, addresses))
+ m = mapper(User, users, properties={
+ 'addresses': relationship(mapper(Address, addresses))
})
assert User.addresses.property is m.get_property('addresses')
def test_unicode_relationship_backref_names(self):
# test [ticket:2901]
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(Address, addresses)
mapper(User, users, properties={
@@ -322,56 +324,62 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_configure_on_prop_1(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
- mapper(User, users, properties = {
- 'addresses' : relationship(mapper(Address, addresses))
+ mapper(User, users, properties={
+ 'addresses': relationship(mapper(Address, addresses))
})
- User.addresses.any(Address.email_address=='foo@bar.com')
+ User.addresses.any(Address.email_address == 'foo@bar.com')
def test_configure_on_prop_2(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
- mapper(User, users, properties = {
- 'addresses' : relationship(mapper(Address, addresses))
+ mapper(User, users, properties={
+ 'addresses': relationship(mapper(Address, addresses))
})
- eq_(str(User.id == 3), str(users.c.id==3))
+ eq_(str(User.id == 3), str(users.c.id == 3))
def test_configure_on_prop_3(self):
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
+ self.tables.addresses,
+ self.classes.User)
+
+ class Foo(User):
+ pass
- class Foo(User):pass
mapper(User, users)
mapper(Foo, addresses, inherits=User, properties={
- 'address_id': addresses.c.id
- })
+ 'address_id': addresses.c.id
+ })
assert getattr(Foo().__class__, 'name').impl is not None
def test_deferred_subclass_attribute_instrument(self):
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
+ self.tables.addresses,
+ self.classes.User)
+
+ class Foo(User):
+ pass
- class Foo(User):pass
mapper(User, users)
configure_mappers()
mapper(Foo, addresses, inherits=User, properties={
- 'address_id': addresses.c.id
- })
+ 'address_id': addresses.c.id
+ })
assert getattr(Foo().__class__, 'name').impl is not None
def test_check_descriptor_as_method(self):
User, users = self.classes.User, self.tables.users
m = mapper(User, users)
+
class MyClass(User):
+
def foo(self):
pass
m._is_userland_descriptor(MyClass.foo)
@@ -379,7 +387,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_configure_on_get_props_1(self):
User, users = self.classes.User, self.tables.users
- m =mapper(User, users)
+ m = mapper(User, users)
assert not m.configured
assert list(m.iterate_properties)
assert m.configured
@@ -387,29 +395,30 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_configure_on_get_props_2(self):
User, users = self.classes.User, self.tables.users
- m= mapper(User, users)
+ m = mapper(User, users)
assert not m.configured
assert m.get_property('name')
assert m.configured
def test_configure_on_get_props_3(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
- m= mapper(User, users)
+ m = mapper(User, users)
assert not m.configured
configure_mappers()
m2 = mapper(Address, addresses, properties={
- 'user':relationship(User, backref='addresses')
- })
+ 'user': relationship(User, backref='addresses')
+ })
assert m.get_property('addresses')
def test_info(self):
users = self.tables.users
Address = self.classes.Address
+
class MyComposite(object):
pass
for constructor, args in [
@@ -434,17 +443,17 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
# create specific tables here as we don't want
# users.c.id.info to be pre-initialized
users = Table('u', m, Column('id', Integer, primary_key=True),
- Column('name', String))
+ Column('name', String))
addresses = Table('a', m, Column('id', Integer, primary_key=True),
- Column('name', String),
- Column('user_id', Integer, ForeignKey('u.id')))
+ Column('name', String),
+ Column('user_id', Integer, ForeignKey('u.id')))
Address = self.classes.Address
User = self.classes.User
mapper(User, users, properties={
- "name_lower": column_property(func.lower(users.c.name)),
- "addresses": relationship(Address)
- })
+ "name_lower": column_property(func.lower(users.c.name)),
+ "addresses": relationship(Address)
+ })
mapper(Address, addresses)
# attr.info goes down to the original Column object
@@ -460,18 +469,19 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
# same for relationships
is_(User.addresses.info, User.addresses.property.info)
-
def test_add_property(self):
users, addresses, Address = (self.tables.users,
- self.tables.addresses,
- self.classes.Address)
+ self.tables.addresses,
+ self.classes.Address)
assert_col = []
class User(fixtures.ComparableEntity):
+
def _get_name(self):
assert_col.append(('get', self._name))
return self._name
+
def _set_name(self, name):
assert_col.append(('set', name))
self._name = name
@@ -503,7 +513,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
m.add_property('addresses', relationship(Address))
m.add_property('uc_name', sa.orm.comparable_property(UCComparator))
m.add_property('uc_name2', sa.orm.comparable_property(
- UCComparator, User.uc_name2))
+ UCComparator, User.uc_name2))
sess = create_session(autocommit=False)
assert sess.query(User).get(7)
@@ -534,7 +544,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
User()
m2 = mapper(Address, addresses, properties={
- 'user':relationship(User, backref="addresses")
+ 'user': relationship(User, backref="addresses")
})
# configure mappers takes place when User is generated
User()
@@ -545,7 +555,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
users, User = self.tables.users, self.classes.User
m = mapper(User, users)
- m.add_property('_name',users.c.name)
+ m.add_property('_name', users.c.name)
m.add_property('name', synonym('_name'))
sess = create_session()
@@ -572,8 +582,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
addresses, Address = self.tables.addresses, self.classes.Address
m = mapper(User, users, properties={
- "addresses": relationship(Address)
- })
+ "addresses": relationship(Address)
+ })
mapper(Address, addresses)
assert_raises_message(
@@ -588,14 +598,15 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_add_column_prop_deannotate(self):
User, users = self.classes.User, self.tables.users
Address, addresses = self.classes.Address, self.tables.addresses
+
class SubUser(User):
pass
m = mapper(User, users)
m2 = mapper(SubUser, addresses, inherits=User, properties={
- 'address_id': addresses.c.id
- })
+ 'address_id': addresses.c.id
+ })
m3 = mapper(Address, addresses, properties={
- 'foo':relationship(m2)
+ 'foo': relationship(m2)
})
# add property using annotated User.name,
# needs to be deannotated
@@ -612,7 +623,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"addresses_1.email_address AS "
"addresses_1_email_address, "
"users_1.name || :name_1 AS anon_1 "
- "FROM addresses JOIN (users AS users_1 JOIN addresses AS addresses_1 ON users_1.id = "
+ "FROM addresses JOIN (users AS users_1 JOIN addresses "
+ "AS addresses_1 ON users_1.id = "
"addresses_1.user_id) ON "
"users_1.id = addresses.user_id"
)
@@ -638,20 +650,23 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
assert User.y.property.columns[0] is not expr2
assert User.y.property.columns[0].element.\
- _raw_columns[0] is users.c.name
+ _raw_columns[0] is users.c.name
assert User.y.property.columns[0].element.\
- _raw_columns[1] is users.c.id
+ _raw_columns[1] is users.c.id
def test_synonym_replaces_backref(self):
addresses, users, User = (self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.users,
+ self.classes.User)
assert_calls = []
+
class Address(object):
+
def _get_user(self):
assert_calls.append("get")
return self._user
+
def _set_user(self, user):
assert_calls.append("set")
self._user = user
@@ -659,20 +674,20 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
# synonym is created against nonexistent prop
mapper(Address, addresses, properties={
- 'user':synonym('_user')
+ 'user': synonym('_user')
})
sa.orm.configure_mappers()
# later, backref sets up the prop
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='_user')
+ 'addresses': relationship(Address, backref='_user')
})
sess = create_session()
u1 = sess.query(User).get(7)
u2 = sess.query(User).get(8)
# comparaison ops need to work
- a1 = sess.query(Address).filter(Address.user==u1).one()
+ a1 = sess.query(Address).filter(Address.user == u1).one()
eq_(a1.id, 1)
a1.user = u2
assert a1.user is u2
@@ -680,16 +695,19 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_self_ref_synonym(self):
t = Table('nodes', MetaData(),
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('parent_id', Integer, ForeignKey('nodes.id')))
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('parent_id', Integer, ForeignKey('nodes.id')))
class Node(object):
pass
mapper(Node, t, properties={
- '_children':relationship(Node, backref=backref('_parent', remote_side=t.c.id)),
- 'children':synonym('_children'),
- 'parent':synonym('_parent')
+ '_children': relationship(
+ Node, backref=backref('_parent', remote_side=t.c.id)),
+ 'children': synonym('_children'),
+ 'parent': synonym('_parent')
})
n1 = Node()
@@ -702,13 +720,14 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_non_primary_identity_class(self):
User = self.classes.User
users, addresses = self.tables.users, self.tables.addresses
+
class AddressUser(User):
pass
m1 = mapper(User, users, polymorphic_identity='user')
m2 = mapper(AddressUser, addresses, inherits=User,
- polymorphic_identity='address', properties={
- 'address_id': addresses.c.id
- })
+ polymorphic_identity='address', properties={
+ 'address_id': addresses.c.id
+ })
m3 = mapper(AddressUser, addresses, non_primary=True)
assert m3._identity_class is m2._identity_class
eq_(
@@ -719,6 +738,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_reassign_polymorphic_identity_warns(self):
User = self.classes.User
users = self.tables.users
+
class MyUser(User):
pass
m1 = mapper(User, users, polymorphic_on=users.c.name,
@@ -730,17 +750,16 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
MyUser, users, inherits=User, polymorphic_identity='user'
)
-
def test_illegal_non_primary(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users)
mapper(Address, addresses)
mapper(User, users, non_primary=True, properties={
- 'addresses':relationship(Address)
+ 'addresses': relationship(Address)
})
assert_raises_message(
sa.exc.ArgumentError,
@@ -762,62 +781,90 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class Base(object):
pass
+
class Sub(Base):
pass
mapper(Base, users)
assert_raises_message(sa.exc.InvalidRequestError,
- "Configure a primary mapper first",
- mapper, Sub, addresses, non_primary=True
- )
+ "Configure a primary mapper first",
+ mapper, Sub, addresses, non_primary=True
+ )
def test_prop_filters(self):
t = Table('person', MetaData(),
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('type', String(128)),
Column('name', String(128)),
Column('employee_number', Integer),
Column('boss_id', Integer, ForeignKey('person.id')),
Column('vendor_id', Integer))
- class Person(object): pass
- class Vendor(Person): pass
- class Employee(Person): pass
- class Manager(Employee): pass
- class Hoho(object): pass
- class Lala(object): pass
- class Fub(object):pass
- class Frob(object):pass
+ class Person(object):
+ pass
+
+ class Vendor(Person):
+ pass
+
+ class Employee(Person):
+ pass
+
+ class Manager(Employee):
+ pass
+
+ class Hoho(object):
+ pass
+
+ class Lala(object):
+ pass
+
+ class Fub(object):
+ pass
+
+ class Frob(object):
+ pass
+
class HasDef(object):
+
def name(self):
pass
- class Empty(object):pass
- empty = mapper(Empty, t, properties={'empty_id' : t.c.id},
- include_properties=[])
+ class Empty(object):
+ pass
+
+ mapper(
+ Empty, t, properties={'empty_id': t.c.id},
+ include_properties=[])
p_m = mapper(Person, t, polymorphic_on=t.c.type,
include_properties=('id', 'type', 'name'))
e_m = mapper(Employee, inherits=p_m,
- polymorphic_identity='employee', properties={'boss'
- : relationship(Manager, backref=backref('peon'),
- remote_side=t.c.id)},
+ polymorphic_identity='employee',
+ properties={
+ 'boss': relationship(
+ Manager, backref=backref('peon'),
+ remote_side=t.c.id)},
exclude_properties=('vendor_id', ))
- m_m = mapper(Manager, inherits=e_m, polymorphic_identity='manager',
- include_properties=('id', 'type'))
+ mapper(
+ Manager, inherits=e_m, polymorphic_identity='manager',
+ include_properties=('id', 'type'))
- v_m = mapper(Vendor, inherits=p_m, polymorphic_identity='vendor',
- exclude_properties=('boss_id', 'employee_number'))
- h_m = mapper(Hoho, t, include_properties=('id', 'type', 'name'))
- l_m = mapper(Lala, t, exclude_properties=('vendor_id', 'boss_id'),
- column_prefix="p_")
+ mapper(
+ Vendor, inherits=p_m, polymorphic_identity='vendor',
+ exclude_properties=('boss_id', 'employee_number'))
+ mapper(Hoho, t, include_properties=('id', 'type', 'name'))
+ mapper(
+ Lala, t, exclude_properties=('vendor_id', 'boss_id'),
+ column_prefix="p_")
- hd_m = mapper(HasDef, t, column_prefix="h_")
+ mapper(HasDef, t, column_prefix="h_")
- fb_m = mapper(Fub, t, include_properties=(t.c.id, t.c.type))
- frb_m = mapper(Frob, t, column_prefix='f_',
- exclude_properties=(t.c.boss_id,
- 'employee_number', t.c.vendor_id))
+ mapper(Fub, t, include_properties=(t.c.id, t.c.type))
+ mapper(
+ Frob, t, column_prefix='f_',
+ exclude_properties=(
+ t.c.boss_id,
+ 'employee_number', t.c.vendor_id))
configure_mappers()
@@ -832,13 +879,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
eq_(have, want)
assert_props(HasDef, ['h_boss_id', 'h_employee_number', 'h_id',
- 'name', 'h_name', 'h_vendor_id', 'h_type'])
+ 'name', 'h_name', 'h_vendor_id', 'h_type'])
assert_props(Person, ['id', 'name', 'type'])
assert_instrumented(Person, ['id', 'name', 'type'])
assert_props(Employee, ['boss', 'boss_id', 'employee_number',
'id', 'name', 'type'])
- assert_instrumented(Employee,['boss', 'boss_id', 'employee_number',
- 'id', 'name', 'type'])
+ assert_instrumented(Employee, ['boss', 'boss_id', 'employee_number',
+ 'id', 'name', 'type'])
assert_props(Manager, ['boss', 'boss_id', 'employee_number', 'peon',
'id', 'name', 'type'])
@@ -851,7 +898,6 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
assert_props(Fub, ['id', 'type'])
assert_props(Frob, ['f_id', 'f_type', 'f_name', ])
-
# putting the discriminator column in exclude_properties,
# very weird. As of 0.7.4 this re-maps it.
class Foo(Person):
@@ -869,10 +915,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_prop_filters_defaults(self):
metadata = self.metadata
t = Table('t', metadata,
- Column('id', Integer(), primary_key=True, test_needs_autoincrement=True),
- Column('x', Integer(), nullable=False, server_default='0')
- )
+ Column(
+ 'id', Integer(), primary_key=True,
+ test_needs_autoincrement=True),
+ Column('x', Integer(), nullable=False, server_default='0')
+ )
t.create()
+
class A(object):
pass
mapper(A, t, include_properties=['id'])
@@ -882,6 +931,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_we_dont_call_bool(self):
class NoBoolAllowed(object):
+
def __bool__(self):
raise Exception("nope")
mapper(NoBoolAllowed, self.tables.users)
@@ -894,6 +944,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_we_dont_call_eq(self):
class NoEqAllowed(object):
+
def __eq__(self, other):
raise Exception("nope")
@@ -901,7 +952,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
Address = self.classes.Address
mapper(NoEqAllowed, users, properties={
- 'addresses':relationship(Address, backref='user')
+ 'addresses': relationship(Address, backref='user')
})
mapper(Address, addresses)
@@ -919,9 +970,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""Test implicit merging of two cols raises."""
addresses, users, User = (self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
+ self.tables.users,
+ self.classes.User)
usersaddresses = sa.join(users, addresses,
users.c.id == addresses.c.user_id)
@@ -935,14 +985,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""Mapping to a join"""
User, addresses, users = (self.classes.User,
- self.tables.addresses,
- self.tables.users)
-
+ self.tables.addresses,
+ self.tables.users)
usersaddresses = sa.join(users, addresses, users.c.id
== addresses.c.user_id)
mapper(User, usersaddresses, primary_key=[users.c.id],
- properties={'add_id':addresses.c.id}
+ properties={'add_id': addresses.c.id}
)
l = create_session().query(User).order_by(users.c.id).all()
eq_(l, self.static.user_result[:3])
@@ -951,9 +1000,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""Mapping to a join"""
User, addresses, users = (self.classes.User,
- self.tables.addresses,
- self.tables.users)
-
+ self.tables.addresses,
+ self.tables.users)
usersaddresses = sa.join(users, addresses, users.c.id
== addresses.c.user_id)
@@ -965,13 +1013,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_mapping_to_join_no_pk(self):
email_bounces, addresses, Address = (self.tables.email_bounces,
- self.tables.addresses,
- self.classes.Address)
+ self.tables.addresses,
+ self.classes.Address)
m = mapper(Address,
- addresses.join(email_bounces),
- properties={'id':[addresses.c.id, email_bounces.c.id]}
- )
+ addresses.join(email_bounces),
+ properties={'id': [addresses.c.id, email_bounces.c.id]}
+ )
configure_mappers()
assert addresses in m._pks_by_table
assert email_bounces not in m._pks_by_table
@@ -988,10 +1036,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""Mapping to an outer join with a nullable composite primary key."""
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
-
-
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users.outerjoin(addresses),
primary_key=[users.c.id, addresses.c.id],
@@ -1013,13 +1059,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""test the allow_partial_pks=False flag."""
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
-
-
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users.outerjoin(addresses),
- allow_partial_pks=False,
+ allow_partial_pks=False,
primary_key=[users.c.id, addresses.c.id],
properties=dict(
address_id=addresses.c.id))
@@ -1037,11 +1081,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_scalar_pk_arg(self):
users, Keyword, items, Item, User, keywords = (self.tables.users,
- self.classes.Keyword,
- self.tables.items,
- self.classes.Item,
- self.classes.User,
- self.tables.keywords)
+ self.classes.Keyword,
+ self.tables.items,
+ self.classes.Item,
+ self.classes.User,
+ self.tables.keywords)
m1 = mapper(Item, items, primary_key=[items.c.id])
m2 = mapper(Keyword, keywords, primary_key=keywords.c.id)
@@ -1051,18 +1095,17 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
assert m2.primary_key[0] is keywords.c.id
assert m3.primary_key[0] is users.c.id
-
def test_custom_join(self):
"""select_from totally replace the FROM parameters."""
- users, items, order_items, orders, Item, User, Order = (self.tables.users,
- self.tables.items,
- self.tables.order_items,
- self.tables.orders,
- self.classes.Item,
- self.classes.User,
- self.classes.Order)
-
+ users, items, order_items, orders, Item, User, Order = (
+ self.tables.users,
+ self.tables.items,
+ self.tables.order_items,
+ self.tables.orders,
+ self.classes.Item,
+ self.classes.User,
+ self.classes.Order)
mapper(Item, items)
@@ -1086,18 +1129,24 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
mapper(User, users, order_by=users.c.name.desc())
- assert "order by users.name desc" in str(create_session().query(User).statement).lower()
- assert "order by" not in str(create_session().query(User).order_by(None).statement).lower()
- assert "order by users.name asc" in str(create_session().query(User).order_by(User.name.asc()).statement).lower()
+ assert "order by users.name desc" in \
+ str(create_session().query(User).statement).lower()
+ assert "order by" not in \
+ str(create_session().query(User).order_by(None).statement).lower()
+ assert "order by users.name asc" in \
+ str(create_session().query(User).order_by(
+ User.name.asc()).statement).lower()
eq_(
create_session().query(User).all(),
- [User(id=7, name='jack'), User(id=9, name='fred'), User(id=8, name='ed'), User(id=10, name='chuck')]
+ [User(id=7, name='jack'), User(id=9, name='fred'),
+ User(id=8, name='ed'), User(id=10, name='chuck')]
)
eq_(
create_session().query(User).order_by(User.name).all(),
- [User(id=10, name='chuck'), User(id=8, name='ed'), User(id=9, name='fred'), User(id=7, name='jack')]
+ [User(id=10, name='chuck'), User(id=8, name='ed'),
+ User(id=9, name='fred'), User(id=7, name='jack')]
)
# 'Raises a "expression evaluation not supported" error at prepare time
@@ -1106,9 +1155,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""Mapping to a SELECT statement that has functions in it."""
addresses, users, User = (self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
+ self.tables.users,
+ self.classes.User)
s = sa.select([users,
(users.c.id * 2).label('concat'),
@@ -1129,29 +1177,29 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
User, users = self.classes.User, self.tables.users
-
mapper(User, users)
session = create_session()
q = session.query(User)
eq_(q.count(), 4)
- eq_(q.filter(User.id.in_([8,9])).count(), 2)
- eq_(q.filter(users.c.id.in_([8,9])).count(), 2)
+ eq_(q.filter(User.id.in_([8, 9])).count(), 2)
+ eq_(q.filter(users.c.id.in_([8, 9])).count(), 2)
eq_(session.query(User.id).count(), 4)
eq_(session.query(User.id).filter(User.id.in_((8, 9))).count(), 2)
def test_many_to_many_count(self):
- keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
- self.tables.items,
- self.tables.item_keywords,
- self.classes.Keyword,
- self.classes.Item)
+ keywords, items, item_keywords, Keyword, Item = (
+ self.tables.keywords,
+ self.tables.items,
+ self.tables.item_keywords,
+ self.classes.Keyword,
+ self.classes.Item)
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
- keywords = relationship(Keyword, item_keywords, lazy='select')))
+ keywords=relationship(Keyword, item_keywords, lazy='select')))
session = create_session()
q = (session.query(Item).
@@ -1164,9 +1212,9 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""Overriding a column raises an error."""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
def go():
mapper(User, users,
@@ -1179,10 +1227,9 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""exclude_properties cancels the error."""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users,
exclude_properties=['name'],
@@ -1195,9 +1242,9 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""The column being named elsewhere also cancels the error,"""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users,
properties=dict(
@@ -1206,28 +1253,30 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_synonym(self):
users, addresses, Address = (self.tables.users,
- self.tables.addresses,
- self.classes.Address)
-
+ self.tables.addresses,
+ self.classes.Address)
assert_col = []
+
class extendedproperty(property):
attribute = 123
class User(object):
+
def _get_name(self):
assert_col.append(('get', self.name))
return self.name
+
def _set_name(self, name):
assert_col.append(('set', name))
self.name = name
uname = extendedproperty(_get_name, _set_name)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='select'),
- uname = synonym('name'),
- adlist = synonym('addresses'),
- adname = synonym('addresses')
+ addresses=relationship(mapper(Address, addresses), lazy='select'),
+ uname=synonym('name'),
+ adlist=synonym('addresses'),
+ adname=synonym('addresses')
))
# ensure the synonym can get at the proxied comparators without
@@ -1251,7 +1300,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
row = sess.query(User.id, User.uname).first()
assert row.uname == row[1]
- u = sess.query(User).filter(User.uname=='jack').one()
+ u = sess.query(User).filter(User.uname == 'jack').one()
fixture = self.static.user_address_result[0].addresses
eq_(u.adlist, fixture)
@@ -1274,25 +1323,24 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
eq_(User.uname.attribute, 123)
def test_synonym_of_synonym(self):
- users, User = (self.tables.users,
- self.classes.User)
+ users, User = (self.tables.users,
+ self.classes.User)
mapper(User, users, properties={
- 'x':synonym('id'),
- 'y':synonym('x')
+ 'x': synonym('id'),
+ 'y': synonym('x')
})
s = Session()
- u = s.query(User).filter(User.y==8).one()
+ u = s.query(User).filter(User.y == 8).one()
eq_(u.y, 8)
-
def test_synonym_column_location(self):
users, User = self.tables.users, self.classes.User
def go():
mapper(User, users, properties={
- 'not_name':synonym('_name', map_column=True)})
+ 'not_name': synonym('_name', map_column=True)})
assert_raises_message(
sa.exc.ArgumentError,
@@ -1301,28 +1349,30 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
go)
def test_column_synonyms(self):
- """Synonyms which automatically instrument properties, set up aliased column, etc."""
+ """Synonyms which automatically instrument properties,
+ set up aliased column, etc."""
addresses, users, Address = (self.tables.addresses,
- self.tables.users,
- self.classes.Address)
-
-
+ self.tables.users,
+ self.classes.Address)
assert_col = []
+
class User(object):
+
def _get_name(self):
assert_col.append(('get', self._name))
return self._name
+
def _set_name(self, name):
assert_col.append(('set', name))
self._name = name
name = property(_get_name, _set_name)
mapper(Address, addresses)
- mapper(User, users, properties = {
- 'addresses':relationship(Address, lazy='select'),
- 'name':synonym('_name', map_column=True)
+ mapper(User, users, properties={
+ 'addresses': relationship(Address, lazy='select'),
+ 'name': synonym('_name', map_column=True)
})
# test compile
@@ -1369,6 +1419,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
return "method1"
from sqlalchemy.orm.properties import ColumnProperty
+
class UCComparator(ColumnProperty.Comparator):
__hash__ = None
@@ -1388,6 +1439,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def map_(with_explicit_property):
class User(object):
+
@extendedproperty
def uc_name(self):
if self.name is None:
@@ -1398,7 +1450,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
else:
args = (UCComparator,)
mapper(User, users, properties=dict(
- uc_name = sa.orm.comparable_property(*args)))
+ uc_name=sa.orm.comparable_property(*args)))
return User
for User in (map_(True), map_(False)):
@@ -1415,12 +1467,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
assert_raises_message(
AttributeError,
"Neither 'extendedproperty' object nor 'UCComparator' "
- "object associated with User.uc_name has an attribute 'nonexistent'",
+ "object associated with User.uc_name has an attribute "
+ "'nonexistent'",
getattr, User.uc_name, 'nonexistent')
# test compile
assert not isinstance(User.uc_name == 'jack', bool)
- u = q.filter(User.uc_name=='JACK').one()
+ u = q.filter(User.uc_name == 'JACK').one()
assert u.uc_name == "JACK"
assert u not in sess.dirty
@@ -1447,10 +1500,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class MyComparator(sa.orm.properties.ColumnProperty.Comparator):
__hash__ = None
+
def __eq__(self, other):
# lower case comparison
return func.lower(self.__clause_element__()
- ) == func.lower(other)
+ ) == func.lower(other)
def intersects(self, other):
# non-standard comparator
@@ -1458,7 +1512,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
mapper(User, users, properties={
'name': sa.orm.column_property(users.c.name,
- comparator_factory=MyComparator)
+ comparator_factory=MyComparator)
})
assert_raises_message(
@@ -1470,39 +1524,41 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
eq_(
str((User.name == 'ed').compile(
- dialect=sa.engine.default.DefaultDialect())),
+ dialect=sa.engine.default.DefaultDialect())),
"lower(users.name) = lower(:lower_1)")
eq_(
str((User.name.intersects('ed')).compile(
- dialect=sa.engine.default.DefaultDialect())),
+ dialect=sa.engine.default.DefaultDialect())),
"users.name &= :name_1")
-
def test_reentrant_compile(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
class MyFakeProperty(sa.orm.properties.ColumnProperty):
+
def post_instrument_class(self, mapper):
super(MyFakeProperty, self).post_instrument_class(mapper)
configure_mappers()
m1 = mapper(User, users, properties={
- 'name':MyFakeProperty(users.c.name)
+ 'name': MyFakeProperty(users.c.name)
})
m2 = mapper(Address, addresses)
configure_mappers()
sa.orm.clear_mappers()
+
class MyFakeProperty(sa.orm.properties.ColumnProperty):
+
def post_instrument_class(self, mapper):
super(MyFakeProperty, self).post_instrument_class(mapper)
configure_mappers()
m1 = mapper(User, users, properties={
- 'name':MyFakeProperty(users.c.name)
+ 'name': MyFakeProperty(users.c.name)
})
m2 = mapper(Address, addresses)
configure_mappers()
@@ -1513,6 +1569,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
recon = []
class User(object):
+
@reconstructor
def reconstruct(self):
recon.append('go')
@@ -1528,19 +1585,23 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
users = self.tables.users
recon = []
+
class A(object):
+
@reconstructor
def reconstruct(self):
assert isinstance(self, A)
recon.append('A')
class B(A):
+
@reconstructor
def reconstruct(self):
assert isinstance(self, B)
recon.append('B')
class C(A):
+
@reconstructor
def reconstruct(self):
assert isinstance(self, C)
@@ -1566,7 +1627,9 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
users = self.tables.users
recon = []
+
class Base(object):
+
@reconstructor
def reconstruct(self):
recon.append('go')
@@ -1584,15 +1647,15 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_unmapped_error(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(Address, addresses)
sa.orm.clear_mappers()
mapper(User, users, properties={
- 'addresses':relationship(Address)
+ 'addresses': relationship(Address)
})
assert_raises_message(
@@ -1621,9 +1684,10 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
Address = self.classes.Address
mapper(User, users, properties={
- "addresses": relationship(Address,
- primaryjoin=lambda: users.c.id == addresses.wrong.user_id)
- })
+ "addresses": relationship(
+ Address,
+ primaryjoin=lambda: users.c.id == addresses.wrong.user_id)
+ })
mapper(Address, addresses)
assert_raises_message(
AttributeError,
@@ -1638,10 +1702,10 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
Address = self.classes.Address
mapper(User, users, properties={
- "addresses": relationship(Address,
- primaryjoin=lambda: users.c.id ==
- addresses.__dict__['wrong'].user_id)
- })
+ "addresses": relationship(Address,
+ primaryjoin=lambda: users.c.id ==
+ addresses.__dict__['wrong'].user_id)
+ })
mapper(Address, addresses)
assert_raises_message(
KeyError,
@@ -1654,6 +1718,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class Base(object):
pass
+
class Sub(Base):
pass
@@ -1671,7 +1736,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
# using it with an ORM operation, raises
assert_raises(sa.orm.exc.UnmappedClassError,
- create_session().add, Sub())
+ create_session().add, Sub())
def test_unmapped_subclass_error_premap(self):
users = self.tables.users
@@ -1697,13 +1762,14 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
# using it with an ORM operation, raises
assert_raises(sa.orm.exc.UnmappedClassError,
- create_session().add, Sub())
+ create_session().add, Sub())
def test_oldstyle_mixin(self):
users = self.tables.users
class OldStyle:
pass
+
class NewStyle(object):
pass
@@ -1717,22 +1783,26 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
mapper(B, users)
+
class DocumentTest(fixtures.TestBase):
def test_doc_propagate(self):
metadata = MetaData()
t1 = Table('t1', metadata,
- Column('col1', Integer, primary_key=True, doc="primary key column"),
- Column('col2', String, doc="data col"),
- Column('col3', String, doc="data col 2"),
- Column('col4', String, doc="data col 3"),
- Column('col5', String),
- )
+ Column('col1', Integer, primary_key=True,
+ doc="primary key column"),
+ Column('col2', String, doc="data col"),
+ Column('col3', String, doc="data col 2"),
+ Column('col4', String, doc="data col 3"),
+ Column('col5', String),
+ )
t2 = Table('t2', metadata,
- Column('col1', Integer, primary_key=True, doc="primary key column"),
- Column('col2', String, doc="data col"),
- Column('col3', Integer, ForeignKey('t1.col1'), doc="foreign key to t1.col1")
- )
+ Column('col1', Integer, primary_key=True,
+ doc="primary key column"),
+ Column('col2', String, doc="data col"),
+ Column('col3', Integer, ForeignKey('t1.col1'),
+ doc="foreign key to t1.col1")
+ )
class Foo(object):
pass
@@ -1741,12 +1811,12 @@ class DocumentTest(fixtures.TestBase):
pass
mapper(Foo, t1, properties={
- 'bars':relationship(Bar,
- doc="bar relationship",
- backref=backref('foo',doc='foo relationship')
- ),
- 'foober':column_property(t1.c.col3, doc='alternate data col'),
- 'hoho':synonym("col4", doc="syn of col4")
+ 'bars': relationship(Bar,
+ doc="bar relationship",
+ backref=backref('foo', doc='foo relationship')
+ ),
+ 'foober': column_property(t1.c.col3, doc='alternate data col'),
+ 'hoho': synonym("col4", doc="syn of col4")
})
mapper(Bar, t2)
configure_mappers()
@@ -1759,7 +1829,9 @@ class DocumentTest(fixtures.TestBase):
eq_(Bar.col1.__doc__, "primary key column")
eq_(Bar.foo.__doc__, "foo relationship")
+
class ORMLoggingTest(_fixtures.FixtureTest):
+
def setup(self):
self.buf = logging.handlers.BufferingHandler(100)
for log in [
@@ -1787,18 +1859,19 @@ class ORMLoggingTest(_fixtures.FixtureTest):
for msg in self._current_messages():
assert msg.startswith('(User|%%(%d anon)s) ' % id(tb))
+
class OptionsTest(_fixtures.FixtureTest):
def test_synonym_options(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='select',
- order_by=addresses.c.id),
- adlist = synonym('addresses')))
+ addresses=relationship(mapper(Address, addresses), lazy='select',
+ order_by=addresses.c.id),
+ adlist=synonym('addresses')))
def go():
sess = create_session()
@@ -1814,13 +1887,13 @@ class OptionsTest(_fixtures.FixtureTest):
"""A lazy relationship can be upgraded to an eager relationship."""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses),
- order_by=addresses.c.id)))
+ addresses=relationship(mapper(Address, addresses),
+ order_by=addresses.c.id)))
sess = create_session()
l = (sess.query(User).
@@ -1833,9 +1906,9 @@ class OptionsTest(_fixtures.FixtureTest):
def test_eager_options_with_limit(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users, properties=dict(
addresses=relationship(mapper(Address, addresses), lazy='select')))
@@ -1858,12 +1931,12 @@ class OptionsTest(_fixtures.FixtureTest):
def test_lazy_options_with_limit(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='joined')))
+ addresses=relationship(mapper(Address, addresses), lazy='joined')))
sess = create_session()
u = (sess.query(User).
@@ -1880,16 +1953,17 @@ class OptionsTest(_fixtures.FixtureTest):
if eager columns are not available"""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses),
- lazy='joined', order_by=addresses.c.id)))
+ addresses=relationship(mapper(Address, addresses),
+ lazy='joined', order_by=addresses.c.id)))
sess = create_session()
# first test straight eager load, 1 statement
+
def go():
l = sess.query(User).order_by(User.id).all()
eq_(l, self.static.user_address_result)
@@ -1902,24 +1976,27 @@ class OptionsTest(_fixtures.FixtureTest):
# (previous users in session fell out of scope and were removed from
# session's identity map)
r = users.select().order_by(users.c.id).execute()
+
def go():
l = list(sess.query(User).instances(r))
eq_(l, self.static.user_address_result)
self.sql_count_(4, go)
def test_eager_degrade_deep(self):
- users, Keyword, items, order_items, orders, Item, User, Address, keywords, item_keywords, Order, addresses = (self.tables.users,
- self.classes.Keyword,
- self.tables.items,
- self.tables.order_items,
- self.tables.orders,
- self.classes.Item,
- self.classes.User,
- self.classes.Address,
- self.tables.keywords,
- self.tables.item_keywords,
- self.classes.Order,
- self.tables.addresses)
+ users, Keyword, items, order_items, orders, \
+ Item, User, Address, keywords, item_keywords, Order, addresses = (
+ self.tables.users,
+ self.classes.Keyword,
+ self.tables.items,
+ self.tables.order_items,
+ self.tables.orders,
+ self.classes.Item,
+ self.classes.User,
+ self.classes.Address,
+ self.tables.keywords,
+ self.tables.item_keywords,
+ self.classes.Order,
+ self.tables.addresses)
# test with a deeper set of eager loads. when we first load the three
# users, they will have no addresses or orders. the number of lazy
@@ -1931,18 +2008,18 @@ class OptionsTest(_fixtures.FixtureTest):
mapper(Item, items, properties=dict(
keywords=relationship(Keyword, secondary=item_keywords,
- lazy='joined',
- order_by=item_keywords.c.keyword_id)))
+ lazy='joined',
+ order_by=item_keywords.c.keyword_id)))
mapper(Order, orders, properties=dict(
items=relationship(Item, secondary=order_items, lazy='joined',
- order_by=order_items.c.item_id)))
+ order_by=order_items.c.item_id)))
mapper(User, users, properties=dict(
addresses=relationship(Address, lazy='joined',
- order_by=addresses.c.id),
+ order_by=addresses.c.id),
orders=relationship(Order, lazy='joined',
- order_by=orders.c.id)))
+ order_by=orders.c.id)))
sess = create_session()
@@ -1957,6 +2034,7 @@ class OptionsTest(_fixtures.FixtureTest):
# then select just from users. run it into instances.
# then assert the data, which will launch 6 more lazy loads
r = users.select().execute()
+
def go():
l = list(sess.query(User).instances(r))
eq_(l, self.static.user_all_result)
@@ -1966,12 +2044,12 @@ class OptionsTest(_fixtures.FixtureTest):
"""An eager relationship can be upgraded to a lazy relationship."""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='joined')
+ addresses=relationship(mapper(Address, addresses), lazy='joined')
))
sess = create_session()
@@ -1984,19 +2062,20 @@ class OptionsTest(_fixtures.FixtureTest):
self.sql_count_(4, go)
def test_option_propagate(self):
- users, items, order_items, Order, Item, User, orders = (self.tables.users,
- self.tables.items,
- self.tables.order_items,
- self.classes.Order,
- self.classes.Item,
- self.classes.User,
- self.tables.orders)
+ users, items, order_items, Order, Item, User, orders = (
+ self.tables.users,
+ self.tables.items,
+ self.tables.order_items,
+ self.classes.Order,
+ self.classes.Item,
+ self.classes.User,
+ self.tables.orders)
mapper(User, users, properties=dict(
- orders = relationship(Order)
+ orders=relationship(Order)
))
mapper(Order, orders, properties=dict(
- items = relationship(Item, secondary=order_items)
+ items=relationship(Item, secondary=order_items)
))
mapper(Item, items)
@@ -2005,35 +2084,39 @@ class OptionsTest(_fixtures.FixtureTest):
oalias = aliased(Order)
opt1 = sa.orm.joinedload(User.orders, Order.items)
opt2 = sa.orm.contains_eager(User.orders, Order.items, alias=oalias)
- u1 = sess.query(User).join(oalias, User.orders).options(opt1, opt2).first()
+ u1 = sess.query(User).join(oalias, User.orders).\
+ options(opt1, opt2).first()
ustate = attributes.instance_state(u1)
assert opt1 in ustate.load_options
assert opt2 not in ustate.load_options
class DeepOptionsTest(_fixtures.FixtureTest):
+
@classmethod
def setup_mappers(cls):
- users, Keyword, items, order_items, Order, Item, User, keywords, item_keywords, orders = (cls.tables.users,
- cls.classes.Keyword,
- cls.tables.items,
- cls.tables.order_items,
- cls.classes.Order,
- cls.classes.Item,
- cls.classes.User,
- cls.tables.keywords,
- cls.tables.item_keywords,
- cls.tables.orders)
+ users, Keyword, items, order_items, Order, Item, User, \
+ keywords, item_keywords, orders = (
+ cls.tables.users,
+ cls.classes.Keyword,
+ cls.tables.items,
+ cls.tables.order_items,
+ cls.classes.Order,
+ cls.classes.Item,
+ cls.classes.User,
+ cls.tables.keywords,
+ cls.tables.item_keywords,
+ cls.tables.orders)
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
keywords=relationship(Keyword, item_keywords,
- order_by=item_keywords.c.item_id)))
+ order_by=item_keywords.c.item_id)))
mapper(Order, orders, properties=dict(
items=relationship(Item, order_items,
- order_by=items.c.id)))
+ order_by=items.c.id)))
mapper(User, users, order_by=users.c.id, properties=dict(
orders=relationship(Order, order_by=orders.c.id)))
@@ -2045,8 +2128,9 @@ class DeepOptionsTest(_fixtures.FixtureTest):
# joinedload nothing.
u = sess.query(User).all()
+
def go():
- x = u[0].orders[1].items[0].keywords[1]
+ u[0].orders[1].items[0].keywords[1]
self.assert_sql_count(testing.db, go, 3)
def test_deep_options_2(self):
@@ -2054,24 +2138,24 @@ class DeepOptionsTest(_fixtures.FixtureTest):
User = self.classes.User
-
sess = create_session()
l = (sess.query(User).
- options(sa.orm.joinedload_all('orders.items.keywords'))).all()
+ options(sa.orm.joinedload_all('orders.items.keywords'))).all()
+
def go():
- x = l[0].orders[1].items[0].keywords[1]
+ l[0].orders[1].items[0].keywords[1]
self.sql_count_(0, go)
sess = create_session()
l = (sess.query(User).
- options(sa.orm.subqueryload_all('orders.items.keywords'))).all()
+ options(sa.orm.subqueryload_all('orders.items.keywords'))).all()
+
def go():
- x = l[0].orders[1].items[0].keywords[1]
+ l[0].orders[1].items[0].keywords[1]
self.sql_count_(0, go)
-
def test_deep_options_3(self):
User = self.classes.User
@@ -2083,14 +2167,15 @@ class DeepOptionsTest(_fixtures.FixtureTest):
options(sa.orm.joinedload('orders.items')).
options(sa.orm.joinedload('orders.items.keywords')))
u = q2.all()
+
def go():
- x = u[0].orders[1].items[0].keywords[1]
+ u[0].orders[1].items[0].keywords[1]
self.sql_count_(0, go)
def test_deep_options_4(self):
Item, User, Order = (self.classes.Item,
- self.classes.User,
- self.classes.Order)
+ self.classes.User,
+ self.classes.Order)
sess = create_session()
@@ -2103,25 +2188,31 @@ class DeepOptionsTest(_fixtures.FixtureTest):
# joinedload "keywords" on items. it will lazy load "orders", then
# lazy load the "items" on the order, but on "items" it will eager
# load the "keywords"
- q3 = sess.query(User).options(sa.orm.joinedload('orders.items.keywords'))
+ q3 = sess.query(User).options(
+ sa.orm.joinedload('orders.items.keywords'))
u = q3.all()
+
def go():
- x = u[0].orders[1].items[0].keywords[1]
+ u[0].orders[1].items[0].keywords[1]
self.sql_count_(2, go)
sess = create_session()
q3 = sess.query(User).options(
- sa.orm.joinedload(User.orders, Order.items, Item.keywords))
+ sa.orm.joinedload(User.orders, Order.items, Item.keywords))
u = q3.all()
+
def go():
- x = u[0].orders[1].items[0].keywords[1]
+ u[0].orders[1].items[0].keywords[1]
self.sql_count_(2, go)
+
class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL):
+
def test_kwarg_accepted(self):
users, Address = self.tables.users, self.classes.Address
class DummyComposite(object):
+
def __init__(self, x, y):
pass
@@ -2151,41 +2242,56 @@ class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class MyFactory(ColumnProperty.Comparator):
__hash__ = None
+
def __eq__(self, other):
- return func.foobar(self.__clause_element__()) == func.foobar(other)
- mapper(User, users, properties={'name':column_property(users.c.name, comparator_factory=MyFactory)})
- self.assert_compile(User.name == 'ed', "foobar(users.name) = foobar(:foobar_1)", dialect=default.DefaultDialect())
- self.assert_compile(aliased(User).name == 'ed', "foobar(users_1.name) = foobar(:foobar_1)", dialect=default.DefaultDialect())
+ return func.foobar(self.__clause_element__()) == \
+ func.foobar(other)
+ mapper(
+ User, users,
+ properties={
+ 'name': column_property(
+ users.c.name, comparator_factory=MyFactory)})
+ self.assert_compile(
+ User.name == 'ed',
+ "foobar(users.name) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect()
+ )
+ self.assert_compile(
+ aliased(User).name == 'ed',
+ "foobar(users_1.name) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
def test_synonym(self):
users, User = self.tables.users, self.classes.User
from sqlalchemy.orm.properties import ColumnProperty
+
class MyFactory(ColumnProperty.Comparator):
__hash__ = None
+
def __eq__(self, other):
return func.foobar(self.__clause_element__()) ==\
- func.foobar(other)
+ func.foobar(other)
mapper(User, users, properties={
- 'name':synonym('_name', map_column=True,
- comparator_factory=MyFactory)
- })
+ 'name': synonym('_name', map_column=True,
+ comparator_factory=MyFactory)
+ })
self.assert_compile(
- User.name == 'ed',
- "foobar(users.name) = foobar(:foobar_1)",
- dialect=default.DefaultDialect())
+ User.name == 'ed',
+ "foobar(users.name) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
self.assert_compile(
- aliased(User).name == 'ed',
- "foobar(users_1.name) = foobar(:foobar_1)",
- dialect=default.DefaultDialect())
+ aliased(User).name == 'ed',
+ "foobar(users_1.name) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
def test_relationship(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
from sqlalchemy.orm.properties import RelationshipProperty
@@ -2194,46 +2300,50 @@ class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL):
# primaryjoin/secondaryjoin
class MyFactory(RelationshipProperty.Comparator):
__hash__ = None
+
def __eq__(self, other):
return func.foobar(self._source_selectable().c.user_id) == \
func.foobar(other.id)
class MyFactory2(RelationshipProperty.Comparator):
__hash__ = None
+
def __eq__(self, other):
return func.foobar(self._source_selectable().c.id) == \
func.foobar(other.user_id)
mapper(User, users)
mapper(Address, addresses, properties={
- 'user': relationship(User, comparator_factory=MyFactory,
+ 'user': relationship(
+ User, comparator_factory=MyFactory,
backref=backref("addresses", comparator_factory=MyFactory2)
)
- }
+ }
)
# these are kind of nonsensical tests.
self.assert_compile(Address.user == User(id=5),
- "foobar(addresses.user_id) = foobar(:foobar_1)",
- dialect=default.DefaultDialect())
+ "foobar(addresses.user_id) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
self.assert_compile(User.addresses == Address(id=5, user_id=7),
- "foobar(users.id) = foobar(:foobar_1)",
- dialect=default.DefaultDialect())
+ "foobar(users.id) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
self.assert_compile(
- aliased(Address).user == User(id=5),
- "foobar(addresses_1.user_id) = foobar(:foobar_1)",
- dialect=default.DefaultDialect())
+ aliased(Address).user == User(id=5),
+ "foobar(addresses_1.user_id) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
self.assert_compile(
- aliased(User).addresses == Address(id=5, user_id=7),
- "foobar(users_1.id) = foobar(:foobar_1)",
- dialect=default.DefaultDialect())
-
+ aliased(User).addresses == Address(id=5, user_id=7),
+ "foobar(users_1.id) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
class SecondaryOptionsTest(fixtures.MappedTest):
- """test that the contains_eager() option doesn't bleed into a secondary load."""
+
+ """test that the contains_eager() option doesn't bleed
+ into a secondary load."""
run_inserts = 'once'
@@ -2242,80 +2352,84 @@ class SecondaryOptionsTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("base", metadata,
- Column('id', Integer, primary_key=True),
- Column('type', String(50), nullable=False)
- )
+ Column('id', Integer, primary_key=True),
+ Column('type', String(50), nullable=False)
+ )
Table("child1", metadata,
- Column('id', Integer, ForeignKey('base.id'), primary_key=True),
- Column('child2id', Integer, ForeignKey('child2.id'), nullable=False)
- )
+ Column('id', Integer, ForeignKey('base.id'), primary_key=True),
+ Column(
+ 'child2id', Integer, ForeignKey('child2.id'), nullable=False)
+ )
Table("child2", metadata,
- Column('id', Integer, ForeignKey('base.id'), primary_key=True),
- )
+ Column('id', Integer, ForeignKey('base.id'), primary_key=True),
+ )
Table('related', metadata,
- Column('id', Integer, ForeignKey('base.id'), primary_key=True),
- )
+ Column('id', Integer, ForeignKey('base.id'), primary_key=True),
+ )
@classmethod
def setup_mappers(cls):
child1, child2, base, related = (cls.tables.child1,
- cls.tables.child2,
- cls.tables.base,
- cls.tables.related)
+ cls.tables.child2,
+ cls.tables.base,
+ cls.tables.related)
class Base(cls.Comparable):
pass
+
class Child1(Base):
pass
+
class Child2(Base):
pass
+
class Related(cls.Comparable):
pass
mapper(Base, base, polymorphic_on=base.c.type, properties={
- 'related':relationship(Related, uselist=False)
+ 'related': relationship(Related, uselist=False)
})
mapper(Child1, child1, inherits=Base,
- polymorphic_identity='child1',
- properties={
- 'child2':relationship(Child2,
- primaryjoin=child1.c.child2id==base.c.id,
- foreign_keys=child1.c.child2id)
- })
+ polymorphic_identity='child1',
+ properties={
+ 'child2': relationship(Child2,
+ primaryjoin=child1.c.child2id == base.c.id,
+ foreign_keys=child1.c.child2id)
+ })
mapper(Child2, child2, inherits=Base, polymorphic_identity='child2')
mapper(Related, related)
@classmethod
def insert_data(cls):
child1, child2, base, related = (cls.tables.child1,
- cls.tables.child2,
- cls.tables.base,
- cls.tables.related)
+ cls.tables.child2,
+ cls.tables.base,
+ cls.tables.related)
base.insert().execute([
- {'id':1, 'type':'child1'},
- {'id':2, 'type':'child1'},
- {'id':3, 'type':'child1'},
- {'id':4, 'type':'child2'},
- {'id':5, 'type':'child2'},
- {'id':6, 'type':'child2'},
+ {'id': 1, 'type': 'child1'},
+ {'id': 2, 'type': 'child1'},
+ {'id': 3, 'type': 'child1'},
+ {'id': 4, 'type': 'child2'},
+ {'id': 5, 'type': 'child2'},
+ {'id': 6, 'type': 'child2'},
])
child2.insert().execute([
- {'id':4},
- {'id':5},
- {'id':6},
+ {'id': 4},
+ {'id': 5},
+ {'id': 6},
])
child1.insert().execute([
- {'id':1, 'child2id':4},
- {'id':2, 'child2id':5},
- {'id':3, 'child2id':6},
+ {'id': 1, 'child2id': 4},
+ {'id': 2, 'child2id': 5},
+ {'id': 3, 'child2id': 6},
])
related.insert().execute([
- {'id':1},
- {'id':2},
- {'id':3},
- {'id':4},
- {'id':5},
- {'id':6},
+ {'id': 1},
+ {'id': 2},
+ {'id': 3},
+ {'id': 4},
+ {'id': 5},
+ {'id': 6},
])
def test_contains_eager(self):
@@ -2324,9 +2438,9 @@ class SecondaryOptionsTest(fixtures.MappedTest):
sess = create_session()
child1s = sess.query(Child1).\
- join(Child1.related).\
- options(sa.orm.contains_eager(Child1.related)).\
- order_by(Child1.id)
+ join(Child1.related).\
+ options(sa.orm.contains_eager(Child1.related)).\
+ order_by(Child1.id)
def go():
eq_(
@@ -2345,10 +2459,11 @@ class SecondaryOptionsTest(fixtures.MappedTest):
testing.db,
lambda: c1.child2,
CompiledSQL(
- "SELECT child2.id AS child2_id, base.id AS base_id, base.type AS base_type "
+ "SELECT child2.id AS child2_id, base.id AS base_id, "
+ "base.type AS base_type "
"FROM base JOIN child2 ON base.id = child2.id "
"WHERE base.id = :param_1",
- {'param_1':4}
+ {'param_1': 4}
)
)
@@ -2357,12 +2472,15 @@ class SecondaryOptionsTest(fixtures.MappedTest):
sess = create_session()
- child1s = sess.query(Child1).join(Child1.related).options(sa.orm.joinedload(Child1.related)).order_by(Child1.id)
+ child1s = sess.query(Child1).join(Child1.related).options(
+ sa.orm.joinedload(Child1.related)).order_by(Child1.id)
def go():
eq_(
child1s.all(),
- [Child1(id=1, related=Related(id=1)), Child1(id=2, related=Related(id=2)), Child1(id=3, related=Related(id=3))]
+ [Child1(id=1, related=Related(id=1)),
+ Child1(id=2, related=Related(id=2)),
+ Child1(id=3, related=Related(id=3))]
)
self.assert_sql_count(testing.db, go, 1)
@@ -2372,30 +2490,32 @@ class SecondaryOptionsTest(fixtures.MappedTest):
testing.db,
lambda: c1.child2,
CompiledSQL(
- "SELECT child2.id AS child2_id, base.id AS base_id, base.type AS base_type "
- "FROM base JOIN child2 ON base.id = child2.id WHERE base.id = :param_1",
-
-# joinedload- this shouldn't happen
-# "SELECT base.id AS base_id, child2.id AS child2_id, base.type AS base_type, "
-# "related_1.id AS related_1_id FROM base JOIN child2 ON base.id = child2.id "
-# "LEFT OUTER JOIN related AS related_1 ON base.id = related_1.id WHERE base.id = :param_1",
- {'param_1':4}
+ "SELECT child2.id AS child2_id, base.id AS base_id, "
+ "base.type AS base_type "
+ "FROM base JOIN child2 ON base.id = child2.id "
+ "WHERE base.id = :param_1",
+
+ {'param_1': 4}
)
)
def test_joinedload_on_same(self):
Child1, Child2, Related = (self.classes.Child1,
- self.classes.Child2,
- self.classes.Related)
+ self.classes.Child2,
+ self.classes.Related)
sess = create_session()
- child1s = sess.query(Child1).join(Child1.related).options(sa.orm.joinedload(Child1.child2, Child2.related)).order_by(Child1.id)
+ child1s = sess.query(Child1).join(Child1.related).options(
+ sa.orm.joinedload(Child1.child2, Child2.related)
+ ).order_by(Child1.id)
def go():
eq_(
child1s.all(),
- [Child1(id=1, related=Related(id=1)), Child1(id=2, related=Related(id=2)), Child1(id=3, related=Related(id=3))]
+ [Child1(id=1, related=Related(id=1)),
+ Child1(id=2, related=Related(id=2)),
+ Child1(id=3, related=Related(id=3))]
)
self.assert_sql_count(testing.db, go, 4)
@@ -2406,32 +2526,43 @@ class SecondaryOptionsTest(fixtures.MappedTest):
testing.db,
lambda: c1.child2,
CompiledSQL(
- "SELECT child2.id AS child2_id, base.id AS base_id, base.type AS base_type, "
- "related_1.id AS related_1_id FROM base JOIN child2 ON base.id = child2.id "
- "LEFT OUTER JOIN related AS related_1 ON base.id = related_1.id WHERE base.id = :param_1",
- {'param_1':4}
+ "SELECT child2.id AS child2_id, base.id AS base_id, "
+ "base.type AS base_type, "
+ "related_1.id AS related_1_id FROM base JOIN child2 "
+ "ON base.id = child2.id "
+ "LEFT OUTER JOIN related AS related_1 "
+ "ON base.id = related_1.id WHERE base.id = :param_1",
+ {'param_1': 4}
)
)
class DeferredPopulationTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table("thing", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
- Column("name", String(20)))
+ Column(
+ "id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("name", String(20)))
Table("human", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
- Column("thing_id", Integer, ForeignKey("thing.id")),
- Column("name", String(20)))
+ Column(
+ "id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("thing_id", Integer, ForeignKey("thing.id")),
+ Column("name", String(20)))
@classmethod
def setup_mappers(cls):
thing, human = cls.tables.thing, cls.tables.human
- class Human(cls.Basic): pass
- class Thing(cls.Basic): pass
+ class Human(cls.Basic):
+ pass
+
+ class Thing(cls.Basic):
+ pass
mapper(Human, human, properties={"thing": relationship(Thing)})
mapper(Thing, thing, properties={"name": deferred(thing.c.name)})
@@ -2462,7 +2593,7 @@ class DeferredPopulationTest(fixtures.MappedTest):
Thing = self.classes.Thing
session = create_session()
- result = session.query(Thing).first()
+ result = session.query(Thing).first() # noqa
session.expunge_all()
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
@@ -2471,7 +2602,7 @@ class DeferredPopulationTest(fixtures.MappedTest):
Thing = self.classes.Thing
session = create_session()
- result = session.query(Thing).first()
+ result = session.query(Thing).first() # noqa
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
@@ -2479,7 +2610,8 @@ class DeferredPopulationTest(fixtures.MappedTest):
Thing, Human = self.classes.Thing, self.classes.Human
session = create_session()
- human = session.query(Human).options(sa.orm.joinedload("thing")).first()
+ human = session.query(Human).options( # noqa
+ sa.orm.joinedload("thing")).first()
session.expunge_all()
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
@@ -2488,7 +2620,8 @@ class DeferredPopulationTest(fixtures.MappedTest):
Thing, Human = self.classes.Thing, self.classes.Human
session = create_session()
- human = session.query(Human).options(sa.orm.joinedload("thing")).first()
+ human = session.query(Human).options( # noqa
+ sa.orm.joinedload("thing")).first()
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
@@ -2496,7 +2629,8 @@ class DeferredPopulationTest(fixtures.MappedTest):
Thing, Human = self.classes.Thing, self.classes.Human
session = create_session()
- result = session.query(Human).add_entity(Thing).join("thing").first()
+ result = session.query(Human).add_entity( # noqa
+ Thing).join("thing").first()
session.expunge_all()
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
@@ -2505,88 +2639,119 @@ class DeferredPopulationTest(fixtures.MappedTest):
Thing, Human = self.classes.Thing, self.classes.Human
session = create_session()
- result = session.query(Human).add_entity(Thing).join("thing").first()
+ result = session.query(Human).add_entity( # noqa
+ Thing).join("thing").first()
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
-
-
class NoLoadTest(_fixtures.FixtureTest):
run_inserts = 'once'
run_deletes = None
- def test_basic(self):
- """A basic one-to-many lazy load"""
+ def test_o2m_noload(self):
- Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ Address, addresses, users, User = (
+ self.classes.Address,
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
m = mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='noload')
+ addresses=relationship(mapper(Address, addresses), lazy='noload')
))
q = create_session().query(m)
l = [None]
+
def go():
x = q.filter(User.id == 7).all()
x[0].addresses
l[0] = x
self.assert_sql_count(testing.db, go, 1)
- self.assert_result(l[0], User,
- {'id' : 7, 'addresses' : (Address, [])},
- )
+ self.assert_result(
+ l[0], User,
+ {'id': 7, 'addresses': (Address, [])},
+ )
- def test_options(self):
- Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ def test_upgrade_o2m_noload_lazyload_option(self):
+ Address, addresses, users, User = (
+ self.classes.Address,
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
m = mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='noload')
+ addresses=relationship(mapper(Address, addresses), lazy='noload')
))
q = create_session().query(m).options(sa.orm.lazyload('addresses'))
l = [None]
+
def go():
x = q.filter(User.id == 7).all()
x[0].addresses
l[0] = x
self.sql_count_(2, go)
- self.assert_result(l[0], User,
- {'id' : 7, 'addresses' : (Address, [{'id' : 1}])},
- )
-
+ self.assert_result(
+ l[0], User,
+ {'id': 7, 'addresses': (Address, [{'id': 1}])},
+ )
+ def test_m2o_noload_option(self):
+ Address, addresses, users, User = (
+ self.classes.Address,
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
+ mapper(Address, addresses, properties={
+ 'user': relationship(User)
+ })
+ mapper(User, users)
+ s = Session()
+ a1 = s.query(Address).filter_by(id=1).options(
+ sa.orm.noload('user')).first()
+ def go():
+ eq_(a1.user, None)
+ self.sql_count_(0, go)
class RequirementsTest(fixtures.MappedTest):
+
"""Tests the contract for user classes."""
@classmethod
def define_tables(cls, metadata):
Table('ht1', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('value', String(10)))
Table('ht2', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('ht1_id', Integer, ForeignKey('ht1.id')),
Column('value', String(10)))
Table('ht3', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('value', String(10)))
Table('ht4', metadata,
- Column('ht1_id', Integer, ForeignKey('ht1.id'), primary_key=True),
- Column('ht3_id', Integer, ForeignKey('ht3.id'), primary_key=True))
+ Column('ht1_id', Integer, ForeignKey('ht1.id'),
+ primary_key=True),
+ Column('ht3_id', Integer, ForeignKey('ht3.id'),
+ primary_key=True))
Table('ht5', metadata,
- Column('ht1_id', Integer, ForeignKey('ht1.id'), primary_key=True))
+ Column('ht1_id', Integer, ForeignKey('ht1.id'),
+ primary_key=True))
Table('ht6', metadata,
- Column('ht1a_id', Integer, ForeignKey('ht1.id'), primary_key=True),
- Column('ht1b_id', Integer, ForeignKey('ht1.id'), primary_key=True),
+ Column('ht1a_id', Integer, ForeignKey('ht1.id'),
+ primary_key=True),
+ Column('ht1b_id', Integer, ForeignKey('ht1.id'),
+ primary_key=True),
Column('value', String(10)))
if util.py2k:
@@ -2604,16 +2769,21 @@ class RequirementsTest(fixtures.MappedTest):
pass
# TODO: is weakref support detectable without an instance?
- #self.assertRaises(sa.exc.ArgumentError, mapper, NoWeakrefSupport, t2)
+ # self.assertRaises(
+ # sa.exc.ArgumentError, mapper, NoWeakrefSupport, t2)
class _ValueBase(object):
+
def __init__(self, value='abc', id=None):
self.id = id
self.value = value
+
def __bool__(self):
return False
+
def __hash__(self):
return hash(self.value)
+
def __eq__(self, other):
if isinstance(other, type(self)):
return self.value == other.value
@@ -2630,19 +2800,21 @@ class RequirementsTest(fixtures.MappedTest):
"""
ht6, ht5, ht4, ht3, ht2, ht1 = (self.tables.ht6,
- self.tables.ht5,
- self.tables.ht4,
- self.tables.ht3,
- self.tables.ht2,
- self.tables.ht1)
-
+ self.tables.ht5,
+ self.tables.ht4,
+ self.tables.ht3,
+ self.tables.ht2,
+ self.tables.ht1)
class H1(self._ValueBase):
pass
+
class H2(self._ValueBase):
pass
+
class H3(self._ValueBase):
pass
+
class H6(self._ValueBase):
pass
@@ -2651,10 +2823,10 @@ class RequirementsTest(fixtures.MappedTest):
'h3s': relationship(H3, secondary=ht4, backref='h1s'),
'h1s': relationship(H1, secondary=ht5, backref='parent_h1'),
't6a': relationship(H6, backref='h1a',
- primaryjoin=ht1.c.id==ht6.c.ht1a_id),
+ primaryjoin=ht1.c.id == ht6.c.ht1a_id),
't6b': relationship(H6, backref='h1b',
- primaryjoin=ht1.c.id==ht6.c.ht1b_id),
- })
+ primaryjoin=ht1.c.id == ht6.c.ht1b_id),
+ })
mapper(H2, ht2)
mapper(H3, ht3)
mapper(H6, ht6)
@@ -2709,18 +2881,19 @@ class RequirementsTest(fixtures.MappedTest):
sa.orm.joinedload_all('h3s.h1s')).all()
eq_(len(h1s), 5)
-
def test_composite_results(self):
ht2, ht1 = (self.tables.ht2,
- self.tables.ht1)
-
+ self.tables.ht1)
class H1(self._ValueBase):
+
def __init__(self, value, id, h2s):
self.value = value
self.id = id
self.h2s = h2s
+
class H2(self._ValueBase):
+
def __init__(self, value, id):
self.value = value
self.id = id
@@ -2745,8 +2918,8 @@ class RequirementsTest(fixtures.MappedTest):
s.commit()
eq_(
[(h1.value, h1.id, h2.value, h2.id)
- for h1, h2 in
- s.query(H1, H2).join(H1.h2s).order_by(H1.id, H2.id)],
+ for h1, h2 in
+ s.query(H1, H2).join(H1.h2s).order_by(H1.id, H2.id)],
[
('abc', 1, 'abc', 1),
('abc', 1, 'def', 2),
@@ -2761,6 +2934,7 @@ class RequirementsTest(fixtures.MappedTest):
ht1 = self.tables.ht1
class H1(object):
+
def __len__(self):
return len(self.get_value())
@@ -2769,6 +2943,7 @@ class RequirementsTest(fixtures.MappedTest):
return self.value
class H2(object):
+
def __bool__(self):
return bool(self.get_value())
@@ -2781,19 +2956,21 @@ class RequirementsTest(fixtures.MappedTest):
h1 = H1()
h1.value = "Asdf"
- h1.value = "asdf asdf" # ding
+ h1.value = "asdf asdf" # ding
h2 = H2()
h2.value = "Asdf"
- h2.value = "asdf asdf" # ding
+ h2.value = "asdf asdf" # ding
+
class IsUserlandTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table('foo', metadata,
- Column('id', Integer, primary_key=True),
- Column('someprop', Integer)
- )
+ Column('id', Integer, primary_key=True),
+ Column('someprop', Integer)
+ )
def _test(self, value, instancelevel=None):
class Foo(object):
@@ -2842,17 +3019,20 @@ class IsUserlandTest(fixtures.MappedTest):
return "hi"
self._test(property(somefunc), "hi")
+
class MagicNamesTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('cartographers', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', String(50)),
Column('alias', String(50)),
Column('quip', String(100)))
Table('maps', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('cart_id', Integer,
ForeignKey('cartographers.id')),
Column('state', String(2)),
@@ -2868,9 +3048,9 @@ class MagicNamesTest(fixtures.MappedTest):
def test_mappish(self):
maps, Cartographer, cartographers, Map = (self.tables.maps,
- self.classes.Cartographer,
- self.tables.cartographers,
- self.classes.Map)
+ self.classes.Cartographer,
+ self.tables.cartographers,
+ self.classes.Map)
mapper(Cartographer, cartographers, properties=dict(
query=cartographers.c.quip))
@@ -2879,7 +3059,7 @@ class MagicNamesTest(fixtures.MappedTest):
c = Cartographer(name='Lenny', alias='The Dude',
query='Where be dragons?')
- m = Map(state='AK', mapper=c)
+ Map(state='AK', mapper=c)
sess = create_session()
sess.add(c)
@@ -2889,16 +3069,18 @@ class MagicNamesTest(fixtures.MappedTest):
for C, M in ((Cartographer, Map),
(sa.orm.aliased(Cartographer), sa.orm.aliased(Map))):
c1 = (sess.query(C).
- filter(C.alias=='The Dude').
- filter(C.query=='Where be dragons?')).one()
- m1 = sess.query(M).filter(M.mapper==c1).one()
+ filter(C.alias == 'The Dude').
+ filter(C.query == 'Where be dragons?')).one()
+ sess.query(M).filter(M.mapper == c1).one()
def test_direct_stateish(self):
for reserved in (sa.orm.instrumentation.ClassManager.STATE_ATTR,
sa.orm.instrumentation.ClassManager.MANAGER_ATTR):
t = Table('t', sa.MetaData(),
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column(reserved, Integer))
+
class T(object):
pass
assert_raises_message(
@@ -2920,6 +3102,4 @@ class MagicNamesTest(fixtures.MappedTest):
('requested attribute name conflicts with '
'instrumentation attribute of the same name'),
mapper, M, maps, properties={
- reserved: maps.c.state})
-
-
+ reserved: maps.c.state})
diff --git a/test/orm/test_merge.py b/test/orm/test_merge.py
index a52274896..dab9f4305 100644
--- a/test/orm/test_merge.py
+++ b/test/orm/test_merge.py
@@ -1102,6 +1102,101 @@ class MergeTest(_fixtures.FixtureTest):
eq_(ustate.load_path.path, (umapper, ))
eq_(ustate.load_options, set([opt2]))
+ def test_resolve_conflicts_pending_doesnt_interfere_no_ident(self):
+ User, Address, Order = (
+ self.classes.User, self.classes.Address, self.classes.Order)
+ users, addresses, orders = (
+ self.tables.users, self.tables.addresses, self.tables.orders)
+
+ mapper(User, users, properties={
+ 'orders': relationship(Order)
+ })
+ mapper(Order, orders, properties={
+ 'address': relationship(Address)
+ })
+ mapper(Address, addresses)
+
+ u1 = User(id=7, name='x')
+ u1.orders = [
+ Order(description='o1', address=Address(email_address='a')),
+ Order(description='o2', address=Address(email_address='b')),
+ Order(description='o3', address=Address(email_address='c'))
+ ]
+
+ sess = Session()
+ sess.merge(u1)
+ sess.flush()
+
+ eq_(
+ sess.query(Address.email_address).order_by(
+ Address.email_address).all(),
+ [('a', ), ('b', ), ('c', )]
+ )
+
+ def test_resolve_conflicts_pending(self):
+ User, Address, Order = (
+ self.classes.User, self.classes.Address, self.classes.Order)
+ users, addresses, orders = (
+ self.tables.users, self.tables.addresses, self.tables.orders)
+
+ mapper(User, users, properties={
+ 'orders': relationship(Order)
+ })
+ mapper(Order, orders, properties={
+ 'address': relationship(Address)
+ })
+ mapper(Address, addresses)
+
+ u1 = User(id=7, name='x')
+ u1.orders = [
+ Order(description='o1', address=Address(id=1, email_address='a')),
+ Order(description='o2', address=Address(id=1, email_address='b')),
+ Order(description='o3', address=Address(id=1, email_address='c'))
+ ]
+
+ sess = Session()
+ sess.merge(u1)
+ sess.flush()
+
+ eq_(
+ sess.query(Address).one(),
+ Address(id=1, email_address='c')
+ )
+
+ def test_resolve_conflicts_persistent(self):
+ User, Address, Order = (
+ self.classes.User, self.classes.Address, self.classes.Order)
+ users, addresses, orders = (
+ self.tables.users, self.tables.addresses, self.tables.orders)
+
+ mapper(User, users, properties={
+ 'orders': relationship(Order)
+ })
+ mapper(Order, orders, properties={
+ 'address': relationship(Address)
+ })
+ mapper(Address, addresses)
+
+ sess = Session()
+ sess.add(Address(id=1, email_address='z'))
+ sess.commit()
+
+ u1 = User(id=7, name='x')
+ u1.orders = [
+ Order(description='o1', address=Address(id=1, email_address='a')),
+ Order(description='o2', address=Address(id=1, email_address='b')),
+ Order(description='o3', address=Address(id=1, email_address='c'))
+ ]
+
+ sess = Session()
+ sess.merge(u1)
+ sess.flush()
+
+ eq_(
+ sess.query(Address).one(),
+ Address(id=1, email_address='c')
+ )
+
class M2ONoUseGetLoadingTest(fixtures.MappedTest):
"""Merge a one-to-many. The many-to-one on the other side is set up
diff --git a/test/orm/test_options.py b/test/orm/test_options.py
index 1c1a797a6..e1e26c62c 100644
--- a/test/orm/test_options.py
+++ b/test/orm/test_options.py
@@ -2,7 +2,7 @@ from sqlalchemy import inspect
from sqlalchemy.orm import attributes, mapper, relationship, backref, \
configure_mappers, create_session, synonym, Session, class_mapper, \
aliased, column_property, joinedload_all, joinedload, Query,\
- util as orm_util, Load
+ util as orm_util, Load, defer
import sqlalchemy as sa
from sqlalchemy import testing
from sqlalchemy.testing.assertions import eq_, assert_raises, assert_raises_message
@@ -46,8 +46,18 @@ class PathTest(object):
set([self._make_path(p) for p in paths])
)
+
class LoadTest(PathTest, QueryTest):
+ def test_str(self):
+ User = self.classes.User
+ l = Load(User)
+ l.strategy = (('deferred', False), ('instrument', True))
+ eq_(
+ str(l),
+ "Load(strategy=(('deferred', False), ('instrument', True)))"
+ )
+
def test_gen_path_attr_entity(self):
User = self.classes.User
Address = self.classes.Address
diff --git a/test/orm/test_query.py b/test/orm/test_query.py
index 55af023b1..d2f9e4a66 100644
--- a/test/orm/test_query.py
+++ b/test/orm/test_query.py
@@ -579,8 +579,7 @@ class GetTest(QueryTest):
table = Table(
'unicode_data', metadata,
Column(
- 'id', Unicode(40), primary_key=True,
- test_needs_autoincrement=True),
+ 'id', Unicode(40), primary_key=True),
Column('data', Unicode(40)))
metadata.create_all()
ustring = util.b('petit voix m\xe2\x80\x99a').decode('utf-8')
@@ -776,6 +775,42 @@ class InvalidGenerationsTest(QueryTest, AssertsCompiledSQL):
meth, q, *arg, **kw
)
+ def test_illegal_coercions(self):
+ User = self.classes.User
+
+ assert_raises_message(
+ sa_exc.ArgumentError,
+ "Object .*User.* is not legal as a SQL literal value",
+ distinct, User
+ )
+
+ ua = aliased(User)
+ assert_raises_message(
+ sa_exc.ArgumentError,
+ "Object .*User.* is not legal as a SQL literal value",
+ distinct, ua
+ )
+
+ s = Session()
+ assert_raises_message(
+ sa_exc.ArgumentError,
+ "Object .*User.* is not legal as a SQL literal value",
+ lambda: s.query(User).filter(User.name == User)
+ )
+
+ u1 = User()
+ assert_raises_message(
+ sa_exc.ArgumentError,
+ "Object .*User.* is not legal as a SQL literal value",
+ distinct, u1
+ )
+
+ assert_raises_message(
+ sa_exc.ArgumentError,
+ "Object .*User.* is not legal as a SQL literal value",
+ lambda: s.query(User).filter(User.name == u1)
+ )
+
class OperatorTest(QueryTest, AssertsCompiledSQL):
"""test sql.Comparator implementation for MapperProperties"""
@@ -1960,13 +1995,6 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
sess.query(User). \
filter(User.addresses.any(email_address='fred@fred.com')).all()
- # test that any() doesn't overcorrelate
- assert [User(id=7), User(id=8)] == \
- sess.query(User).join("addresses"). \
- filter(
- ~User.addresses.any(
- Address.email_address == 'fred@fred.com')).all()
-
# test that the contents are not adapted by the aliased join
assert [User(id=7), User(id=8)] == \
sess.query(User).join("addresses", aliased=True). \
@@ -1978,6 +2006,18 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
sess.query(User).outerjoin("addresses", aliased=True). \
filter(~User.addresses.any()).all()
+ def test_any_doesnt_overcorrelate(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+
+ # test that any() doesn't overcorrelate
+ assert [User(id=7), User(id=8)] == \
+ sess.query(User).join("addresses"). \
+ filter(
+ ~User.addresses.any(
+ Address.email_address == 'fred@fred.com')).all()
+
def test_has(self):
Dingaling, User, Address = (
self.classes.Dingaling, self.classes.User, self.classes.Address)
@@ -2190,6 +2230,42 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
)
+class HasMapperEntitiesTest(QueryTest):
+ def test_entity(self):
+ User = self.classes.User
+ s = Session()
+
+ q = s.query(User)
+
+ assert q._has_mapper_entities
+
+ def test_cols(self):
+ User = self.classes.User
+ s = Session()
+
+ q = s.query(User.id)
+
+ assert not q._has_mapper_entities
+
+ def test_cols_set_entities(self):
+ User = self.classes.User
+ s = Session()
+
+ q = s.query(User.id)
+
+ q._set_entities(User)
+ assert q._has_mapper_entities
+
+ def test_entity_set_entities(self):
+ User = self.classes.User
+ s = Session()
+
+ q = s.query(User)
+
+ q._set_entities(User.id)
+ assert not q._has_mapper_entities
+
+
class SetOpsTest(QueryTest, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -3140,6 +3216,39 @@ class ParentTest(QueryTest, AssertsCompiledSQL):
# sess.query(Order).with_parent(None, property='addresses').all()
# == [Order(description="order 5")]
+ def test_select_from(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+ u1 = sess.query(User).get(7)
+ q = sess.query(Address).select_from(Address).with_parent(u1)
+ self.assert_compile(
+ q,
+ "SELECT addresses.id AS addresses_id, "
+ "addresses.user_id AS addresses_user_id, "
+ "addresses.email_address AS addresses_email_address "
+ "FROM addresses WHERE :param_1 = addresses.user_id",
+ {'param_1': 7}
+ )
+
+ @testing.fails("issue #3607")
+ def test_select_from_alias(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+ u1 = sess.query(User).get(7)
+ a1 = aliased(Address)
+ q = sess.query(a1).with_parent(u1)
+ self.assert_compile(
+ q,
+ "SELECT addresses_1.id AS addresses_1_id, "
+ "addresses_1.user_id AS addresses_1_user_id, "
+ "addresses_1.email_address AS addresses_1_email_address "
+ "FROM addresses AS addresses_1 "
+ "WHERE :param_1 = addresses_1.user_id",
+ {'param_1': 7}
+ )
+
def test_noparent(self):
Item, User = self.classes.Item, self.classes.User
@@ -3547,13 +3656,17 @@ class ImmediateTest(_fixtures.FixtureTest):
sess = create_session()
- assert_raises(
+ assert_raises_message(
sa.orm.exc.NoResultFound,
+ "No row was found for one\(\)",
sess.query(User).filter(User.id == 99).one)
eq_(sess.query(User).filter(User.id == 7).one().id, 7)
- assert_raises(sa.orm.exc.MultipleResultsFound, sess.query(User).one)
+ assert_raises_message(
+ sa.orm.exc.MultipleResultsFound,
+ "Multiple rows were found for one\(\)",
+ sess.query(User).one)
assert_raises(
sa.orm.exc.NoResultFound,
@@ -3598,6 +3711,60 @@ class ImmediateTest(_fixtures.FixtureTest):
sess.query(User).join(User.addresses).filter(User.id.in_([8, 9])).
order_by(User.id).one)
+ def test_one_or_none(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+
+ eq_(sess.query(User).filter(User.id == 99).one_or_none(), None)
+
+ eq_(sess.query(User).filter(User.id == 7).one_or_none().id, 7)
+
+ assert_raises_message(
+ sa.orm.exc.MultipleResultsFound,
+ "Multiple rows were found for one_or_none\(\)",
+ sess.query(User).one_or_none)
+
+ eq_(sess.query(User.id, User.name).filter(User.id == 99).one_or_none(), None)
+
+ eq_(sess.query(User.id, User.name).filter(User.id == 7).one_or_none(),
+ (7, 'jack'))
+
+ assert_raises(
+ sa.orm.exc.MultipleResultsFound,
+ sess.query(User.id, User.name).one_or_none)
+
+ eq_(
+ (sess.query(User, Address).join(User.addresses).
+ filter(Address.id == 99)).one_or_none(), None)
+
+ eq_((sess.query(User, Address).
+ join(User.addresses).
+ filter(Address.id == 4)).one_or_none(),
+ (User(id=8), Address(id=4)))
+
+ assert_raises(
+ sa.orm.exc.MultipleResultsFound,
+ sess.query(User, Address).join(User.addresses).one_or_none)
+
+ # this result returns multiple rows, the first
+ # two rows being the same; uniquing is not
+ # applied for a column-based result.
+ assert_raises(
+ sa.orm.exc.MultipleResultsFound,
+ sess.query(User.id).join(User.addresses).
+ filter(User.id.in_([8, 9])).order_by(User.id).one_or_none)
+
+ # test that a join which ultimately returns
+ # multiple identities across many rows still
+ # raises, even though the first two rows are of
+ # the same identity and unique filtering
+ # is applied ([ticket:1688])
+ assert_raises(
+ sa.orm.exc.MultipleResultsFound,
+ sess.query(User).join(User.addresses).filter(User.id.in_([8, 9])).
+ order_by(User.id).one_or_none)
+
@testing.future
def test_getslice(self):
assert False
diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py
index 9e4b38a90..061187330 100644
--- a/test/orm/test_relationships.py
+++ b/test/orm/test_relationships.py
@@ -931,14 +931,12 @@ class SynonymsAsFKsTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("tableA", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
+ Column("id", Integer, primary_key=True),
Column("foo", Integer,),
test_needs_fk=True)
Table("tableB", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
+ Column("id", Integer, primary_key=True),
Column("_a_id", Integer, key='a_id', primary_key=True),
test_needs_fk=True)
@@ -1093,7 +1091,7 @@ class FKsAsPksTest(fixtures.MappedTest):
'tablec', tableA.metadata,
Column('id', Integer, primary_key=True),
Column('a_id', Integer, ForeignKey('tableA.id'),
- primary_key=True, autoincrement=False, nullable=True))
+ primary_key=True, nullable=True))
tableC.create()
class C(fixtures.BasicEntity):
@@ -2703,8 +2701,7 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', String(50), primary_key=True,
- test_needs_autoincrement=True),
+ Column('id', String(50), primary_key=True),
Column('data', String(50)))
Table('t2', metadata,
Column('id', Integer, primary_key=True,
diff --git a/test/orm/test_session.py b/test/orm/test_session.py
index 58551d763..caeb08530 100644
--- a/test/orm/test_session.py
+++ b/test/orm/test_session.py
@@ -17,6 +17,7 @@ from sqlalchemy.util import pypy
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy import event, ForeignKey
+from sqlalchemy.util.compat import inspect_getargspec
class ExecutionTest(_fixtures.FixtureTest):
@@ -493,8 +494,10 @@ class SessionStateTest(_fixtures.FixtureTest):
'is already attached to session',
s2.delete, user)
u2 = s2.query(User).get(user.id)
- assert_raises_message(sa.exc.InvalidRequestError,
- 'another instance with key', s.delete, u2)
+ s2.expunge(u2)
+ assert_raises_message(
+ sa.exc.InvalidRequestError,
+ 'another instance .* is already present', s.delete, u2)
s.expire(user)
s.expunge(user)
assert user not in s
@@ -543,8 +546,14 @@ class SessionStateTest(_fixtures.FixtureTest):
s.expunge(u2)
s.identity_map.add(sa.orm.attributes.instance_state(u1))
- assert_raises(AssertionError, s.identity_map.add,
- sa.orm.attributes.instance_state(u2))
+ assert_raises_message(
+ sa.exc.InvalidRequestError,
+ "Can't attach instance <User.*?>; another instance "
+ "with key .*? is already "
+ "present in this session.",
+ s.identity_map.add,
+ sa.orm.attributes.instance_state(u2)
+ )
def test_pickled_update(self):
users, User = self.tables.users, pickleable.User
@@ -581,7 +590,13 @@ class SessionStateTest(_fixtures.FixtureTest):
assert u2 is not None and u2 is not u1
assert u2 in sess
- assert_raises(AssertionError, lambda: sess.add(u1))
+ assert_raises_message(
+ sa.exc.InvalidRequestError,
+ "Can't attach instance <User.*?>; another instance "
+ "with key .*? is already "
+ "present in this session.",
+ sess.add, u1
+ )
sess.expunge(u2)
assert u2 not in sess
@@ -1124,11 +1139,56 @@ class WeakIdentityMapTest(_fixtures.FixtureTest):
class StrongIdentityMapTest(_fixtures.FixtureTest):
run_inserts = None
+ def _strong_ident_fixture(self):
+ sess = create_session(weak_identity_map=False)
+ return sess, sess.prune
+
+ def _event_fixture(self):
+ session = create_session()
+
+ @event.listens_for(session, "pending_to_persistent")
+ @event.listens_for(session, "deleted_to_persistent")
+ @event.listens_for(session, "detached_to_persistent")
+ @event.listens_for(session, "loaded_as_persistent")
+ def strong_ref_object(sess, instance):
+ if 'refs' not in sess.info:
+ sess.info['refs'] = refs = set()
+ else:
+ refs = sess.info['refs']
+
+ refs.add(instance)
+
+ @event.listens_for(session, "persistent_to_detached")
+ @event.listens_for(session, "persistent_to_deleted")
+ @event.listens_for(session, "persistent_to_transient")
+ def deref_object(sess, instance):
+ sess.info['refs'].discard(instance)
+
+ def prune():
+ if 'refs' not in session.info:
+ return 0
+
+ sess_size = len(session.identity_map)
+ session.info['refs'].clear()
+ gc_collect()
+ session.info['refs'] = set(
+ s.obj() for s in session.identity_map.all_states())
+ return sess_size - len(session.identity_map)
+
+ return session, prune
+
@testing.uses_deprecated()
- def test_strong_ref(self):
+ def test_strong_ref_imap(self):
+ self._test_strong_ref(self._strong_ident_fixture)
+
+ def test_strong_ref_events(self):
+ self._test_strong_ref(self._event_fixture)
+
+ def _test_strong_ref(self, fixture):
+ s, prune = fixture()
+
users, User = self.tables.users, self.classes.User
- s = create_session(weak_identity_map=False)
mapper(User, users)
# save user
@@ -1148,12 +1208,19 @@ class StrongIdentityMapTest(_fixtures.FixtureTest):
eq_(users.select().execute().fetchall(), [(user.id, 'u2')])
@testing.uses_deprecated()
+ def test_prune_imap(self):
+ self._test_prune(self._strong_ident_fixture)
+
+ def test_prune_events(self):
+ self._test_prune(self._event_fixture)
+
@testing.fails_if(lambda: pypy, "pypy has a real GC")
@testing.fails_on('+zxjdbc', 'http://www.sqlalchemy.org/trac/ticket/1473')
- def test_prune(self):
+ def _test_prune(self, fixture):
+ s, prune = fixture()
+
users, User = self.tables.users, self.classes.User
- s = create_session(weak_identity_map=False)
mapper(User, users)
for o in [User(name='u%s' % x) for x in range(10)]:
@@ -1161,43 +1228,44 @@ class StrongIdentityMapTest(_fixtures.FixtureTest):
# o is still live after this loop...
self.assert_(len(s.identity_map) == 0)
- self.assert_(s.prune() == 0)
+ eq_(prune(), 0)
s.flush()
gc_collect()
- self.assert_(s.prune() == 9)
+ eq_(prune(), 9)
+ # o is still in local scope here, so still present
self.assert_(len(s.identity_map) == 1)
id = o.id
del o
- self.assert_(s.prune() == 1)
+ eq_(prune(), 1)
self.assert_(len(s.identity_map) == 0)
u = s.query(User).get(id)
- self.assert_(s.prune() == 0)
+ eq_(prune(), 0)
self.assert_(len(s.identity_map) == 1)
u.name = 'squiznart'
del u
- self.assert_(s.prune() == 0)
+ eq_(prune(), 0)
self.assert_(len(s.identity_map) == 1)
s.flush()
- self.assert_(s.prune() == 1)
+ eq_(prune(), 1)
self.assert_(len(s.identity_map) == 0)
s.add(User(name='x'))
- self.assert_(s.prune() == 0)
+ eq_(prune(), 0)
self.assert_(len(s.identity_map) == 0)
s.flush()
self.assert_(len(s.identity_map) == 1)
- self.assert_(s.prune() == 1)
+ eq_(prune(), 1)
self.assert_(len(s.identity_map) == 0)
u = s.query(User).get(id)
s.delete(u)
del u
- self.assert_(s.prune() == 0)
+ eq_(prune(), 0)
self.assert_(len(s.identity_map) == 1)
s.flush()
- self.assert_(s.prune() == 0)
+ eq_(prune(), 0)
self.assert_(len(s.identity_map) == 0)
@@ -1416,7 +1484,7 @@ class SessionInterface(fixtures.TestBase):
for meth in Session.public_methods:
if meth in blacklist:
continue
- spec = inspect.getargspec(getattr(Session, meth))
+ spec = inspect_getargspec(getattr(Session, meth))
if len(spec[0]) > 1 or spec[1]:
ok.add(meth)
return ok
diff --git a/test/orm/test_transaction.py b/test/orm/test_transaction.py
index 91846a67e..7efb5942b 100644
--- a/test/orm/test_transaction.py
+++ b/test/orm/test_transaction.py
@@ -657,6 +657,34 @@ class SessionTransactionTest(FixtureTest):
assert session.transaction is not None, \
'autocommit=False should start a new transaction'
+ @testing.skip_if("oracle", "oracle doesn't support release of savepoint")
+ @testing.requires.savepoints
+ def test_report_primary_error_when_rollback_fails(self):
+ User, users = self.classes.User, self.tables.users
+
+ mapper(User, users)
+
+ session = Session(testing.db)
+
+ with expect_warnings(".*due to an additional ROLLBACK.*INSERT INTO"):
+ session.begin_nested()
+ savepoint = session.\
+ connection()._Connection__transaction._savepoint
+
+ # force the savepoint to disappear
+ session.connection().dialect.do_release_savepoint(
+ session.connection(), savepoint
+ )
+
+ # now do a broken flush
+ session.add_all([User(id=1), User(id=1)])
+
+ assert_raises_message(
+ sa_exc.DBAPIError,
+ "ROLLBACK TO SAVEPOINT ",
+ session.flush
+ )
+
class _LocalFixture(FixtureTest):
run_setup_mappers = 'once'
@@ -895,7 +923,13 @@ class AutoExpireTest(_LocalFixture):
assert u1_state.obj() is None
s.rollback()
- assert u1_state in s.identity_map.all_states()
+ # new in 1.1, not in identity map if the object was
+ # gc'ed and we restore snapshot; we've changed update_impl
+ # to just skip this object
+ assert u1_state not in s.identity_map.all_states()
+
+ # in any version, the state is replaced when re-queried,
+ # because the identity map swaps in the newly loaded state
u1 = s.query(User).filter_by(name='ed').one()
assert u1_state not in s.identity_map.all_states()
assert s.scalar(users.count()) == 1
diff --git a/test/orm/test_unitofwork.py b/test/orm/test_unitofwork.py
index 5a47903f0..2f67943f1 100644
--- a/test/orm/test_unitofwork.py
+++ b/test/orm/test_unitofwork.py
@@ -260,7 +260,7 @@ class PKTest(fixtures.MappedTest):
def define_tables(cls, metadata):
Table('multipk1', metadata,
Column('multi_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=not testing.against('sqlite')),
Column('multi_rev', Integer, primary_key=True),
Column('name', String(50), nullable=False),
Column('value', String(100)))
diff --git a/test/orm/test_unitofworkv2.py b/test/orm/test_unitofworkv2.py
index 9e9f400be..c8ce13c91 100644
--- a/test/orm/test_unitofworkv2.py
+++ b/test/orm/test_unitofworkv2.py
@@ -5,7 +5,8 @@ from sqlalchemy.testing.schema import Table, Column
from test.orm import _fixtures
from sqlalchemy import exc, util
from sqlalchemy.testing import fixtures, config
-from sqlalchemy import Integer, String, ForeignKey, func, literal
+from sqlalchemy import Integer, String, ForeignKey, func, \
+ literal, FetchedValue, text
from sqlalchemy.orm import mapper, relationship, backref, \
create_session, unitofwork, attributes,\
Session, exc as orm_exc
@@ -1848,6 +1849,450 @@ class NoAttrEventInFlushTest(fixtures.MappedTest):
eq_(t1.returning_val, 5)
+class EagerDefaultsTest(fixtures.MappedTest):
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'test', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer, server_default="3")
+ )
+
+ Table(
+ 'test2', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer),
+ Column('bar', Integer, server_onupdate=FetchedValue())
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class Thing(cls.Basic):
+ pass
+
+ class Thing2(cls.Basic):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ Thing = cls.classes.Thing
+
+ mapper(Thing, cls.tables.test, eager_defaults=True)
+
+ Thing2 = cls.classes.Thing2
+
+ mapper(Thing2, cls.tables.test2, eager_defaults=True)
+
+ def test_insert_defaults_present(self):
+ Thing = self.classes.Thing
+ s = Session()
+
+ t1, t2 = (
+ Thing(id=1, foo=5),
+ Thing(id=2, foo=10)
+ )
+
+ s.add_all([t1, t2])
+
+ self.assert_sql_execution(
+ testing.db,
+ s.flush,
+ CompiledSQL(
+ "INSERT INTO test (id, foo) VALUES (:id, :foo)",
+ [{'foo': 5, 'id': 1}, {'foo': 10, 'id': 2}]
+ ),
+ )
+
+ def go():
+ eq_(t1.foo, 5)
+ eq_(t2.foo, 10)
+
+ self.assert_sql_count(testing.db, go, 0)
+
+ def test_insert_defaults_present_as_expr(self):
+ Thing = self.classes.Thing
+ s = Session()
+
+ t1, t2 = (
+ Thing(id=1, foo=text("2 + 5")),
+ Thing(id=2, foo=text("5 + 5"))
+ )
+
+ s.add_all([t1, t2])
+
+ if testing.db.dialect.implicit_returning:
+ self.assert_sql_execution(
+ testing.db,
+ s.flush,
+ CompiledSQL(
+ "INSERT INTO test (id, foo) VALUES (%(id)s, 2 + 5) "
+ "RETURNING test.foo",
+ [{'id': 1}],
+ dialect='postgresql'
+ ),
+ CompiledSQL(
+ "INSERT INTO test (id, foo) VALUES (%(id)s, 5 + 5) "
+ "RETURNING test.foo",
+ [{'id': 2}],
+ dialect='postgresql'
+ )
+ )
+
+ else:
+ self.assert_sql_execution(
+ testing.db,
+ s.flush,
+ CompiledSQL(
+ "INSERT INTO test (id, foo) VALUES (:id, 2 + 5)",
+ [{'id': 1}]
+ ),
+ CompiledSQL(
+ "INSERT INTO test (id, foo) VALUES (:id, 5 + 5)",
+ [{'id': 2}]
+ ),
+ CompiledSQL(
+ "SELECT test.foo AS test_foo FROM test "
+ "WHERE test.id = :param_1",
+ [{'param_1': 1}]
+ ),
+ CompiledSQL(
+ "SELECT test.foo AS test_foo FROM test "
+ "WHERE test.id = :param_1",
+ [{'param_1': 2}]
+ ),
+ )
+
+ def go():
+ eq_(t1.foo, 7)
+ eq_(t2.foo, 10)
+
+ self.assert_sql_count(testing.db, go, 0)
+
+ def test_insert_defaults_nonpresent(self):
+ Thing = self.classes.Thing
+ s = Session()
+
+ t1, t2 = (
+ Thing(id=1),
+ Thing(id=2)
+ )
+
+ s.add_all([t1, t2])
+
+ if testing.db.dialect.implicit_returning:
+ self.assert_sql_execution(
+ testing.db,
+ s.commit,
+ CompiledSQL(
+ "INSERT INTO test (id) VALUES (%(id)s) RETURNING test.foo",
+ [{'id': 1}],
+ dialect='postgresql'
+ ),
+ CompiledSQL(
+ "INSERT INTO test (id) VALUES (%(id)s) RETURNING test.foo",
+ [{'id': 2}],
+ dialect='postgresql'
+ ),
+ )
+ else:
+ self.assert_sql_execution(
+ testing.db,
+ s.commit,
+ CompiledSQL(
+ "INSERT INTO test (id) VALUES (:id)",
+ [{'id': 1}, {'id': 2}]
+ ),
+ CompiledSQL(
+ "SELECT test.foo AS test_foo FROM test "
+ "WHERE test.id = :param_1",
+ [{'param_1': 1}]
+ ),
+ CompiledSQL(
+ "SELECT test.foo AS test_foo FROM test "
+ "WHERE test.id = :param_1",
+ [{'param_1': 2}]
+ )
+ )
+
+ def test_update_defaults_nonpresent(self):
+ Thing2 = self.classes.Thing2
+ s = Session()
+
+ t1, t2, t3, t4 = (
+ Thing2(id=1, foo=1, bar=2),
+ Thing2(id=2, foo=2, bar=3),
+ Thing2(id=3, foo=3, bar=4),
+ Thing2(id=4, foo=4, bar=5)
+ )
+
+ s.add_all([t1, t2, t3, t4])
+ s.flush()
+
+ t1.foo = 5
+ t2.foo = 6
+ t2.bar = 10
+ t3.foo = 7
+ t4.foo = 8
+ t4.bar = 12
+
+ if testing.db.dialect.implicit_returning:
+ self.assert_sql_execution(
+ testing.db,
+ s.flush,
+ CompiledSQL(
+ "UPDATE test2 SET foo=%(foo)s "
+ "WHERE test2.id = %(test2_id)s "
+ "RETURNING test2.bar",
+ [{'foo': 5, 'test2_id': 1}],
+ dialect='postgresql'
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=%(foo)s, bar=%(bar)s "
+ "WHERE test2.id = %(test2_id)s",
+ [{'foo': 6, 'bar': 10, 'test2_id': 2}],
+ dialect='postgresql'
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=%(foo)s "
+ "WHERE test2.id = %(test2_id)s "
+ "RETURNING test2.bar",
+ [{'foo': 7, 'test2_id': 3}],
+ dialect='postgresql'
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=%(foo)s, bar=%(bar)s "
+ "WHERE test2.id = %(test2_id)s",
+ [{'foo': 8, 'bar': 12, 'test2_id': 4}],
+ dialect='postgresql'
+ ),
+ )
+ else:
+ self.assert_sql_execution(
+ testing.db,
+ s.flush,
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo WHERE test2.id = :test2_id",
+ [{'foo': 5, 'test2_id': 1}]
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo, bar=:bar "
+ "WHERE test2.id = :test2_id",
+ [{'foo': 6, 'bar': 10, 'test2_id': 2}],
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo WHERE test2.id = :test2_id",
+ [{'foo': 7, 'test2_id': 3}]
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo, bar=:bar "
+ "WHERE test2.id = :test2_id",
+ [{'foo': 8, 'bar': 12, 'test2_id': 4}],
+ ),
+ CompiledSQL(
+ "SELECT test2.bar AS test2_bar FROM test2 "
+ "WHERE test2.id = :param_1",
+ [{'param_1': 1}]
+ ),
+ CompiledSQL(
+ "SELECT test2.bar AS test2_bar FROM test2 "
+ "WHERE test2.id = :param_1",
+ [{'param_1': 3}]
+ )
+ )
+
+ def go():
+ eq_(t1.bar, 2)
+ eq_(t2.bar, 10)
+ eq_(t3.bar, 4)
+ eq_(t4.bar, 12)
+
+ self.assert_sql_count(testing.db, go, 0)
+
+ def test_update_defaults_present_as_expr(self):
+ Thing2 = self.classes.Thing2
+ s = Session()
+
+ t1, t2, t3, t4 = (
+ Thing2(id=1, foo=1, bar=2),
+ Thing2(id=2, foo=2, bar=3),
+ Thing2(id=3, foo=3, bar=4),
+ Thing2(id=4, foo=4, bar=5)
+ )
+
+ s.add_all([t1, t2, t3, t4])
+ s.flush()
+
+ t1.foo = 5
+ t1.bar = text("1 + 1")
+ t2.foo = 6
+ t2.bar = 10
+ t3.foo = 7
+ t4.foo = 8
+ t4.bar = text("5 + 7")
+
+ if testing.db.dialect.implicit_returning:
+ self.assert_sql_execution(
+ testing.db,
+ s.flush,
+ CompiledSQL(
+ "UPDATE test2 SET foo=%(foo)s, bar=1 + 1 "
+ "WHERE test2.id = %(test2_id)s "
+ "RETURNING test2.bar",
+ [{'foo': 5, 'test2_id': 1}],
+ dialect='postgresql'
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=%(foo)s, bar=%(bar)s "
+ "WHERE test2.id = %(test2_id)s",
+ [{'foo': 6, 'bar': 10, 'test2_id': 2}],
+ dialect='postgresql'
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=%(foo)s "
+ "WHERE test2.id = %(test2_id)s "
+ "RETURNING test2.bar",
+ [{'foo': 7, 'test2_id': 3}],
+ dialect='postgresql'
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=%(foo)s, bar=5 + 7 "
+ "WHERE test2.id = %(test2_id)s RETURNING test2.bar",
+ [{'foo': 8, 'test2_id': 4}],
+ dialect='postgresql'
+ ),
+ )
+ else:
+ self.assert_sql_execution(
+ testing.db,
+ s.flush,
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo, bar=1 + 1 "
+ "WHERE test2.id = :test2_id",
+ [{'foo': 5, 'test2_id': 1}]
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo, bar=:bar "
+ "WHERE test2.id = :test2_id",
+ [{'foo': 6, 'bar': 10, 'test2_id': 2}],
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo WHERE test2.id = :test2_id",
+ [{'foo': 7, 'test2_id': 3}]
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo, bar=5 + 7 "
+ "WHERE test2.id = :test2_id",
+ [{'foo': 8, 'test2_id': 4}],
+ ),
+ CompiledSQL(
+ "SELECT test2.bar AS test2_bar FROM test2 "
+ "WHERE test2.id = :param_1",
+ [{'param_1': 1}]
+ ),
+ CompiledSQL(
+ "SELECT test2.bar AS test2_bar FROM test2 "
+ "WHERE test2.id = :param_1",
+ [{'param_1': 3}]
+ ),
+ CompiledSQL(
+ "SELECT test2.bar AS test2_bar FROM test2 "
+ "WHERE test2.id = :param_1",
+ [{'param_1': 4}]
+ )
+ )
+
+ def go():
+ eq_(t1.bar, 2)
+ eq_(t2.bar, 10)
+ eq_(t3.bar, 4)
+ eq_(t4.bar, 12)
+
+ self.assert_sql_count(testing.db, go, 0)
+
+ def test_insert_defaults_bulk_insert(self):
+ Thing = self.classes.Thing
+ s = Session()
+
+ mappings = [
+ {"id": 1},
+ {"id": 2}
+ ]
+
+ self.assert_sql_execution(
+ testing.db,
+ lambda: s.bulk_insert_mappings(Thing, mappings),
+ CompiledSQL(
+ "INSERT INTO test (id) VALUES (:id)",
+ [{'id': 1}, {'id': 2}]
+ )
+ )
+
+ def test_update_defaults_bulk_update(self):
+ Thing2 = self.classes.Thing2
+ s = Session()
+
+ t1, t2, t3, t4 = (
+ Thing2(id=1, foo=1, bar=2),
+ Thing2(id=2, foo=2, bar=3),
+ Thing2(id=3, foo=3, bar=4),
+ Thing2(id=4, foo=4, bar=5)
+ )
+
+ s.add_all([t1, t2, t3, t4])
+ s.flush()
+
+ mappings = [
+ {"id": 1, "foo": 5},
+ {"id": 2, "foo": 6, "bar": 10},
+ {"id": 3, "foo": 7},
+ {"id": 4, "foo": 8}
+ ]
+
+ self.assert_sql_execution(
+ testing.db,
+ lambda: s.bulk_update_mappings(Thing2, mappings),
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo WHERE test2.id = :test2_id",
+ [{'foo': 5, 'test2_id': 1}]
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo, bar=:bar "
+ "WHERE test2.id = :test2_id",
+ [{'foo': 6, 'bar': 10, 'test2_id': 2}]
+ ),
+ CompiledSQL(
+ "UPDATE test2 SET foo=:foo WHERE test2.id = :test2_id",
+ [{'foo': 7, 'test2_id': 3}, {'foo': 8, 'test2_id': 4}]
+ )
+ )
+
+ def test_update_defaults_present(self):
+ Thing2 = self.classes.Thing2
+ s = Session()
+
+ t1, t2 = (
+ Thing2(id=1, foo=1, bar=2),
+ Thing2(id=2, foo=2, bar=3)
+ )
+
+ s.add_all([t1, t2])
+ s.flush()
+
+ t1.bar = 5
+ t2.bar = 10
+
+ self.assert_sql_execution(
+ testing.db,
+ s.commit,
+ CompiledSQL(
+ "UPDATE test2 SET bar=%(bar)s WHERE test2.id = %(test2_id)s",
+ [{'bar': 5, 'test2_id': 1}, {'bar': 10, 'test2_id': 2}],
+ dialect='postgresql'
+ )
+ )
+
class TypeWoBoolTest(fixtures.MappedTest, testing.AssertsExecutionResults):
"""test support for custom datatypes that return a non-__bool__ value
when compared via __eq__(), eg. ticket 3469"""
@@ -1954,3 +2399,215 @@ class TypeWoBoolTest(fixtures.MappedTest, testing.AssertsExecutionResults):
eq_(
s.query(Thing.value).scalar().text, "foo"
)
+
+
+class NullEvaluatingTest(fixtures.MappedTest, testing.AssertsExecutionResults):
+ @classmethod
+ def define_tables(cls, metadata):
+ from sqlalchemy import TypeDecorator
+
+ class EvalsNull(TypeDecorator):
+ impl = String(50)
+
+ should_evaluate_none = True
+
+ def process_bind_param(self, value, dialect):
+ if value is None:
+ value = 'nothing'
+ return value
+
+ Table(
+ 'test', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('evals_null_no_default', EvalsNull()),
+ Column('evals_null_default', EvalsNull(), default='default_val'),
+ Column('no_eval_null_no_default', String(50)),
+ Column('no_eval_null_default', String(50), default='default_val'),
+ Column(
+ 'builtin_evals_null_no_default', String(50).evaluates_none()),
+ Column(
+ 'builtin_evals_null_default',
+ String(50).evaluates_none(), default='default_val'),
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class Thing(cls.Basic):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ Thing = cls.classes.Thing
+
+ mapper(Thing, cls.tables.test)
+
+ def _assert_col(self, name, value):
+ Thing = self.classes.Thing
+ s = Session()
+
+ col = getattr(Thing, name)
+ obj = s.query(col).filter(col == value).one()
+ eq_(obj[0], value)
+
+ def _test_insert(self, attr, expected):
+ Thing = self.classes.Thing
+
+ s = Session()
+ t1 = Thing(**{attr: None})
+ s.add(t1)
+ s.commit()
+
+ self._assert_col(attr, expected)
+
+ def _test_bulk_insert(self, attr, expected):
+ Thing = self.classes.Thing
+
+ s = Session()
+ s.bulk_insert_mappings(
+ Thing, [{attr: None}]
+ )
+ s.commit()
+
+ self._assert_col(attr, expected)
+
+ def _test_insert_novalue(self, attr, expected):
+ Thing = self.classes.Thing
+
+ s = Session()
+ t1 = Thing()
+ s.add(t1)
+ s.commit()
+
+ self._assert_col(attr, expected)
+
+ def _test_bulk_insert_novalue(self, attr, expected):
+ Thing = self.classes.Thing
+
+ s = Session()
+ s.bulk_insert_mappings(
+ Thing, [{}]
+ )
+ s.commit()
+
+ self._assert_col(attr, expected)
+
+ def test_evalnull_nodefault_insert(self):
+ self._test_insert(
+ "evals_null_no_default", 'nothing'
+ )
+
+ def test_evalnull_nodefault_bulk_insert(self):
+ self._test_bulk_insert(
+ "evals_null_no_default", 'nothing'
+ )
+
+ def test_evalnull_nodefault_insert_novalue(self):
+ self._test_insert_novalue(
+ "evals_null_no_default", None
+ )
+
+ def test_evalnull_nodefault_bulk_insert_novalue(self):
+ self._test_bulk_insert_novalue(
+ "evals_null_no_default", None
+ )
+
+ def test_evalnull_default_insert(self):
+ self._test_insert(
+ "evals_null_default", 'nothing'
+ )
+
+ def test_evalnull_default_bulk_insert(self):
+ self._test_bulk_insert(
+ "evals_null_default", 'nothing'
+ )
+
+ def test_evalnull_default_insert_novalue(self):
+ self._test_insert_novalue(
+ "evals_null_default", 'default_val'
+ )
+
+ def test_evalnull_default_bulk_insert_novalue(self):
+ self._test_bulk_insert_novalue(
+ "evals_null_default", 'default_val'
+ )
+
+ def test_no_evalnull_nodefault_insert(self):
+ self._test_insert(
+ "no_eval_null_no_default", None
+ )
+
+ def test_no_evalnull_nodefault_bulk_insert(self):
+ self._test_bulk_insert(
+ "no_eval_null_no_default", None
+ )
+
+ def test_no_evalnull_nodefault_insert_novalue(self):
+ self._test_insert_novalue(
+ "no_eval_null_no_default", None
+ )
+
+ def test_no_evalnull_nodefault_bulk_insert_novalue(self):
+ self._test_bulk_insert_novalue(
+ "no_eval_null_no_default", None
+ )
+
+ def test_no_evalnull_default_insert(self):
+ self._test_insert(
+ "no_eval_null_default", 'default_val'
+ )
+
+ def test_no_evalnull_default_bulk_insert(self):
+ self._test_bulk_insert(
+ "no_eval_null_default", 'default_val'
+ )
+
+ def test_no_evalnull_default_insert_novalue(self):
+ self._test_insert_novalue(
+ "no_eval_null_default", 'default_val'
+ )
+
+ def test_no_evalnull_default_bulk_insert_novalue(self):
+ self._test_bulk_insert_novalue(
+ "no_eval_null_default", 'default_val'
+ )
+
+ def test_builtin_evalnull_nodefault_insert(self):
+ self._test_insert(
+ "builtin_evals_null_no_default", None
+ )
+
+ def test_builtin_evalnull_nodefault_bulk_insert(self):
+ self._test_bulk_insert(
+ "builtin_evals_null_no_default", None
+ )
+
+ def test_builtin_evalnull_nodefault_insert_novalue(self):
+ self._test_insert_novalue(
+ "builtin_evals_null_no_default", None
+ )
+
+ def test_builtin_evalnull_nodefault_bulk_insert_novalue(self):
+ self._test_bulk_insert_novalue(
+ "builtin_evals_null_no_default", None
+ )
+
+ def test_builtin_evalnull_default_insert(self):
+ self._test_insert(
+ "builtin_evals_null_default", None
+ )
+
+ def test_builtin_evalnull_default_bulk_insert(self):
+ self._test_bulk_insert(
+ "builtin_evals_null_default", None
+ )
+
+ def test_builtin_evalnull_default_insert_novalue(self):
+ self._test_insert_novalue(
+ "builtin_evals_null_default", 'default_val'
+ )
+
+ def test_builtin_evalnull_default_bulk_insert_novalue(self):
+ self._test_bulk_insert_novalue(
+ "builtin_evals_null_default", 'default_val'
+ )
diff --git a/test/orm/test_update_delete.py b/test/orm/test_update_delete.py
index 973053947..593714a06 100644
--- a/test/orm/test_update_delete.py
+++ b/test/orm/test_update_delete.py
@@ -1,10 +1,11 @@
-from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
+from sqlalchemy.testing import eq_, assert_raises, assert_raises_message, is_
from sqlalchemy.testing import fixtures
from sqlalchemy import Integer, String, ForeignKey, or_, exc, \
select, func, Boolean, case, text, column
from sqlalchemy.orm import mapper, relationship, backref, Session, \
joinedload, synonym, query
from sqlalchemy import testing
+from sqlalchemy.testing import mock
from sqlalchemy.testing.schema import Table, Column
@@ -609,6 +610,42 @@ class UpdateDeleteTest(fixtures.MappedTest):
synchronize_session='fetch')
assert john not in sess
+ def test_update_unordered_dict(self):
+ User = self.classes.User
+ session = Session()
+
+ # Do an update using unordered dict and check that the parameters used
+ # are ordered in table order
+ with mock.patch.object(session, "execute") as exec_:
+ session.query(User).filter(User.id == 15).update(
+ {'name': 'foob', 'id': 123})
+ # Confirm that parameters are a dict instead of tuple or list
+ params_type = type(exec_.mock_calls[0][1][0].parameters)
+ is_(params_type, dict)
+
+ def test_update_preserve_parameter_order(self):
+ User = self.classes.User
+ session = Session()
+
+ # Do update using a tuple and check that order is preserved
+ with mock.patch.object(session, "execute") as exec_:
+ session.query(User).filter(User.id == 15).update(
+ (('id', 123), ('name', 'foob')),
+ update_args={"preserve_parameter_order": True})
+ cols = [c.key
+ for c in exec_.mock_calls[0][1][0]._parameter_ordering]
+ eq_(['id', 'name'], cols)
+
+ # Now invert the order and use a list instead, and check that order is
+ # also preserved
+ with mock.patch.object(session, "execute") as exec_:
+ session.query(User).filter(User.id == 15).update(
+ [('name', 'foob'), ('id', 123)],
+ update_args={"preserve_parameter_order": True})
+ cols = [c.key
+ for c in exec_.mock_calls[0][1][0]._parameter_ordering]
+ eq_(['name', 'id'], cols)
+
class UpdateDeleteIgnoresLoadersTest(fixtures.MappedTest):
diff --git a/test/orm/test_versioning.py b/test/orm/test_versioning.py
index d46799c5a..07b090c60 100644
--- a/test/orm/test_versioning.py
+++ b/test/orm/test_versioning.py
@@ -112,6 +112,61 @@ class VersioningTest(fixtures.MappedTest):
else:
s1.commit()
+ def test_multiple_updates(self):
+ Foo = self.classes.Foo
+
+ s1 = self._fixture()
+ f1 = Foo(value='f1')
+ f2 = Foo(value='f2')
+ s1.add_all((f1, f2))
+ s1.commit()
+
+ f1.value = 'f1rev2'
+ f2.value = 'f2rev2'
+ s1.commit()
+
+ eq_(
+ s1.query(Foo.id, Foo.value, Foo.version_id).order_by(Foo.id).all(),
+ [(f1.id, 'f1rev2', 2), (f2.id, 'f2rev2', 2)]
+ )
+
+ def test_bulk_insert(self):
+ Foo = self.classes.Foo
+
+ s1 = self._fixture()
+ s1.bulk_insert_mappings(
+ Foo,
+ [{"id": 1, "value": "f1"}, {"id": 2, "value": "f2"}]
+ )
+ eq_(
+ s1.query(Foo.id, Foo.value, Foo.version_id).order_by(Foo.id).all(),
+ [(1, 'f1', 1), (2, 'f2', 1)]
+ )
+
+ def test_bulk_update(self):
+ Foo = self.classes.Foo
+
+ s1 = self._fixture()
+ f1 = Foo(value='f1')
+ f2 = Foo(value='f2')
+ s1.add_all((f1, f2))
+ s1.commit()
+
+ s1.bulk_update_mappings(
+ Foo,
+ [
+ {"id": f1.id, "value": "f1rev2", "version_id": 1},
+ {"id": f2.id, "value": "f2rev2", "version_id": 1},
+
+ ]
+ )
+ s1.commit()
+
+ eq_(
+ s1.query(Foo.id, Foo.value, Foo.version_id).order_by(Foo.id).all(),
+ [(f1.id, 'f1rev2', 2), (f2.id, 'f2rev2', 2)]
+ )
+
@testing.emits_warning_on(
'+zxjdbc', r'.*does not support (update|delete)d rowcount')
def test_bump_version(self):
@@ -876,19 +931,26 @@ class ServerVersioningTest(fixtures.MappedTest):
class Bar(cls.Basic):
pass
- def _fixture(self, expire_on_commit=True):
+ def _fixture(self, expire_on_commit=True, eager_defaults=False):
Foo, version_table = self.classes.Foo, self.tables.version_table
mapper(
Foo, version_table, version_id_col=version_table.c.version_id,
version_id_generator=False,
+ eager_defaults=eager_defaults
)
s1 = Session(expire_on_commit=expire_on_commit)
return s1
def test_insert_col(self):
- sess = self._fixture()
+ self._test_insert_col()
+
+ def test_insert_col_eager_defaults(self):
+ self._test_insert_col(eager_defaults=True)
+
+ def _test_insert_col(self, **kw):
+ sess = self._fixture(**kw)
f1 = self.classes.Foo(value='f1')
sess.add(f1)
@@ -917,7 +979,13 @@ class ServerVersioningTest(fixtures.MappedTest):
self.assert_sql_execution(testing.db, sess.flush, *statements)
def test_update_col(self):
- sess = self._fixture()
+ self._test_update_col()
+
+ def test_update_col_eager_defaults(self):
+ self._test_update_col(eager_defaults=True)
+
+ def _test_update_col(self, **kw):
+ sess = self._fixture(**kw)
f1 = self.classes.Foo(value='f1')
sess.add(f1)
@@ -952,6 +1020,76 @@ class ServerVersioningTest(fixtures.MappedTest):
)
self.assert_sql_execution(testing.db, sess.flush, *statements)
+ def test_multi_update(self):
+ sess = self._fixture()
+
+ f1 = self.classes.Foo(value='f1')
+ f2 = self.classes.Foo(value='f2')
+ f3 = self.classes.Foo(value='f3')
+ sess.add_all([f1, f2, f3])
+ sess.flush()
+
+ f1.value = 'f1a'
+ f2.value = 'f2a'
+ f3.value = 'f3a'
+
+ statements = [
+ # note that the assertsql tests the rule against
+ # "default" - on a "returning" backend, the statement
+ # includes "RETURNING"
+ CompiledSQL(
+ "UPDATE version_table SET version_id=2, value=:value "
+ "WHERE version_table.id = :version_table_id AND "
+ "version_table.version_id = :version_table_version_id",
+ lambda ctx: [
+ {
+ "version_table_id": 1,
+ "version_table_version_id": 1, "value": "f1a"}]
+ ),
+ CompiledSQL(
+ "UPDATE version_table SET version_id=2, value=:value "
+ "WHERE version_table.id = :version_table_id AND "
+ "version_table.version_id = :version_table_version_id",
+ lambda ctx: [
+ {
+ "version_table_id": 2,
+ "version_table_version_id": 1, "value": "f2a"}]
+ ),
+ CompiledSQL(
+ "UPDATE version_table SET version_id=2, value=:value "
+ "WHERE version_table.id = :version_table_id AND "
+ "version_table.version_id = :version_table_version_id",
+ lambda ctx: [
+ {
+ "version_table_id": 3,
+ "version_table_version_id": 1, "value": "f3a"}]
+ )
+ ]
+ if not testing.db.dialect.implicit_returning:
+ # DBs without implicit returning, we must immediately
+ # SELECT for the new version id
+ statements.extend([
+ CompiledSQL(
+ "SELECT version_table.version_id "
+ "AS version_table_version_id "
+ "FROM version_table WHERE version_table.id = :param_1",
+ lambda ctx: [{"param_1": 1}]
+ ),
+ CompiledSQL(
+ "SELECT version_table.version_id "
+ "AS version_table_version_id "
+ "FROM version_table WHERE version_table.id = :param_1",
+ lambda ctx: [{"param_1": 2}]
+ ),
+ CompiledSQL(
+ "SELECT version_table.version_id "
+ "AS version_table_version_id "
+ "FROM version_table WHERE version_table.id = :param_1",
+ lambda ctx: [{"param_1": 3}]
+ )
+ ])
+ self.assert_sql_execution(testing.db, sess.flush, *statements)
+
def test_delete_col(self):
sess = self._fixture()
diff --git a/test/profiles.txt b/test/profiles.txt
index 691d1a54d..f6b682be1 100644
--- a/test/profiles.txt
+++ b/test/profiles.txt
@@ -38,7 +38,7 @@ test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_noc
test.aaa_profiling.test_compiler.CompileTest.test_select 2.6_sqlite_pysqlite_nocextensions 157
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_cextensions 153
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_nocextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_cextensions 153
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_cextensions 157
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_nocextensions 153
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_cextensions 153
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_nocextensions 153
@@ -60,7 +60,7 @@ test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_noc
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.6_sqlite_pysqlite_nocextensions 190
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_cextensions 188
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_nocextensions 188
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_cextensions 188
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_cextensions 190
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_nocextensions 188
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_cextensions 188
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_nocextensions 188
@@ -104,7 +104,7 @@ test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_noc
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.6_sqlite_pysqlite_nocextensions 146
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_cextensions 146
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_nocextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 146
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 147
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_nocextensions 146
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_cextensions 146
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_nocextensions 146
@@ -117,7 +117,7 @@ test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_pymysql_cextensions 146
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_pymysql_nocextensions 146
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_cextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_nocextensions 146
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_nocextensions 147
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_pysqlite_cextensions 146
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_pysqlite_nocextensions 146
@@ -126,7 +126,7 @@ test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.6_sqlite_pysqlite_nocextensions 4262
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_mysql_mysqldb_cextensions 4262
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_mysql_mysqldb_nocextensions 4262
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_cextensions 4262
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_cextensions 4257
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_nocextensions 4262
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_cextensions 4262
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_nocextensions 4262
@@ -139,7 +139,7 @@ test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_sqlite_
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_mysql_pymysql_cextensions 4263
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_mysql_pymysql_nocextensions 4263
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_postgresql_psycopg2_cextensions 4263
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_postgresql_psycopg2_nocextensions 4263
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_postgresql_psycopg2_nocextensions 4258
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_sqlite_pysqlite_cextensions 4263
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_sqlite_pysqlite_nocextensions 4263
@@ -170,7 +170,7 @@ test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.6_sqlite_pysqlite_nocextensions 26358
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_mysql_mysqldb_cextensions 16194
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_mysql_mysqldb_nocextensions 25197
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_cextensions 28177
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_cextensions 29184
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_nocextensions 37180
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_cextensions 16329
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_nocextensions 25332
@@ -183,7 +183,7 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_n
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_mysql_pymysql_cextensions 83733
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_mysql_pymysql_nocextensions 92736
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_cextensions 18221
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_nocextensions 27224
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_nocextensions 27201
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_cextensions 18393
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_nocextensions 27396
@@ -192,7 +192,7 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_n
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.6_sqlite_pysqlite_nocextensions 26282
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_mysql_mysqldb_cextensions 22212
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_mysql_mysqldb_nocextensions 25215
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_cextensions 22183
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_cextensions 23196
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_nocextensions 25186
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_cextensions 22269
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_nocextensions 25272
@@ -205,7 +205,7 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pys
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_mysql_pymysql_cextensions 47353
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_mysql_pymysql_nocextensions 50356
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_cextensions 24215
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_nocextensions 27218
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_nocextensions 27220
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_sqlite_pysqlite_cextensions 24321
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_sqlite_pysqlite_nocextensions 27324
@@ -236,7 +236,7 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.6_sqlite_pysqlite_nocextensions 161101
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_mysql_mysqldb_cextensions 127101
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_mysql_mysqldb_nocextensions 128851
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_cextensions 120101
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_cextensions 123351
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_nocextensions 121851
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_cextensions 156351
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_nocextensions 158054
@@ -249,7 +249,7 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_mysql_pymysql_cextensions 187056
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_mysql_pymysql_nocextensions 188855
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_cextensions 128556
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_nocextensions 130306
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_nocextensions 130356
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_sqlite_pysqlite_cextensions 168806
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_sqlite_pysqlite_nocextensions 170556
@@ -258,7 +258,7 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.6_sqlite_pysqlite_nocextensions 21505
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_mysql_mysqldb_cextensions 19393
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_mysql_mysqldb_nocextensions 19597
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_cextensions 18881
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_cextensions 19024
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_nocextensions 19085
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_cextensions 21186
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_nocextensions 21437
@@ -271,7 +271,7 @@ test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_mysql_pymysql_cextensions 23716
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_mysql_pymysql_nocextensions 23871
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_cextensions 19552
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_nocextensions 19744
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_nocextensions 19727
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_sqlite_pysqlite_cextensions 22051
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_sqlite_pysqlite_nocextensions 22255
@@ -280,7 +280,7 @@ test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.6_sqlite_pysqlite_nocextensions 1520
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_mysql_mysqldb_cextensions 1400
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_mysql_mysqldb_nocextensions 1415
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_cextensions 1319
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_cextensions 1309
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_nocextensions 1334
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_cextensions 1527
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_nocextensions 1542
@@ -293,7 +293,7 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_nocext
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_mysql_pymysql_cextensions 2038
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_mysql_pymysql_nocextensions 2053
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_cextensions 1335
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_nocextensions 1350
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_nocextensions 1354
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_cextensions 1577
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_nocextensions 1592
@@ -302,7 +302,7 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_nocext
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.6_sqlite_pysqlite_nocextensions 89,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_mysql_mysqldb_cextensions 93,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_mysql_mysqldb_nocextensions 93,19
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_cextensions 93,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_cextensions 91,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_nocextensions 93,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_cextensions 93,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_nocextensions 93,19
@@ -315,7 +315,7 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_noc
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_mysql_pymysql_cextensions 92,20
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_mysql_pymysql_nocextensions 92,20
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_cextensions 92,20
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_nocextensions 92,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_nocextensions 94,20
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_cextensions 92,20
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_nocextensions 92,20
@@ -324,7 +324,7 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_noc
test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.6_sqlite_pysqlite_nocextensions 8064
test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_mysql_mysqldb_cextensions 6220
test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_mysql_mysqldb_nocextensions 6750
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_postgresql_psycopg2_cextensions 6790
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_postgresql_psycopg2_cextensions 6798
test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_postgresql_psycopg2_nocextensions 7320
test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_sqlite_pysqlite_cextensions 7564
test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_sqlite_pysqlite_nocextensions 8094
@@ -337,7 +337,7 @@ test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.3_sqlite_pysqlite_nocext
test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_mysql_pymysql_cextensions 13744
test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_mysql_pymysql_nocextensions 14274
test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_postgresql_psycopg2_cextensions 6234
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_postgresql_psycopg2_nocextensions 6674
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_postgresql_psycopg2_nocextensions 6702
test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_sqlite_pysqlite_cextensions 7846
test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_sqlite_pysqlite_nocextensions 8376
@@ -346,7 +346,7 @@ test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_sqlite_pysqlite_nocext
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.6_sqlite_pysqlite_nocextensions 1156
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_mysql_mysqldb_cextensions 1145
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_mysql_mysqldb_nocextensions 1148
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_postgresql_psycopg2_cextensions 1160
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_postgresql_psycopg2_cextensions 1139
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_postgresql_psycopg2_nocextensions 1161
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_sqlite_pysqlite_cextensions 1151
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_sqlite_pysqlite_nocextensions 1145
@@ -359,7 +359,7 @@ test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.3_sqlite_pysqlite_noc
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_mysql_pymysql_cextensions 1254
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_mysql_pymysql_nocextensions 1280
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_postgresql_psycopg2_cextensions 1247
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_postgresql_psycopg2_nocextensions 1262
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_postgresql_psycopg2_nocextensions 1263
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_sqlite_pysqlite_cextensions 1238
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_sqlite_pysqlite_nocextensions 1272
@@ -368,7 +368,7 @@ test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_sqlite_pysqlite_noc
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.6_sqlite_pysqlite_nocextensions 97
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_mysql_mysqldb_cextensions 95
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_mysql_mysqldb_nocextensions 95
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_cextensions 95
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_cextensions 96
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_nocextensions 95
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_cextensions 95
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_nocextensions 95
@@ -500,7 +500,7 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.6_sqlite_pysqlite_nocextensions 15439
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_cextensions 488
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_nocextensions 15488
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_cextensions 20477
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_cextensions 20497
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_nocextensions 35477
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_cextensions 419
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_nocextensions 15419
@@ -522,7 +522,7 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.6_sqlite_pysqlite_nocextensions 15439
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_cextensions 488
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_nocextensions 45488
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_cextensions 20477
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_cextensions 20497
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_nocextensions 35477
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_cextensions 419
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_nocextensions 15419
@@ -541,18 +541,18 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite
# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 5811,295,3577,11462,1134,1973,2434
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 5823,295,3721,11938,1146,2017,2481
test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_nocextensions 5833,295,3681,12720,1241,1980,2655
test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 5591,277,3569,11458,1134,1924,2489
test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 5613,277,3665,12630,1228,1931,2681
test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 5619,277,3705,11902,1144,1966,2532
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 5624,277,3801,13074,1238,1970,2724
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 5625,277,3809,13110,1240,1975,2733
# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 6256,402,6599,17140,1146,2569
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 6437,410,6761,17665,1159,2627
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_nocextensions 6341,407,6703,18167,1244,2598
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 6228,393,6747,17582,1148,2623
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 6318,398,6851,18609,1234,2652
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 6257,393,6891,18056,1159,2671
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 6341,398,6995,19083,1245,2700
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 6418,401,7005,19115,1247,2706
diff --git a/test/requirements.py b/test/requirements.py
index db4daca20..ff93a9c3d 100644
--- a/test/requirements.py
+++ b/test/requirements.py
@@ -293,7 +293,6 @@ class DefaultRequirements(SuiteRequirements):
named 'test_schema'."""
return skip_if([
- "sqlite",
"firebird"
], "no schema support")
@@ -362,6 +361,32 @@ class DefaultRequirements(SuiteRequirements):
], 'no support for EXCEPT')
@property
+ def parens_in_union_contained_select_w_limit_offset(self):
+ """Target database must support parenthesized SELECT in UNION
+ when LIMIT/OFFSET is specifically present.
+
+ E.g. (SELECT ...) UNION (SELECT ..)
+
+ This is known to fail on SQLite.
+
+ """
+ return fails_if('sqlite')
+
+ @property
+ def parens_in_union_contained_select_wo_limit_offset(self):
+ """Target database must support parenthesized SELECT in UNION
+ when OFFSET/LIMIT is specifically not present.
+
+ E.g. (SELECT ... LIMIT ..) UNION (SELECT .. OFFSET ..)
+
+ This is known to fail on SQLite. It also fails on Oracle
+ because without LIMIT/OFFSET, there is currently no step that
+ creates an additional subquery.
+
+ """
+ return fails_if(['sqlite', 'oracle'])
+
+ @property
def offset(self):
"""Target database must support some method of adding OFFSET or
equivalent to a result set."""
@@ -758,7 +783,7 @@ class DefaultRequirements(SuiteRequirements):
@property
def postgresql_jsonb(self):
- return skip_if(
+ return only_on("postgresql >= 9.4") + skip_if(
lambda config:
config.db.dialect.driver == "pg8000" and
config.db.dialect._dbapi_version <= (1, 10, 1)
@@ -841,6 +866,10 @@ class DefaultRequirements(SuiteRequirements):
return skip_if(["oracle", "firebird"], "non-standard SELECT scalar syntax")
@property
+ def mysql_fsp(self):
+ return only_if('mysql >= 5.6.4')
+
+ @property
def mysql_fully_case_sensitive(self):
return only_if(self._has_mysql_fully_case_sensitive)
diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py
index 04e3171a9..ffd13309b 100644
--- a/test/sql/test_compiler.py
+++ b/test/sql/test_compiler.py
@@ -18,7 +18,7 @@ from sqlalchemy import Integer, String, MetaData, Table, Column, select, \
literal, and_, null, type_coerce, alias, or_, literal_column,\
Float, TIMESTAMP, Numeric, Date, Text, union, except_,\
intersect, union_all, Boolean, distinct, join, outerjoin, asc, desc,\
- over, subquery, case, true
+ over, subquery, case, true, CheckConstraint
import decimal
from sqlalchemy.util import u
from sqlalchemy import exc, sql, util, types, schema
@@ -1643,14 +1643,12 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
s = select([column('foo'), column('bar')])
- # ORDER BY's even though not supported by
- # all DB's, are rendered if requested
self.assert_compile(
union(
s.order_by("foo"),
s.order_by("bar")),
- "SELECT foo, bar ORDER BY foo UNION SELECT foo, bar ORDER BY bar")
- # self_group() is honored
+ "(SELECT foo, bar ORDER BY foo) UNION "
+ "(SELECT foo, bar ORDER BY bar)")
self.assert_compile(
union(s.order_by("foo").self_group(),
s.order_by("bar").limit(10).self_group()),
@@ -1759,6 +1757,67 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
"SELECT foo, bar FROM bat)"
)
+ # tests for [ticket:2528]
+ # sqlite hates all of these.
+ self.assert_compile(
+ union(
+ s.limit(1),
+ s.offset(2)
+ ),
+ "(SELECT foo, bar FROM bat LIMIT :param_1) "
+ "UNION (SELECT foo, bar FROM bat LIMIT -1 OFFSET :param_2)"
+ )
+
+ self.assert_compile(
+ union(
+ s.order_by(column('bar')),
+ s.offset(2)
+ ),
+ "(SELECT foo, bar FROM bat ORDER BY bar) "
+ "UNION (SELECT foo, bar FROM bat LIMIT -1 OFFSET :param_1)"
+ )
+
+ self.assert_compile(
+ union(
+ s.limit(1).alias('a'),
+ s.limit(2).alias('b')
+ ),
+ "(SELECT foo, bar FROM bat LIMIT :param_1) "
+ "UNION (SELECT foo, bar FROM bat LIMIT :param_2)"
+ )
+
+ self.assert_compile(
+ union(
+ s.limit(1).self_group(),
+ s.limit(2).self_group()
+ ),
+ "(SELECT foo, bar FROM bat LIMIT :param_1) "
+ "UNION (SELECT foo, bar FROM bat LIMIT :param_2)"
+ )
+
+ self.assert_compile(
+ union(s.limit(1), s.limit(2).offset(3)).alias().select(),
+ "SELECT anon_1.foo, anon_1.bar FROM "
+ "((SELECT foo, bar FROM bat LIMIT :param_1) "
+ "UNION (SELECT foo, bar FROM bat LIMIT :param_2 OFFSET :param_3)) "
+ "AS anon_1"
+ )
+
+ # this version works for SQLite
+ self.assert_compile(
+ union(
+ s.limit(1).alias().select(),
+ s.offset(2).alias().select(),
+ ),
+ "SELECT anon_1.foo, anon_1.bar "
+ "FROM (SELECT foo, bar FROM bat"
+ " LIMIT :param_1) AS anon_1 "
+ "UNION SELECT anon_2.foo, anon_2.bar "
+ "FROM (SELECT foo, bar "
+ "FROM bat"
+ " LIMIT -1 OFFSET :param_2) AS anon_2"
+ )
+
def test_binds(self):
for (
stmt,
@@ -2040,6 +2099,8 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
'Incorrect number of expected results')
eq_(str(cast(tbl.c.v1, Numeric).compile(dialect=dialect)),
'CAST(casttest.v1 AS %s)' % expected_results[0])
+ eq_(str(tbl.c.v1.cast(Numeric).compile(dialect=dialect)),
+ 'CAST(casttest.v1 AS %s)' % expected_results[0])
eq_(str(cast(tbl.c.v1, Numeric(12, 9)).compile(dialect=dialect)),
'CAST(casttest.v1 AS %s)' % expected_results[1])
eq_(str(cast(tbl.c.ts, Date).compile(dialect=dialect)),
@@ -2855,6 +2916,45 @@ class DDLTest(fixtures.TestBase, AssertsCompiledSQL):
"CREATE TABLE t (x INTEGER, z INTEGER)"
)
+ def test_composite_pk_constraint_autoinc_first(self):
+ m = MetaData()
+ t = Table(
+ 't', m,
+ Column('a', Integer, primary_key=True),
+ Column('b', Integer, primary_key=True, autoincrement=True)
+ )
+ self.assert_compile(
+ schema.CreateTable(t),
+ "CREATE TABLE t ("
+ "a INTEGER NOT NULL, "
+ "b INTEGER NOT NULL, "
+ "PRIMARY KEY (b, a))"
+ )
+
+ def test_table_no_cols(self):
+ m = MetaData()
+ t1 = Table('t1', m)
+ self.assert_compile(
+ schema.CreateTable(t1),
+ "CREATE TABLE t1 ()"
+ )
+
+ def test_table_no_cols_w_constraint(self):
+ m = MetaData()
+ t1 = Table('t1', m, CheckConstraint('a = 1'))
+ self.assert_compile(
+ schema.CreateTable(t1),
+ "CREATE TABLE t1 (CHECK (a = 1))"
+ )
+
+ def test_table_one_col_w_constraint(self):
+ m = MetaData()
+ t1 = Table('t1', m, Column('q', Integer), CheckConstraint('a = 1'))
+ self.assert_compile(
+ schema.CreateTable(t1),
+ "CREATE TABLE t1 (q INTEGER, CHECK (a = 1))"
+ )
+
class InlineDefaultTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -3423,13 +3523,15 @@ class ResultMapTest(fixtures.TestBase):
tc = type_coerce(t.c.a, String)
stmt = select([t.c.a, l1, tc])
comp = stmt.compile()
- tc_anon_label = comp._create_result_map()['a_1'][1][0]
+ tc_anon_label = comp._create_result_map()['anon_1'][1][0]
eq_(
comp._create_result_map(),
{
'a': ('a', (t.c.a, 'a', 'a'), t.c.a.type),
'bar': ('bar', (l1, 'bar'), l1.type),
- 'a_1': ('%%(%d a)s' % id(tc), (tc_anon_label, 'a_1'), tc.type),
+ 'anon_1': (
+ '%%(%d anon)s' % id(tc),
+ (tc_anon_label, 'anon_1', tc), tc.type),
},
)
diff --git a/test/sql/test_defaults.py b/test/sql/test_defaults.py
index c154daa22..e21b21ab2 100644
--- a/test/sql/test_defaults.py
+++ b/test/sql/test_defaults.py
@@ -123,6 +123,14 @@ class DefaultTest(fixtures.TestBase):
def gen_default(cls, ctx):
return "hi"
+ class MyType(TypeDecorator):
+ impl = String(50)
+
+ def process_bind_param(self, value, dialect):
+ if value is not None:
+ value = "BIND" + value
+ return value
+
# select "count(1)" returns different results on different DBs also
# correct for "current_date" compatible as column default, value
# differences
@@ -211,7 +219,10 @@ class DefaultTest(fixtures.TestBase):
server_default='ddl'),
# python method w/ context
- Column('col10', String(20), default=MyClass.gen_default)
+ Column('col10', String(20), default=MyClass.gen_default),
+
+ # fixed default w/ type that has bound processor
+ Column('col11', MyType(), default='foo')
)
t.create()
@@ -290,6 +301,7 @@ class DefaultTest(fixtures.TestBase):
c = sa.ColumnDefault(fn)
c.arg("context")
+
@testing.fails_on('firebird', 'Data type unknown')
def test_standalone(self):
c = testing.db.engine.contextual_connect()
@@ -391,7 +403,7 @@ class DefaultTest(fixtures.TestBase):
today = datetime.date.today()
eq_(l.fetchall(), [
(x, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi')
+ 12, today, 'py', 'hi', 'BINDfoo')
for x in range(51, 54)])
t.insert().execute(col9=None)
@@ -401,7 +413,7 @@ class DefaultTest(fixtures.TestBase):
eq_(t.select(t.c.col1 == 54).execute().fetchall(),
[(54, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, None, 'hi')])
+ 12, today, None, 'hi', 'BINDfoo')])
def test_insertmany(self):
t.insert().execute({}, {}, {})
@@ -411,11 +423,11 @@ class DefaultTest(fixtures.TestBase):
today = datetime.date.today()
eq_(l.fetchall(),
[(51, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi'),
+ 12, today, 'py', 'hi', 'BINDfoo'),
(52, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi'),
+ 12, today, 'py', 'hi', 'BINDfoo'),
(53, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi')])
+ 12, today, 'py', 'hi', 'BINDfoo')])
@testing.requires.multivalues_inserts
def test_insert_multivalues(self):
@@ -427,11 +439,11 @@ class DefaultTest(fixtures.TestBase):
today = datetime.date.today()
eq_(l.fetchall(),
[(51, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi'),
+ 12, today, 'py', 'hi', 'BINDfoo'),
(52, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi'),
+ 12, today, 'py', 'hi', 'BINDfoo'),
(53, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi')])
+ 12, today, 'py', 'hi', 'BINDfoo')])
def test_no_embed_in_sql(self):
"""Using a DefaultGenerator, Sequence, DefaultClause
@@ -498,11 +510,11 @@ class DefaultTest(fixtures.TestBase):
today = datetime.date.today()
eq_(l.fetchall(),
[(51, 'im the update', f2, ts, ts, ctexec, False, False,
- 13, today, 'py', 'hi'),
+ 13, today, 'py', 'hi', 'BINDfoo'),
(52, 'im the update', f2, ts, ts, ctexec, True, False,
- 13, today, 'py', 'hi'),
+ 13, today, 'py', 'hi', 'BINDfoo'),
(53, 'im the update', f2, ts, ts, ctexec, True, False,
- 13, today, 'py', 'hi')])
+ 13, today, 'py', 'hi', 'BINDfoo')])
@testing.fails_on('firebird', 'Data type unknown')
def test_update(self):
@@ -514,7 +526,7 @@ class DefaultTest(fixtures.TestBase):
l = l.first()
eq_(l,
(pk, 'im the update', f2, None, None, ctexec, True, False,
- 13, datetime.date.today(), 'py', 'hi'))
+ 13, datetime.date.today(), 'py', 'hi', 'BINDfoo'))
eq_(11, f2)
@testing.fails_on('firebird', 'Data type unknown')
@@ -721,7 +733,6 @@ class AutoIncrementTest(fixtures.TablesTest):
)
assert x._autoincrement_column is None
- @testing.fails_on('sqlite', 'FIXME: unknown')
def test_non_autoincrement(self):
# sqlite INT primary keys can be non-unique! (only for ints)
nonai = Table(
@@ -735,8 +746,9 @@ class AutoIncrementTest(fixtures.TablesTest):
# mysql in legacy mode fails on second row
nonai.insert().execute(data='row 1')
nonai.insert().execute(data='row 2')
- assert_raises(
- sa.exc.DBAPIError,
+ assert_raises_message(
+ sa.exc.CompileError,
+ ".*has no Python-side or server-side default.*",
go
)
@@ -793,6 +805,36 @@ class SequenceDDLTest(fixtures.TestBase, testing.AssertsCompiledSQL):
)
self.assert_compile(
+ CreateSequence(Sequence(
+ 'foo_seq', increment=2, start=0, minvalue=0)),
+ "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 0 MINVALUE 0",
+ )
+
+ self.assert_compile(
+ CreateSequence(Sequence(
+ 'foo_seq', increment=2, start=1, maxvalue=5)),
+ "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 1 MAXVALUE 5",
+ )
+
+ self.assert_compile(
+ CreateSequence(Sequence(
+ 'foo_seq', increment=2, start=1, nomaxvalue=True)),
+ "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 1 NO MAXVALUE",
+ )
+
+ self.assert_compile(
+ CreateSequence(Sequence(
+ 'foo_seq', increment=2, start=0, nominvalue=True)),
+ "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 0 NO MINVALUE",
+ )
+
+ self.assert_compile(
+ CreateSequence(Sequence(
+ 'foo_seq', start=1, maxvalue=10, cycle=True)),
+ "CREATE SEQUENCE foo_seq START WITH 1 MAXVALUE 10 CYCLE",
+ )
+
+ self.assert_compile(
DropSequence(Sequence('foo_seq')),
"DROP SEQUENCE foo_seq",
)
@@ -1039,6 +1081,23 @@ class SequenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
assert not self._has_sequence('s1')
assert not self._has_sequence('s2')
+ @testing.requires.returning
+ @testing.provide_metadata
+ def test_freestanding_sequence_via_autoinc(self):
+ t = Table(
+ 'some_table', self.metadata,
+ Column(
+ 'id', Integer,
+ autoincrement=True,
+ primary_key=True,
+ default=Sequence(
+ 'my_sequence', metadata=self.metadata).next_value())
+ )
+ self.metadata.create_all(testing.db)
+
+ result = testing.db.execute(t.insert())
+ eq_(result.inserted_primary_key, [1])
+
cartitems = sometable = metadata = None
diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py
index ec8d9b5c0..51cfcb919 100644
--- a/test/sql/test_functions.py
+++ b/test/sql/test_functions.py
@@ -1,20 +1,20 @@
-from sqlalchemy.testing import eq_
+from sqlalchemy.testing import eq_, is_
import datetime
from sqlalchemy import func, select, Integer, literal, DateTime, Table, \
Column, Sequence, MetaData, extract, Date, String, bindparam, \
- literal_column
+ literal_column, Array, Numeric
from sqlalchemy.sql import table, column
from sqlalchemy import sql, util
from sqlalchemy.sql.compiler import BIND_TEMPLATES
from sqlalchemy.testing.engines import all_dialects
from sqlalchemy import types as sqltypes
from sqlalchemy.sql import functions
-from sqlalchemy.sql.functions import GenericFunction
+from sqlalchemy.sql.functions import GenericFunction, FunctionElement
import decimal
from sqlalchemy import testing
from sqlalchemy.testing import fixtures, AssertsCompiledSQL, engines
from sqlalchemy.dialects import sqlite, postgresql, mysql, oracle
-
+from sqlalchemy.testing import assert_raises_message
table1 = table('mytable',
column('myid', Integer),
@@ -52,7 +52,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
self.assert_compile(
fake_func('foo'),
"fake_func(%s)" %
- bindtemplate % {'name': 'param_1', 'position': 1},
+ bindtemplate % {'name': 'fake_func_1', 'position': 1},
dialect=dialect)
def test_use_labels(self):
@@ -89,7 +89,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
def test_generic_annotation(self):
fn = func.coalesce('x', 'y')._annotate({"foo": "bar"})
self.assert_compile(
- fn, "coalesce(:param_1, :param_2)"
+ fn, "coalesce(:coalesce_1, :coalesce_2)"
)
def test_custom_default_namespace(self):
@@ -140,7 +140,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
self.assert_compile(
func.my_func(1, 2),
- "my_func(:param_1, :param_2, :param_3)"
+ "my_func(:my_func_1, :my_func_2, :my_func_3)"
)
def test_custom_registered_identifier(self):
@@ -178,7 +178,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
self.assert_compile(
myfunc(1, 2, 3),
- "myfunc(:param_1, :param_2, :param_3)"
+ "myfunc(:myfunc_1, :myfunc_2, :myfunc_3)"
)
def test_namespacing_conflicts(self):
@@ -188,7 +188,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
assert isinstance(func.count().type, sqltypes.Integer)
self.assert_compile(func.count(), 'count(*)')
- self.assert_compile(func.count(1), 'count(:param_1)')
+ self.assert_compile(func.count(1), 'count(:count_1)')
c = column('abc')
self.assert_compile(func.count(c), 'count(abc)')
@@ -378,7 +378,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
def test_funcfilter_empty(self):
self.assert_compile(
func.count(1).filter(),
- "count(:param_1)"
+ "count(:count_1)"
)
def test_funcfilter_criterion(self):
@@ -386,7 +386,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
func.count(1).filter(
table1.c.name != None
),
- "count(:param_1) FILTER (WHERE mytable.name IS NOT NULL)"
+ "count(:count_1) FILTER (WHERE mytable.name IS NOT NULL)"
)
def test_funcfilter_compound_criterion(self):
@@ -395,7 +395,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
table1.c.name == None,
table1.c.myid > 0
),
- "count(:param_1) FILTER (WHERE mytable.name IS NULL AND "
+ "count(:count_1) FILTER (WHERE mytable.name IS NULL AND "
"mytable.myid > :myid_1)"
)
@@ -404,7 +404,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
select([func.count(1).filter(
table1.c.description != None
).label('foo')]),
- "SELECT count(:param_1) FILTER (WHERE mytable.description "
+ "SELECT count(:count_1) FILTER (WHERE mytable.description "
"IS NOT NULL) AS foo FROM mytable"
)
@@ -429,7 +429,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
table1.c.name == 'name'
)
]),
- "SELECT count(:param_1) FILTER (WHERE mytable.name = :name_1) "
+ "SELECT count(:count_1) FILTER (WHERE mytable.name = :name_1) "
"AS anon_1 FROM mytable"
)
@@ -443,7 +443,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
table1.c.description == 'description'
)
]),
- "SELECT count(:param_1) FILTER (WHERE "
+ "SELECT count(:count_1) FILTER (WHERE "
"mytable.name = :name_1 AND mytable.description = :description_1) "
"AS anon_1 FROM mytable"
)
@@ -477,6 +477,121 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"AS anon_1 FROM mytable"
)
+ def test_funcfilter_within_group(self):
+ stmt = select([
+ table1.c.myid,
+ func.percentile_cont(0.5).within_group(
+ table1.c.name
+ )
+ ])
+ self.assert_compile(
+ stmt,
+ "SELECT mytable.myid, percentile_cont(:percentile_cont_1) "
+ "WITHIN GROUP (ORDER BY mytable.name) "
+ "AS anon_1 "
+ "FROM mytable",
+ {'percentile_cont_1': 0.5}
+ )
+
+ def test_funcfilter_within_group_multi(self):
+ stmt = select([
+ table1.c.myid,
+ func.percentile_cont(0.5).within_group(
+ table1.c.name, table1.c.description
+ )
+ ])
+ self.assert_compile(
+ stmt,
+ "SELECT mytable.myid, percentile_cont(:percentile_cont_1) "
+ "WITHIN GROUP (ORDER BY mytable.name, mytable.description) "
+ "AS anon_1 "
+ "FROM mytable",
+ {'percentile_cont_1': 0.5}
+ )
+
+ def test_funcfilter_within_group_desc(self):
+ stmt = select([
+ table1.c.myid,
+ func.percentile_cont(0.5).within_group(
+ table1.c.name.desc()
+ )
+ ])
+ self.assert_compile(
+ stmt,
+ "SELECT mytable.myid, percentile_cont(:percentile_cont_1) "
+ "WITHIN GROUP (ORDER BY mytable.name DESC) "
+ "AS anon_1 "
+ "FROM mytable",
+ {'percentile_cont_1': 0.5}
+ )
+
+ def test_funcfilter_within_group_w_over(self):
+ stmt = select([
+ table1.c.myid,
+ func.percentile_cont(0.5).within_group(
+ table1.c.name.desc()
+ ).over(partition_by=table1.c.description)
+ ])
+ self.assert_compile(
+ stmt,
+ "SELECT mytable.myid, percentile_cont(:percentile_cont_1) "
+ "WITHIN GROUP (ORDER BY mytable.name DESC) "
+ "OVER (PARTITION BY mytable.description) AS anon_1 "
+ "FROM mytable",
+ {'percentile_cont_1': 0.5}
+ )
+
+ def test_incorrect_none_type(self):
+ class MissingType(FunctionElement):
+ name = 'mt'
+ type = None
+
+ assert_raises_message(
+ TypeError,
+ "Object None associated with '.type' attribute is "
+ "not a TypeEngine class or object",
+ MissingType().compile
+ )
+
+
+class ReturnTypeTest(fixtures.TestBase):
+
+ def test_array_agg(self):
+ expr = func.array_agg(column('data', Integer))
+ is_(expr.type._type_affinity, Array)
+ is_(expr.type.item_type._type_affinity, Integer)
+
+ def test_mode(self):
+ expr = func.mode(0.5).within_group(
+ column('data', Integer).desc())
+ is_(expr.type._type_affinity, Integer)
+
+ def test_percentile_cont(self):
+ expr = func.percentile_cont(0.5).within_group(column('data', Integer))
+ is_(expr.type._type_affinity, Integer)
+
+ def test_percentile_cont_array(self):
+ expr = func.percentile_cont(0.5, 0.7).within_group(
+ column('data', Integer))
+ is_(expr.type._type_affinity, Array)
+ is_(expr.type.item_type._type_affinity, Integer)
+
+ def test_percentile_cont_array_desc(self):
+ expr = func.percentile_cont(0.5, 0.7).within_group(
+ column('data', Integer).desc())
+ is_(expr.type._type_affinity, Array)
+ is_(expr.type.item_type._type_affinity, Integer)
+
+ def test_cume_dist(self):
+ expr = func.cume_dist(0.5).within_group(
+ column('data', Integer).desc())
+ is_(expr.type._type_affinity, Numeric)
+
+ def test_percent_rank(self):
+ expr = func.percent_rank(0.5).within_group(
+ column('data', Integer))
+ is_(expr.type._type_affinity, Numeric)
+
class ExecuteTest(fixtures.TestBase):
diff --git a/test/sql/test_insert.py b/test/sql/test_insert.py
index 3c533d75f..ea4de032c 100644
--- a/test/sql/test_insert.py
+++ b/test/sql/test_insert.py
@@ -5,7 +5,7 @@ from sqlalchemy import Column, Integer, MetaData, String, Table,\
from sqlalchemy.dialects import mysql, postgresql
from sqlalchemy.engine import default
from sqlalchemy.testing import AssertsCompiledSQL,\
- assert_raises_message, fixtures
+ assert_raises_message, fixtures, eq_
from sqlalchemy.sql import crud
@@ -319,6 +319,32 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
checkparams={"name_1": "foo", "foo": None}
)
+ def test_insert_from_select_dont_mutate_raw_columns(self):
+ # test [ticket:3603]
+ from sqlalchemy import table
+ table_ = table(
+ 'mytable',
+ Column('foo', String),
+ Column('bar', String, default='baz'),
+ )
+
+ stmt = select([table_.c.foo])
+ insert = table_.insert().from_select(['foo'], stmt)
+
+ self.assert_compile(stmt, "SELECT mytable.foo FROM mytable")
+ self.assert_compile(
+ insert,
+ "INSERT INTO mytable (foo, bar) "
+ "SELECT mytable.foo, :bar AS anon_1 FROM mytable"
+ )
+ self.assert_compile(stmt, "SELECT mytable.foo FROM mytable")
+ self.assert_compile(
+ insert,
+ "INSERT INTO mytable (foo, bar) "
+ "SELECT mytable.foo, :bar AS anon_1 FROM mytable"
+ )
+
+
def test_insert_mix_select_values_exception(self):
table1 = self.tables.mytable
sel = select([table1.c.myid, table1.c.name]).where(
@@ -390,6 +416,106 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
checkparams={"name_1": "foo"}
)
+ def test_anticipate_no_pk_composite_pk(self):
+ t = Table(
+ 't', MetaData(), Column('x', Integer, primary_key=True),
+ Column('y', Integer, primary_key=True)
+ )
+ assert_raises_message(
+ exc.CompileError,
+ "Column 't.y' is marked as a member.*"
+ "Note that as of SQLAlchemy 1.1,",
+ t.insert().compile, column_keys=['x']
+
+ )
+
+ def test_anticipate_no_pk_composite_pk_implicit_returning(self):
+ t = Table(
+ 't', MetaData(), Column('x', Integer, primary_key=True),
+ Column('y', Integer, primary_key=True)
+ )
+ d = postgresql.dialect()
+ d.implicit_returning = True
+ assert_raises_message(
+ exc.CompileError,
+ "Column 't.y' is marked as a member.*"
+ "Note that as of SQLAlchemy 1.1,",
+ t.insert().compile, dialect=d, column_keys=['x']
+
+ )
+
+ def test_anticipate_no_pk_composite_pk_prefetch(self):
+ t = Table(
+ 't', MetaData(), Column('x', Integer, primary_key=True),
+ Column('y', Integer, primary_key=True)
+ )
+ d = postgresql.dialect()
+ d.implicit_returning = False
+ assert_raises_message(
+ exc.CompileError,
+ "Column 't.y' is marked as a member.*"
+ "Note that as of SQLAlchemy 1.1,",
+ t.insert().compile, dialect=d, column_keys=['x']
+
+ )
+
+ def test_anticipate_nullable_composite_pk(self):
+ t = Table(
+ 't', MetaData(), Column('x', Integer, primary_key=True),
+ Column('y', Integer, primary_key=True, nullable=True)
+ )
+ self.assert_compile(
+ t.insert(),
+ "INSERT INTO t (x) VALUES (:x)",
+ params={'x': 5},
+ )
+
+ def test_anticipate_no_pk_non_composite_pk(self):
+ t = Table(
+ 't', MetaData(),
+ Column('x', Integer, primary_key=True, autoincrement=False),
+ Column('q', Integer)
+ )
+ assert_raises_message(
+ exc.CompileError,
+ "Column 't.x' is marked as a member.*"
+ "may not store NULL.$",
+ t.insert().compile, column_keys=['q']
+
+ )
+
+ def test_anticipate_no_pk_non_composite_pk_implicit_returning(self):
+ t = Table(
+ 't', MetaData(),
+ Column('x', Integer, primary_key=True, autoincrement=False),
+ Column('q', Integer)
+ )
+ d = postgresql.dialect()
+ d.implicit_returning = True
+ assert_raises_message(
+ exc.CompileError,
+ "Column 't.x' is marked as a member.*"
+ "may not store NULL.$",
+ t.insert().compile, dialect=d, column_keys=['q']
+
+ )
+
+ def test_anticipate_no_pk_non_composite_pk_prefetch(self):
+ t = Table(
+ 't', MetaData(),
+ Column('x', Integer, primary_key=True, autoincrement=False),
+ Column('q', Integer)
+ )
+ d = postgresql.dialect()
+ d.implicit_returning = False
+ assert_raises_message(
+ exc.CompileError,
+ "Column 't.x' is marked as a member.*"
+ "may not store NULL.$",
+ t.insert().compile, dialect=d, column_keys=['q']
+
+ )
+
class InsertImplicitReturningTest(
_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
@@ -694,8 +820,21 @@ class MultirowTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
'foo_2': None # evaluated later
}
+ stmt = table.insert().values(values)
+
+ eq_(
+ dict([
+ (k, v.type._type_affinity)
+ for (k, v) in
+ stmt.compile(dialect=postgresql.dialect()).binds.items()]),
+ {
+ 'foo': Integer, 'data_2': String, 'id_0': Integer,
+ 'id_2': Integer, 'foo_1': Integer, 'data_1': String,
+ 'id_1': Integer, 'foo_2': Integer, 'data_0': String}
+ )
+
self.assert_compile(
- table.insert().values(values),
+ stmt,
'INSERT INTO sometable (id, data, foo) VALUES '
'(%(id_0)s, %(data_0)s, %(foo)s), '
'(%(id_1)s, %(data_1)s, %(foo_1)s), '
@@ -728,8 +867,20 @@ class MultirowTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
'foo_2': None, # evaluated later
}
+ stmt = table.insert().values(values)
+ eq_(
+ dict([
+ (k, v.type._type_affinity)
+ for (k, v) in
+ stmt.compile(dialect=postgresql.dialect()).binds.items()]),
+ {
+ 'foo': Integer, 'data_2': String, 'id_0': Integer,
+ 'id_2': Integer, 'foo_1': Integer, 'data_1': String,
+ 'id_1': Integer, 'foo_2': Integer, 'data_0': String}
+ )
+
self.assert_compile(
- table.insert().values(values),
+ stmt,
"INSERT INTO sometable (id, data, foo) VALUES "
"(%(id_0)s, %(data_0)s, %(foo)s), "
"(%(id_1)s, %(data_1)s, %(foo_1)s), "
diff --git a/test/sql/test_insert_exec.py b/test/sql/test_insert_exec.py
new file mode 100644
index 000000000..c49947425
--- /dev/null
+++ b/test/sql/test_insert_exec.py
@@ -0,0 +1,445 @@
+from sqlalchemy.testing import eq_, assert_raises_message, is_
+from sqlalchemy import testing
+from sqlalchemy.testing import fixtures, engines
+from sqlalchemy import (
+ exc, sql, String, Integer, MetaData, and_, ForeignKey,
+ VARCHAR, INT, Sequence, func)
+from sqlalchemy.testing.schema import Table, Column
+
+
+class InsertExecTest(fixtures.TablesTest):
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'users', metadata,
+ Column(
+ 'user_id', INT, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_name', VARCHAR(20)),
+ test_needs_acid=True
+ )
+
+ @testing.requires.multivalues_inserts
+ def test_multivalues_insert(self):
+ users = self.tables.users
+ users.insert(
+ values=[
+ {'user_id': 7, 'user_name': 'jack'},
+ {'user_id': 8, 'user_name': 'ed'}]).execute()
+ rows = users.select().order_by(users.c.user_id).execute().fetchall()
+ eq_(rows[0], (7, 'jack'))
+ eq_(rows[1], (8, 'ed'))
+ users.insert(values=[(9, 'jack'), (10, 'ed')]).execute()
+ rows = users.select().order_by(users.c.user_id).execute().fetchall()
+ eq_(rows[2], (9, 'jack'))
+ eq_(rows[3], (10, 'ed'))
+
+ def test_insert_heterogeneous_params(self):
+ """test that executemany parameters are asserted to match the
+ parameter set of the first."""
+ users = self.tables.users
+
+ assert_raises_message(
+ exc.StatementError,
+ r"\(sqlalchemy.exc.InvalidRequestError\) A value is required for "
+ "bind parameter 'user_name', in "
+ "parameter group 2 "
+ r"\[SQL: u?'INSERT INTO users",
+ users.insert().execute,
+ {'user_id': 7, 'user_name': 'jack'},
+ {'user_id': 8, 'user_name': 'ed'},
+ {'user_id': 9}
+ )
+
+ # this succeeds however. We aren't yet doing
+ # a length check on all subsequent parameters.
+ users.insert().execute(
+ {'user_id': 7},
+ {'user_id': 8, 'user_name': 'ed'},
+ {'user_id': 9}
+ )
+
+ def _test_lastrow_accessor(self, table_, values, assertvalues):
+ """Tests the inserted_primary_key and lastrow_has_id() functions."""
+
+ def insert_values(engine, table_, values):
+ """
+ Inserts a row into a table, returns the full list of values
+ INSERTed including defaults that fired off on the DB side and
+ detects rows that had defaults and post-fetches.
+ """
+
+ # verify implicit_returning is working
+ if engine.dialect.implicit_returning:
+ ins = table_.insert()
+ comp = ins.compile(engine, column_keys=list(values))
+ if not set(values).issuperset(
+ c.key for c in table_.primary_key):
+ is_(bool(comp.returning), True)
+
+ result = engine.execute(table_.insert(), **values)
+ ret = values.copy()
+
+ for col, id in zip(
+ table_.primary_key, result.inserted_primary_key):
+ ret[col.key] = id
+
+ if result.lastrow_has_defaults():
+ criterion = and_(
+ *[
+ col == id for col, id in
+ zip(table_.primary_key, result.inserted_primary_key)])
+ row = engine.execute(table_.select(criterion)).first()
+ for c in table_.c:
+ ret[c.key] = row[c]
+ return ret
+
+ if testing.against('firebird', 'postgresql', 'oracle', 'mssql'):
+ assert testing.db.dialect.implicit_returning
+
+ if testing.db.dialect.implicit_returning:
+ test_engines = [
+ engines.testing_engine(options={'implicit_returning': False}),
+ engines.testing_engine(options={'implicit_returning': True}),
+ ]
+ else:
+ test_engines = [testing.db]
+
+ for engine in test_engines:
+ try:
+ table_.create(bind=engine, checkfirst=True)
+ i = insert_values(engine, table_, values)
+ eq_(i, assertvalues)
+ finally:
+ table_.drop(bind=engine)
+
+ @testing.skip_if('sqlite')
+ def test_lastrow_accessor_one(self):
+ metadata = MetaData()
+ self._test_lastrow_accessor(
+ Table(
+ "t1", metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('foo', String(30), primary_key=True)),
+ {'foo': 'hi'},
+ {'id': 1, 'foo': 'hi'}
+ )
+
+ @testing.skip_if('sqlite')
+ def test_lastrow_accessor_two(self):
+ metadata = MetaData()
+ self._test_lastrow_accessor(
+ Table(
+ "t2", metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('foo', String(30), primary_key=True),
+ Column('bar', String(30), server_default='hi')
+ ),
+ {'foo': 'hi'},
+ {'id': 1, 'foo': 'hi', 'bar': 'hi'}
+ )
+
+ def test_lastrow_accessor_three(self):
+ metadata = MetaData()
+ self._test_lastrow_accessor(
+ Table(
+ "t3", metadata,
+ Column("id", String(40), primary_key=True),
+ Column('foo', String(30), primary_key=True),
+ Column("bar", String(30))
+ ),
+ {'id': 'hi', 'foo': 'thisisfoo', 'bar': "thisisbar"},
+ {'id': 'hi', 'foo': 'thisisfoo', 'bar': "thisisbar"}
+ )
+
+ def test_lastrow_accessor_four(self):
+ metadata = MetaData()
+ self._test_lastrow_accessor(
+ Table(
+ "t4", metadata,
+ Column(
+ 'id', Integer,
+ Sequence('t4_id_seq', optional=True),
+ primary_key=True),
+ Column('foo', String(30), primary_key=True),
+ Column('bar', String(30), server_default='hi')
+ ),
+ {'foo': 'hi', 'id': 1},
+ {'id': 1, 'foo': 'hi', 'bar': 'hi'}
+ )
+
+ def test_lastrow_accessor_five(self):
+ metadata = MetaData()
+ self._test_lastrow_accessor(
+ Table(
+ "t5", metadata,
+ Column('id', String(10), primary_key=True),
+ Column('bar', String(30), server_default='hi')
+ ),
+ {'id': 'id1'},
+ {'id': 'id1', 'bar': 'hi'},
+ )
+
+ @testing.skip_if('sqlite')
+ def test_lastrow_accessor_six(self):
+ metadata = MetaData()
+ self._test_lastrow_accessor(
+ Table(
+ "t6", metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('bar', Integer, primary_key=True)
+ ),
+ {'bar': 0},
+ {'id': 1, 'bar': 0},
+ )
+
+ # TODO: why not in the sqlite suite?
+ @testing.only_on('sqlite+pysqlite')
+ @testing.provide_metadata
+ def test_lastrowid_zero(self):
+ from sqlalchemy.dialects import sqlite
+ eng = engines.testing_engine()
+
+ class ExcCtx(sqlite.base.SQLiteExecutionContext):
+
+ def get_lastrowid(self):
+ return 0
+ eng.dialect.execution_ctx_cls = ExcCtx
+ t = Table(
+ 't', self.metadata, Column('x', Integer, primary_key=True),
+ Column('y', Integer))
+ t.create(eng)
+ r = eng.execute(t.insert().values(y=5))
+ eq_(r.inserted_primary_key, [0])
+
+ @testing.fails_on(
+ 'sqlite', "sqlite autoincremnt doesn't work with composite pks")
+ @testing.provide_metadata
+ def test_misordered_lastrow(self):
+ metadata = self.metadata
+
+ related = Table(
+ 'related', metadata,
+ Column('id', Integer, primary_key=True),
+ mysql_engine='MyISAM'
+ )
+ t6 = Table(
+ "t6", metadata,
+ Column(
+ 'manual_id', Integer, ForeignKey('related.id'),
+ primary_key=True),
+ Column(
+ 'auto_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ mysql_engine='MyISAM'
+ )
+
+ metadata.create_all()
+ r = related.insert().values(id=12).execute()
+ id_ = r.inserted_primary_key[0]
+ eq_(id_, 12)
+
+ r = t6.insert().values(manual_id=id_).execute()
+ eq_(r.inserted_primary_key, [12, 1])
+
+ def test_implicit_id_insert_select_columns(self):
+ users = self.tables.users
+ stmt = users.insert().from_select(
+ (users.c.user_id, users.c.user_name),
+ users.select().where(users.c.user_id == 20))
+
+ testing.db.execute(stmt)
+
+ def test_implicit_id_insert_select_keys(self):
+ users = self.tables.users
+ stmt = users.insert().from_select(
+ ["user_id", "user_name"],
+ users.select().where(users.c.user_id == 20))
+
+ testing.db.execute(stmt)
+
+ @testing.requires.empty_inserts
+ @testing.requires.returning
+ def test_no_inserted_pk_on_returning(self):
+ users = self.tables.users
+ result = testing.db.execute(users.insert().returning(
+ users.c.user_id, users.c.user_name))
+ assert_raises_message(
+ exc.InvalidRequestError,
+ r"Can't call inserted_primary_key when returning\(\) is used.",
+ getattr, result, 'inserted_primary_key'
+ )
+
+
+class TableInsertTest(fixtures.TablesTest):
+
+ """test for consistent insert behavior across dialects
+ regarding the inline=True flag, lower-case 't' tables.
+
+ """
+ run_create_tables = 'each'
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'foo', metadata,
+ Column('id', Integer, Sequence('t_id_seq'), primary_key=True),
+ Column('data', String(50)),
+ Column('x', Integer)
+ )
+
+ def _fixture(self, types=True):
+ if types:
+ t = sql.table(
+ 'foo', sql.column('id', Integer),
+ sql.column('data', String),
+ sql.column('x', Integer))
+ else:
+ t = sql.table(
+ 'foo', sql.column('id'), sql.column('data'), sql.column('x'))
+ return t
+
+ def _test(self, stmt, row, returning=None, inserted_primary_key=False):
+ r = testing.db.execute(stmt)
+
+ if returning:
+ returned = r.first()
+ eq_(returned, returning)
+ elif inserted_primary_key is not False:
+ eq_(r.inserted_primary_key, inserted_primary_key)
+
+ eq_(testing.db.execute(self.tables.foo.select()).first(), row)
+
+ def _test_multi(self, stmt, rows, data):
+ testing.db.execute(stmt, rows)
+ eq_(
+ testing.db.execute(
+ self.tables.foo.select().
+ order_by(self.tables.foo.c.id)).fetchall(),
+ data)
+
+ @testing.requires.sequences
+    def test_explicit_sequence(self):
+ t = self._fixture()
+ self._test(
+ t.insert().values(
+ id=func.next_value(Sequence('t_id_seq')), data='data', x=5),
+ (1, 'data', 5)
+ )
+
+ def test_uppercase(self):
+ t = self.tables.foo
+ self._test(
+ t.insert().values(id=1, data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[1]
+ )
+
+ def test_uppercase_inline(self):
+ t = self.tables.foo
+ self._test(
+ t.insert(inline=True).values(id=1, data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[1]
+ )
+
+ @testing.crashes(
+ "mssql+pyodbc",
+ "Pyodbc + SQL Server + Py3K, some decimal handling issue")
+ def test_uppercase_inline_implicit(self):
+ t = self.tables.foo
+ self._test(
+ t.insert(inline=True).values(data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[None]
+ )
+
+ def test_uppercase_implicit(self):
+ t = self.tables.foo
+ self._test(
+ t.insert().values(data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[1]
+ )
+
+ def test_uppercase_direct_params(self):
+ t = self.tables.foo
+ self._test(
+ t.insert().values(id=1, data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[1]
+ )
+
+ @testing.requires.returning
+ def test_uppercase_direct_params_returning(self):
+ t = self.tables.foo
+ self._test(
+ t.insert().values(id=1, data='data', x=5).returning(t.c.id, t.c.x),
+ (1, 'data', 5),
+ returning=(1, 5)
+ )
+
+ @testing.fails_on(
+ 'mssql', "lowercase table doesn't support identity insert disable")
+ def test_direct_params(self):
+ t = self._fixture()
+ self._test(
+ t.insert().values(id=1, data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[]
+ )
+
+ @testing.fails_on(
+ 'mssql', "lowercase table doesn't support identity insert disable")
+ @testing.requires.returning
+ def test_direct_params_returning(self):
+ t = self._fixture()
+ self._test(
+ t.insert().values(id=1, data='data', x=5).returning(t.c.id, t.c.x),
+ (1, 'data', 5),
+ returning=(1, 5)
+ )
+
+ @testing.requires.emulated_lastrowid
+ def test_implicit_pk(self):
+ t = self._fixture()
+ self._test(
+ t.insert().values(data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[]
+ )
+
+ @testing.requires.emulated_lastrowid
+ def test_implicit_pk_multi_rows(self):
+ t = self._fixture()
+ self._test_multi(
+ t.insert(),
+ [
+ {'data': 'd1', 'x': 5},
+ {'data': 'd2', 'x': 6},
+ {'data': 'd3', 'x': 7},
+ ],
+ [
+ (1, 'd1', 5),
+ (2, 'd2', 6),
+ (3, 'd3', 7)
+ ],
+ )
+
+ @testing.requires.emulated_lastrowid
+ def test_implicit_pk_inline(self):
+ t = self._fixture()
+ self._test(
+ t.insert(inline=True).values(data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[]
+ )
diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py
index 2e51b9a91..d4039a5fe 100644
--- a/test/sql/test_metadata.py
+++ b/test/sql/test_metadata.py
@@ -7,7 +7,7 @@ from sqlalchemy import Integer, String, UniqueConstraint, \
CheckConstraint, ForeignKey, MetaData, Sequence, \
ForeignKeyConstraint, PrimaryKeyConstraint, ColumnDefault, Index, event,\
events, Unicode, types as sqltypes, bindparam, \
- Table, Column, Boolean, Enum, func, text
+ Table, Column, Boolean, Enum, func, text, TypeDecorator
from sqlalchemy import schema, exc
from sqlalchemy.sql import elements, naming
import sqlalchemy as tsa
@@ -1361,6 +1361,123 @@ class TableTest(fixtures.TestBase, AssertsCompiledSQL):
assert not t1.c.x.nullable
+class PKAutoIncrementTest(fixtures.TestBase):
+ def test_multi_integer_no_autoinc(self):
+ pk = PrimaryKeyConstraint(
+ Column('a', Integer),
+ Column('b', Integer)
+ )
+ t = Table('t', MetaData())
+ t.append_constraint(pk)
+
+ is_(pk._autoincrement_column, None)
+
+ def test_multi_integer_multi_autoinc(self):
+ pk = PrimaryKeyConstraint(
+ Column('a', Integer, autoincrement=True),
+ Column('b', Integer, autoincrement=True)
+ )
+ t = Table('t', MetaData())
+ t.append_constraint(pk)
+
+ assert_raises_message(
+ exc.ArgumentError,
+ "Only one Column may be marked",
+ lambda: pk._autoincrement_column
+ )
+
+ def test_single_integer_no_autoinc(self):
+ pk = PrimaryKeyConstraint(
+ Column('a', Integer),
+ )
+ t = Table('t', MetaData())
+ t.append_constraint(pk)
+
+ is_(pk._autoincrement_column, pk.columns['a'])
+
+ def test_single_string_no_autoinc(self):
+ pk = PrimaryKeyConstraint(
+ Column('a', String),
+ )
+ t = Table('t', MetaData())
+ t.append_constraint(pk)
+
+ is_(pk._autoincrement_column, None)
+
+ def test_single_string_illegal_autoinc(self):
+ t = Table('t', MetaData(), Column('a', String, autoincrement=True))
+ pk = PrimaryKeyConstraint(
+ t.c.a
+ )
+ t.append_constraint(pk)
+
+ assert_raises_message(
+ exc.ArgumentError,
+ "Column type VARCHAR on column 't.a'",
+ lambda: pk._autoincrement_column
+ )
+
+ def test_single_integer_default(self):
+ t = Table(
+ 't', MetaData(),
+ Column('a', Integer, autoincrement=True, default=lambda: 1))
+ pk = PrimaryKeyConstraint(
+ t.c.a
+ )
+ t.append_constraint(pk)
+
+ is_(pk._autoincrement_column, t.c.a)
+
+ def test_single_integer_server_default(self):
+ # new as of 1.1; now that we have three states for autoincrement,
+ # if the user puts autoincrement=True with a server_default, trust
+ # them on it
+ t = Table(
+ 't', MetaData(),
+ Column('a', Integer,
+ autoincrement=True, server_default=func.magic()))
+ pk = PrimaryKeyConstraint(
+ t.c.a
+ )
+ t.append_constraint(pk)
+
+ is_(pk._autoincrement_column, t.c.a)
+
+ def test_implicit_autoinc_but_fks(self):
+ m = MetaData()
+ Table('t1', m, Column('id', Integer, primary_key=True))
+ t2 = Table(
+ 't2', MetaData(),
+ Column('a', Integer, ForeignKey('t1.id')))
+ pk = PrimaryKeyConstraint(
+ t2.c.a
+ )
+ t2.append_constraint(pk)
+ is_(pk._autoincrement_column, None)
+
+ def test_explicit_autoinc_but_fks(self):
+ m = MetaData()
+ Table('t1', m, Column('id', Integer, primary_key=True))
+ t2 = Table(
+ 't2', MetaData(),
+ Column('a', Integer, ForeignKey('t1.id'), autoincrement=True))
+ pk = PrimaryKeyConstraint(
+ t2.c.a
+ )
+ t2.append_constraint(pk)
+ is_(pk._autoincrement_column, t2.c.a)
+
+ t3 = Table(
+ 't3', MetaData(),
+ Column('a', Integer,
+ ForeignKey('t1.id'), autoincrement='ignore_fk'))
+ pk = PrimaryKeyConstraint(
+ t3.c.a
+ )
+ t3.append_constraint(pk)
+ is_(pk._autoincrement_column, t3.c.a)
+
+
class SchemaTypeTest(fixtures.TestBase):
class MyType(sqltypes.SchemaType, sqltypes.TypeEngine):
@@ -1430,6 +1547,20 @@ class SchemaTypeTest(fixtures.TestBase):
# our test type sets table, though
is_(t2.c.y.type.table, t2)
+ def test_tometadata_copy_decorated(self):
+
+ class MyDecorated(TypeDecorator):
+ impl = self.MyType
+
+ m1 = MetaData()
+
+ type_ = MyDecorated(schema="z")
+ t1 = Table('x', m1, Column("y", type_))
+
+ m2 = MetaData()
+ t2 = t1.tometadata(m2)
+ eq_(t2.c.y.type.schema, "z")
+
def test_tometadata_independent_schema(self):
m1 = MetaData()
diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py
index 0985020d1..03c0f89be 100644
--- a/test/sql/test_operators.py
+++ b/test/sql/test_operators.py
@@ -1,7 +1,8 @@
from sqlalchemy.testing import fixtures, eq_, is_, is_not_
from sqlalchemy import testing
from sqlalchemy.testing import assert_raises_message
-from sqlalchemy.sql import column, desc, asc, literal, collate, null, true, false
+from sqlalchemy.sql import column, desc, asc, literal, collate, null, \
+ true, false, any_, all_
from sqlalchemy.sql.expression import BinaryExpression, \
ClauseList, Grouping, \
UnaryExpression, select, union, func, tuple_
@@ -12,8 +13,9 @@ from sqlalchemy import exc
from sqlalchemy.engine import default
from sqlalchemy.sql.elements import _literal_as_text
from sqlalchemy.schema import Column, Table, MetaData
+from sqlalchemy.sql import compiler
from sqlalchemy.types import TypeEngine, TypeDecorator, UserDefinedType, \
- Boolean, NullType, MatchType
+ Boolean, NullType, MatchType, Indexable, Concatenable, Array
from sqlalchemy.dialects import mysql, firebird, postgresql, oracle, \
sqlite, mssql
from sqlalchemy import util
@@ -21,7 +23,6 @@ import datetime
import collections
from sqlalchemy import text, literal_column
from sqlalchemy import and_, not_, between, or_
-from sqlalchemy.sql import true, false, null
class LoopOperate(operators.ColumnOperators):
@@ -210,6 +211,60 @@ class DefaultColumnComparatorTest(fixtures.TestBase):
def test_concat(self):
self._do_operate_test(operators.concat_op)
+ def test_default_adapt(self):
+ class TypeOne(TypeEngine):
+ pass
+
+ class TypeTwo(TypeEngine):
+ pass
+
+ expr = column('x', TypeOne()) - column('y', TypeTwo())
+ is_(
+ expr.type._type_affinity, TypeOne
+ )
+
+ def test_concatenable_adapt(self):
+ class TypeOne(Concatenable, TypeEngine):
+ pass
+
+ class TypeTwo(Concatenable, TypeEngine):
+ pass
+
+ class TypeThree(TypeEngine):
+ pass
+
+ expr = column('x', TypeOne()) - column('y', TypeTwo())
+ is_(
+ expr.type._type_affinity, TypeOne
+ )
+ is_(
+ expr.operator, operator.sub
+ )
+
+ expr = column('x', TypeOne()) + column('y', TypeTwo())
+ is_(
+ expr.type._type_affinity, TypeOne
+ )
+ is_(
+ expr.operator, operators.concat_op
+ )
+
+ expr = column('x', TypeOne()) - column('y', TypeThree())
+ is_(
+ expr.type._type_affinity, TypeOne
+ )
+ is_(
+ expr.operator, operator.sub
+ )
+
+ expr = column('x', TypeOne()) + column('y', TypeThree())
+ is_(
+ expr.type._type_affinity, TypeOne
+ )
+ is_(
+ expr.operator, operator.add
+ )
+
class CustomUnaryOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
@@ -577,6 +632,200 @@ class ExtensionOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
)
+class IndexableTest(fixtures.TestBase, testing.AssertsCompiledSQL):
+ def setUp(self):
+ class MyTypeCompiler(compiler.GenericTypeCompiler):
+ def visit_mytype(self, type, **kw):
+ return "MYTYPE"
+
+ def visit_myothertype(self, type, **kw):
+ return "MYOTHERTYPE"
+
+ class MyCompiler(compiler.SQLCompiler):
+ def visit_slice(self, element, **kw):
+ return "%s:%s" % (
+ self.process(element.start, **kw),
+ self.process(element.stop, **kw),
+ )
+
+ def visit_getitem_binary(self, binary, operator, **kw):
+ return "%s[%s]" % (
+ self.process(binary.left, **kw),
+ self.process(binary.right, **kw)
+ )
+
+ class MyDialect(default.DefaultDialect):
+ statement_compiler = MyCompiler
+ type_compiler = MyTypeCompiler
+
+ class MyType(Indexable, TypeEngine):
+ __visit_name__ = 'mytype'
+
+ def __init__(self, zero_indexes=False, dimensions=1):
+ if zero_indexes:
+ self.zero_indexes = zero_indexes
+ self.dimensions = dimensions
+
+ class Comparator(Indexable.Comparator):
+ def _setup_getitem(self, index):
+ if isinstance(index, slice):
+ return_type = self.type
+ elif self.type.dimensions is None or \
+ self.type.dimensions == 1:
+ return_type = Integer()
+ else:
+ adapt_kw = {'dimensions': self.type.dimensions - 1}
+ # this is also testing the behavior of adapt()
+ # that we can pass kw that override constructor kws.
+ # required a small change to util.constructor_copy().
+ return_type = self.type.adapt(
+ self.type.__class__, **adapt_kw)
+
+ return operators.getitem, index, return_type
+ comparator_factory = Comparator
+
+ self.MyType = MyType
+ self.__dialect__ = MyDialect()
+
+ def test_setup_getitem_w_dims(self):
+ """test the behavior of the _setup_getitem() method given a simple
+ 'dimensions' scheme - this is identical to postgresql.ARRAY."""
+
+ col = Column('x', self.MyType(dimensions=3))
+
+ is_(
+ col[5].type._type_affinity, self.MyType
+ )
+ eq_(
+ col[5].type.dimensions, 2
+ )
+ is_(
+ col[5][6].type._type_affinity, self.MyType
+ )
+ eq_(
+ col[5][6].type.dimensions, 1
+ )
+ is_(
+ col[5][6][7].type._type_affinity, Integer
+ )
+
+ def test_getindex_literal(self):
+
+ col = Column('x', self.MyType())
+
+ self.assert_compile(
+ col[5],
+ "x[:x_1]",
+ checkparams={'x_1': 5}
+ )
+
+ def test_getindex_sqlexpr(self):
+
+ col = Column('x', self.MyType())
+ col2 = Column('y', Integer())
+
+ self.assert_compile(
+ col[col2],
+ "x[y]",
+ checkparams={}
+ )
+
+ self.assert_compile(
+ col[col2 + 8],
+ "x[(y + :y_1)]",
+ checkparams={'y_1': 8}
+ )
+
+ def test_getslice_literal(self):
+
+ col = Column('x', self.MyType())
+
+ self.assert_compile(
+ col[5:6],
+ "x[:x_1::x_2]",
+ checkparams={'x_1': 5, 'x_2': 6}
+ )
+
+ def test_getslice_sqlexpr(self):
+
+ col = Column('x', self.MyType())
+ col2 = Column('y', Integer())
+
+ self.assert_compile(
+ col[col2:col2 + 5],
+ "x[y:y + :y_1]",
+ checkparams={'y_1': 5}
+ )
+
+ def test_getindex_literal_zeroind(self):
+
+ col = Column('x', self.MyType(zero_indexes=True))
+
+ self.assert_compile(
+ col[5],
+ "x[:x_1]",
+ checkparams={'x_1': 6}
+ )
+
+ def test_getindex_sqlexpr_zeroind(self):
+
+ col = Column('x', self.MyType(zero_indexes=True))
+ col2 = Column('y', Integer())
+
+ self.assert_compile(
+ col[col2],
+ "x[(y + :y_1)]",
+ checkparams={'y_1': 1}
+ )
+
+ self.assert_compile(
+ col[col2 + 8],
+ "x[(y + :y_1 + :param_1)]",
+ checkparams={'y_1': 8, 'param_1': 1}
+ )
+
+ def test_getslice_literal_zeroind(self):
+
+ col = Column('x', self.MyType(zero_indexes=True))
+
+ self.assert_compile(
+ col[5:6],
+ "x[:x_1::x_2]",
+ checkparams={'x_1': 6, 'x_2': 7}
+ )
+
+ def test_getslice_sqlexpr_zeroind(self):
+
+ col = Column('x', self.MyType(zero_indexes=True))
+ col2 = Column('y', Integer())
+
+ self.assert_compile(
+ col[col2:col2 + 5],
+ "x[y + :y_1:y + :y_2 + :param_1]",
+ checkparams={'y_1': 1, 'y_2': 5, 'param_1': 1}
+ )
+
+ def test_override_operators(self):
+ special_index_op = operators.custom_op('->')
+
+ class MyOtherType(Indexable, TypeEngine):
+ __visit_name__ = 'myothertype'
+
+ class Comparator(TypeEngine.Comparator):
+
+ def _adapt_expression(self, op, other_comparator):
+ return special_index_op, MyOtherType()
+
+ comparator_factory = Comparator
+
+ col = Column('x', MyOtherType())
+ self.assert_compile(
+ col[5],
+ "x -> :x_1",
+ checkparams={'x_1': 5}
+ )
+
+
class BooleanEvalTest(fixtures.TestBase, testing.AssertsCompiledSQL):
"""test standalone booleans being wrapped in an AsBoolean, as well
@@ -825,6 +1074,64 @@ class ConjunctionTest(fixtures.TestBase, testing.AssertsCompiledSQL):
"SELECT false AS anon_1, false AS anon_2"
)
+ def test_is_true_literal(self):
+ c = column('x', Boolean)
+ self.assert_compile(
+ c.is_(True),
+ "x IS true"
+ )
+
+ def test_is_false_literal(self):
+ c = column('x', Boolean)
+ self.assert_compile(
+ c.is_(False),
+ "x IS false"
+ )
+
+ def test_and_false_literal_leading(self):
+ self.assert_compile(
+ and_(False, True),
+ "false"
+ )
+
+ self.assert_compile(
+ and_(False, False),
+ "false"
+ )
+
+ def test_and_true_literal_leading(self):
+ self.assert_compile(
+ and_(True, True),
+ "true"
+ )
+
+ self.assert_compile(
+ and_(True, False),
+ "false"
+ )
+
+ def test_or_false_literal_leading(self):
+ self.assert_compile(
+ or_(False, True),
+ "true"
+ )
+
+ self.assert_compile(
+ or_(False, False),
+ "false"
+ )
+
+ def test_or_true_literal_leading(self):
+ self.assert_compile(
+ or_(True, True),
+ "true"
+ )
+
+ self.assert_compile(
+ or_(True, False),
+ "true"
+ )
+
class OperatorPrecedenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
@@ -1327,6 +1634,9 @@ class MathOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
else:
self._test_math_op(operator.div, '/')
+ def test_math_op_mod(self):
+ self._test_math_op(operator.mod, '%')
+
class ComparisonOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
@@ -1953,3 +2263,154 @@ class TupleTypingTest(fixtures.TestBase):
eq_(len(expr.right.clauses), 2)
for elem in expr.right.clauses:
self._assert_types(elem)
+
+
+class AnyAllTest(fixtures.TestBase, testing.AssertsCompiledSQL):
+ __dialect__ = 'default'
+
+ def _fixture(self):
+ m = MetaData()
+
+ t = Table(
+ 'tab1', m,
+ Column('arrval', Array(Integer)),
+ Column('data', Integer)
+ )
+ return t
+
+ def test_any_array(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 == any_(t.c.arrval),
+ ":param_1 = ANY (tab1.arrval)",
+ checkparams={"param_1": 5}
+ )
+
+ def test_all_array(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 == all_(t.c.arrval),
+ ":param_1 = ALL (tab1.arrval)",
+ checkparams={"param_1": 5}
+ )
+
+ def test_any_comparator_array(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 > any_(t.c.arrval),
+ ":param_1 > ANY (tab1.arrval)",
+ checkparams={"param_1": 5}
+ )
+
+ def test_all_comparator_array(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 > all_(t.c.arrval),
+ ":param_1 > ALL (tab1.arrval)",
+ checkparams={"param_1": 5}
+ )
+
+ def test_any_comparator_array_wexpr(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ t.c.data > any_(t.c.arrval),
+ "tab1.data > ANY (tab1.arrval)",
+ checkparams={}
+ )
+
+ def test_all_comparator_array_wexpr(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ t.c.data > all_(t.c.arrval),
+ "tab1.data > ALL (tab1.arrval)",
+ checkparams={}
+ )
+
+ def test_illegal_ops(self):
+ t = self._fixture()
+
+ assert_raises_message(
+ exc.ArgumentError,
+ "Only comparison operators may be used with ANY/ALL",
+ lambda: 5 + all_(t.c.arrval)
+ )
+
+ # TODO:
+ # this is invalid but doesn't raise an error,
+ # as the left-hand side just does its thing. Types
+ # would need to reject their right-hand side.
+ self.assert_compile(
+ t.c.data + all_(t.c.arrval),
+ "tab1.data + ALL (tab1.arrval)"
+ )
+
+ def test_any_array_comparator_accessor(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ t.c.arrval.any(5, operator.gt),
+ ":param_1 > ANY (tab1.arrval)",
+ checkparams={"param_1": 5}
+ )
+
+ def test_all_array_comparator_accessor(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ t.c.arrval.all(5, operator.gt),
+ ":param_1 > ALL (tab1.arrval)",
+ checkparams={"param_1": 5}
+ )
+
+ def test_any_array_expression(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 == any_(t.c.arrval[5:6] + postgresql.array([3, 4])),
+ "%(param_1)s = ANY (tab1.arrval[%(arrval_1)s:%(arrval_2)s] || "
+ "ARRAY[%(param_2)s, %(param_3)s])",
+ checkparams={
+ 'arrval_2': 6, 'param_1': 5, 'param_3': 4,
+ 'arrval_1': 5, 'param_2': 3},
+ dialect='postgresql'
+ )
+
+ def test_all_array_expression(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 == all_(t.c.arrval[5:6] + postgresql.array([3, 4])),
+ "%(param_1)s = ALL (tab1.arrval[%(arrval_1)s:%(arrval_2)s] || "
+ "ARRAY[%(param_2)s, %(param_3)s])",
+ checkparams={
+ 'arrval_2': 6, 'param_1': 5, 'param_3': 4,
+ 'arrval_1': 5, 'param_2': 3},
+ dialect='postgresql'
+ )
+
+ def test_any_subq(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 == any_(select([t.c.data]).where(t.c.data < 10)),
+ ":param_1 = ANY (SELECT tab1.data "
+ "FROM tab1 WHERE tab1.data < :data_1)",
+ checkparams={'data_1': 10, 'param_1': 5}
+ )
+
+ def test_all_subq(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 == all_(select([t.c.data]).where(t.c.data < 10)),
+ ":param_1 = ALL (SELECT tab1.data "
+ "FROM tab1 WHERE tab1.data < :data_1)",
+ checkparams={'data_1': 10, 'param_1': 5}
+ )
+
diff --git a/test/sql/test_query.py b/test/sql/test_query.py
index 98f375018..aca933fc9 100644
--- a/test/sql/test_query.py
+++ b/test/sql/test_query.py
@@ -1,13 +1,13 @@
-from sqlalchemy.testing import eq_, assert_raises_message, assert_raises, is_
+from sqlalchemy.testing import eq_, assert_raises_message, assert_raises, \
+ is_, in_, not_in_
from sqlalchemy import testing
from sqlalchemy.testing import fixtures, engines
-from sqlalchemy import util
from sqlalchemy import (
exc, sql, func, select, String, Integer, MetaData, and_, ForeignKey,
- union, intersect, except_, union_all, VARCHAR, INT, CHAR, text, Sequence,
- bindparam, literal, not_, type_coerce, literal_column, desc, asc,
- TypeDecorator, or_, cast, table, column)
-from sqlalchemy.engine import default, result as _result
+ union, intersect, except_, union_all, VARCHAR, INT, text,
+ bindparam, literal, not_, literal_column, desc, asc,
+ TypeDecorator, or_, cast)
+from sqlalchemy.engine import default
from sqlalchemy.testing.schema import Table, Column
# ongoing - these are old tests. those which are of general use
@@ -61,260 +61,6 @@ class QueryTest(fixtures.TestBase):
def teardown_class(cls):
metadata.drop_all()
- @testing.requires.multivalues_inserts
- def test_multivalues_insert(self):
- users.insert(
- values=[
- {'user_id': 7, 'user_name': 'jack'},
- {'user_id': 8, 'user_name': 'ed'}]).execute()
- rows = users.select().order_by(users.c.user_id).execute().fetchall()
- self.assert_(rows[0] == (7, 'jack'))
- self.assert_(rows[1] == (8, 'ed'))
- users.insert(values=[(9, 'jack'), (10, 'ed')]).execute()
- rows = users.select().order_by(users.c.user_id).execute().fetchall()
- self.assert_(rows[2] == (9, 'jack'))
- self.assert_(rows[3] == (10, 'ed'))
-
- def test_insert_heterogeneous_params(self):
- """test that executemany parameters are asserted to match the
- parameter set of the first."""
-
- assert_raises_message(
- exc.StatementError,
- r"\(sqlalchemy.exc.InvalidRequestError\) A value is required for "
- "bind parameter 'user_name', in "
- "parameter group 2 "
- r"\[SQL: u?'INSERT INTO query_users",
- users.insert().execute,
- {'user_id': 7, 'user_name': 'jack'},
- {'user_id': 8, 'user_name': 'ed'},
- {'user_id': 9}
- )
-
- # this succeeds however. We aren't yet doing
- # a length check on all subsequent parameters.
- users.insert().execute(
- {'user_id': 7},
- {'user_id': 8, 'user_name': 'ed'},
- {'user_id': 9}
- )
-
- def test_lastrow_accessor(self):
- """Tests the inserted_primary_key and lastrow_has_id() functions."""
-
- def insert_values(engine, table, values):
- """
- Inserts a row into a table, returns the full list of values
- INSERTed including defaults that fired off on the DB side and
- detects rows that had defaults and post-fetches.
- """
-
- # verify implicit_returning is working
- if engine.dialect.implicit_returning:
- ins = table.insert()
- comp = ins.compile(engine, column_keys=list(values))
- if not set(values).issuperset(
- c.key for c in table.primary_key):
- assert comp.returning
-
- result = engine.execute(table.insert(), **values)
- ret = values.copy()
-
- for col, id in zip(table.primary_key, result.inserted_primary_key):
- ret[col.key] = id
-
- if result.lastrow_has_defaults():
- criterion = and_(
- *[
- col == id for col, id in
- zip(table.primary_key, result.inserted_primary_key)])
- row = engine.execute(table.select(criterion)).first()
- for c in table.c:
- ret[c.key] = row[c]
- return ret
-
- if testing.against('firebird', 'postgresql', 'oracle', 'mssql'):
- assert testing.db.dialect.implicit_returning
-
- if testing.db.dialect.implicit_returning:
- test_engines = [
- engines.testing_engine(options={'implicit_returning': False}),
- engines.testing_engine(options={'implicit_returning': True}),
- ]
- else:
- test_engines = [testing.db]
-
- for engine in test_engines:
- metadata = MetaData()
- for supported, table, values, assertvalues in [
- (
- {'unsupported': ['sqlite']},
- Table(
- "t1", metadata,
- Column(
- 'id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('foo', String(30), primary_key=True)),
- {'foo': 'hi'},
- {'id': 1, 'foo': 'hi'}
- ),
- (
- {'unsupported': ['sqlite']},
- Table(
- "t2", metadata,
- Column(
- 'id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('foo', String(30), primary_key=True),
- Column('bar', String(30), server_default='hi')
- ),
- {'foo': 'hi'},
- {'id': 1, 'foo': 'hi', 'bar': 'hi'}
- ),
- (
- {'unsupported': []},
- Table(
- "t3", metadata,
- Column("id", String(40), primary_key=True),
- Column('foo', String(30), primary_key=True),
- Column("bar", String(30))
- ),
- {'id': 'hi', 'foo': 'thisisfoo', 'bar': "thisisbar"},
- {'id': 'hi', 'foo': 'thisisfoo', 'bar': "thisisbar"}
- ),
- (
- {'unsupported': []},
- Table(
- "t4", metadata,
- Column(
- 'id', Integer,
- Sequence('t4_id_seq', optional=True),
- primary_key=True),
- Column('foo', String(30), primary_key=True),
- Column('bar', String(30), server_default='hi')
- ),
- {'foo': 'hi', 'id': 1},
- {'id': 1, 'foo': 'hi', 'bar': 'hi'}
- ),
- (
- {'unsupported': []},
- Table(
- "t5", metadata,
- Column('id', String(10), primary_key=True),
- Column('bar', String(30), server_default='hi')
- ),
- {'id': 'id1'},
- {'id': 'id1', 'bar': 'hi'},
- ),
- (
- {'unsupported': ['sqlite']},
- Table(
- "t6", metadata,
- Column(
- 'id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('bar', Integer, primary_key=True)
- ),
- {'bar': 0},
- {'id': 1, 'bar': 0},
- ),
- ]:
- if testing.db.name in supported['unsupported']:
- continue
- try:
- table.create(bind=engine, checkfirst=True)
- i = insert_values(engine, table, values)
- assert i == assertvalues, "tablename: %s %r %r" % \
- (table.name, repr(i), repr(assertvalues))
- finally:
- table.drop(bind=engine)
-
- # TODO: why not in the sqlite suite?
- @testing.only_on('sqlite+pysqlite')
- @testing.provide_metadata
- def test_lastrowid_zero(self):
- from sqlalchemy.dialects import sqlite
- eng = engines.testing_engine()
-
- class ExcCtx(sqlite.base.SQLiteExecutionContext):
-
- def get_lastrowid(self):
- return 0
- eng.dialect.execution_ctx_cls = ExcCtx
- t = Table(
- 't', self.metadata, Column('x', Integer, primary_key=True),
- Column('y', Integer))
- t.create(eng)
- r = eng.execute(t.insert().values(y=5))
- eq_(r.inserted_primary_key, [0])
-
- @testing.fails_on(
- 'sqlite', "sqlite autoincremnt doesn't work with composite pks")
- def test_misordered_lastrow(self):
- related = Table(
- 'related', metadata,
- Column('id', Integer, primary_key=True),
- mysql_engine='MyISAM'
- )
- t6 = Table(
- "t6", metadata,
- Column(
- 'manual_id', Integer, ForeignKey('related.id'),
- primary_key=True),
- Column(
- 'auto_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- mysql_engine='MyISAM'
- )
-
- metadata.create_all()
- r = related.insert().values(id=12).execute()
- id = r.inserted_primary_key[0]
- assert id == 12
-
- r = t6.insert().values(manual_id=id).execute()
- eq_(r.inserted_primary_key, [12, 1])
-
- def test_implicit_id_insert_select_columns(self):
- stmt = users.insert().from_select(
- (users.c.user_id, users.c.user_name),
- users.select().where(users.c.user_id == 20))
-
- testing.db.execute(stmt)
-
- def test_implicit_id_insert_select_keys(self):
- stmt = users.insert().from_select(
- ["user_id", "user_name"],
- users.select().where(users.c.user_id == 20))
-
- testing.db.execute(stmt)
-
- def test_row_iteration(self):
- users.insert().execute(
- {'user_id': 7, 'user_name': 'jack'},
- {'user_id': 8, 'user_name': 'ed'},
- {'user_id': 9, 'user_name': 'fred'},
- )
- r = users.select().execute()
- l = []
- for row in r:
- l.append(row)
- self.assert_(len(l) == 3)
-
- @testing.requires.subqueries
- def test_anonymous_rows(self):
- users.insert().execute(
- {'user_id': 7, 'user_name': 'jack'},
- {'user_id': 8, 'user_name': 'ed'},
- {'user_id': 9, 'user_name': 'fred'},
- )
-
- sel = select([users.c.user_id]).where(users.c.user_name == 'jack'). \
- as_scalar()
- for row in select([sel + 1, sel + 3], bind=users.bind).execute():
- assert row['anon_1'] == 8
- assert row['anon_2'] == 10
-
@testing.fails_on(
'firebird', "kinterbasdb doesn't send full type information")
def test_order_by_label(self):
@@ -364,154 +110,6 @@ class QueryTest(fixtures.TestBase):
[("test: ed",), ("test: fred",), ("test: jack",)]
)
- def test_row_comparison(self):
- users.insert().execute(user_id=7, user_name='jack')
- rp = users.select().execute().first()
-
- self.assert_(rp == rp)
- self.assert_(not(rp != rp))
-
- equal = (7, 'jack')
-
- self.assert_(rp == equal)
- self.assert_(equal == rp)
- self.assert_(not (rp != equal))
- self.assert_(not (equal != equal))
-
- def endless():
- while True:
- yield 1
- self.assert_(rp != endless())
- self.assert_(endless() != rp)
-
- # test that everything compares the same
- # as it would against a tuple
- import operator
- for compare in [False, 8, endless(), 'xyz', (7, 'jack')]:
- for op in [
- operator.eq, operator.ne, operator.gt,
- operator.lt, operator.ge, operator.le
- ]:
-
- try:
- control = op(equal, compare)
- except TypeError:
- # Py3K raises TypeError for some invalid comparisons
- assert_raises(TypeError, op, rp, compare)
- else:
- eq_(control, op(rp, compare))
-
- try:
- control = op(compare, equal)
- except TypeError:
- # Py3K raises TypeError for some invalid comparisons
- assert_raises(TypeError, op, compare, rp)
- else:
- eq_(control, op(compare, rp))
-
- @testing.provide_metadata
- def test_column_label_overlap_fallback(self):
- content = Table(
- 'content', self.metadata,
- Column('type', String(30)),
- )
- bar = Table(
- 'bar', self.metadata,
- Column('content_type', String(30))
- )
- self.metadata.create_all(testing.db)
- testing.db.execute(content.insert().values(type="t1"))
-
- row = testing.db.execute(content.select(use_labels=True)).first()
- assert content.c.type in row
- assert bar.c.content_type not in row
- assert sql.column('content_type') in row
-
- row = testing.db.execute(
- select([content.c.type.label("content_type")])).first()
- assert content.c.type in row
-
- assert bar.c.content_type not in row
-
- assert sql.column('content_type') in row
-
- row = testing.db.execute(select([func.now().label("content_type")])). \
- first()
- assert content.c.type not in row
-
- assert bar.c.content_type not in row
-
- assert sql.column('content_type') in row
-
- def test_pickled_rows(self):
- users.insert().execute(
- {'user_id': 7, 'user_name': 'jack'},
- {'user_id': 8, 'user_name': 'ed'},
- {'user_id': 9, 'user_name': 'fred'},
- )
-
- for pickle in False, True:
- for use_labels in False, True:
- result = users.select(use_labels=use_labels).order_by(
- users.c.user_id).execute().fetchall()
-
- if pickle:
- result = util.pickle.loads(util.pickle.dumps(result))
-
- eq_(
- result,
- [(7, "jack"), (8, "ed"), (9, "fred")]
- )
- if use_labels:
- eq_(result[0]['query_users_user_id'], 7)
- eq_(
- list(result[0].keys()),
- ["query_users_user_id", "query_users_user_name"])
- else:
- eq_(result[0]['user_id'], 7)
- eq_(list(result[0].keys()), ["user_id", "user_name"])
-
- eq_(result[0][0], 7)
- eq_(result[0][users.c.user_id], 7)
- eq_(result[0][users.c.user_name], 'jack')
-
- if not pickle or use_labels:
- assert_raises(
- exc.NoSuchColumnError,
- lambda: result[0][addresses.c.user_id])
- else:
- # test with a different table. name resolution is
- # causing 'user_id' to match when use_labels wasn't used.
- eq_(result[0][addresses.c.user_id], 7)
-
- assert_raises(
- exc.NoSuchColumnError, lambda: result[0]['fake key'])
- assert_raises(
- exc.NoSuchColumnError,
- lambda: result[0][addresses.c.address_id])
-
- def test_column_error_printing(self):
- row = testing.db.execute(select([1])).first()
-
- class unprintable(object):
-
- def __str__(self):
- raise ValueError("nope")
-
- msg = r"Could not locate column in row for column '%s'"
-
- for accessor, repl in [
- ("x", "x"),
- (Column("q", Integer), "q"),
- (Column("q", Integer) + 12, r"q \+ :q_1"),
- (unprintable(), "unprintable element.*"),
- ]:
- assert_raises_message(
- exc.NoSuchColumnError,
- msg % repl,
- lambda: row[accessor]
- )
-
@testing.requires.boolean_col_expressions
def test_or_and_as_columns(self):
true, false = literal(True), literal(False)
@@ -538,16 +136,6 @@ class QueryTest(fixtures.TestBase):
assert row.x == True # noqa
assert row.y == False # noqa
- def test_fetchmany(self):
- users.insert().execute(user_id=7, user_name='jack')
- users.insert().execute(user_id=8, user_name='ed')
- users.insert().execute(user_id=9, user_name='fred')
- r = users.select().execute()
- l = []
- for row in r.fetchmany(size=2):
- l.append(row)
- self.assert_(len(l) == 2, "fetchmany(size=2) got %s rows" % len(l))
-
def test_like_ops(self):
users.insert().execute(
{'user_id': 1, 'user_name': 'apples'},
@@ -816,521 +404,6 @@ class QueryTest(fixtures.TestBase):
use_labels=labels),
[(3, 'a'), (2, 'b'), (1, None)])
- def test_column_slices(self):
- users.insert().execute(user_id=1, user_name='john')
- users.insert().execute(user_id=2, user_name='jack')
- addresses.insert().execute(
- address_id=1, user_id=2, address='foo@bar.com')
-
- r = text(
- "select * from query_addresses", bind=testing.db).execute().first()
- self.assert_(r[0:1] == (1,))
- self.assert_(r[1:] == (2, 'foo@bar.com'))
- self.assert_(r[:-1] == (1, 2))
-
- def test_column_accessor_basic_compiled(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- dict(user_id=2, user_name='jack')
- )
-
- r = users.select(users.c.user_id == 2).execute().first()
- self.assert_(r.user_id == r['user_id'] == r[users.c.user_id] == 2)
- self.assert_(
- r.user_name == r['user_name'] == r[users.c.user_name] == 'jack')
-
- def test_column_accessor_basic_text(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- dict(user_id=2, user_name='jack')
- )
- r = testing.db.execute(
- text("select * from query_users where user_id=2")).first()
- self.assert_(r.user_id == r['user_id'] == r[users.c.user_id] == 2)
- self.assert_(
- r.user_name == r['user_name'] == r[users.c.user_name] == 'jack')
-
- def test_column_accessor_textual_select(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- dict(user_id=2, user_name='jack')
- )
- # this will create column() objects inside
- # the select(), these need to match on name anyway
- r = testing.db.execute(
- select([
- column('user_id'), column('user_name')
- ]).select_from(table('query_users')).
- where(text('user_id=2'))
- ).first()
- self.assert_(r.user_id == r['user_id'] == r[users.c.user_id] == 2)
- self.assert_(
- r.user_name == r['user_name'] == r[users.c.user_name] == 'jack')
-
- def test_column_accessor_dotted_union(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- )
-
- # test a little sqlite weirdness - with the UNION,
- # cols come back as "query_users.user_id" in cursor.description
- r = testing.db.execute(
- text(
- "select query_users.user_id, query_users.user_name "
- "from query_users "
- "UNION select query_users.user_id, "
- "query_users.user_name from query_users"
- )
- ).first()
- eq_(r['user_id'], 1)
- eq_(r['user_name'], "john")
- eq_(list(r.keys()), ["user_id", "user_name"])
-
- @testing.only_on("sqlite", "sqlite specific feature")
- def test_column_accessor_sqlite_raw(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- )
-
- r = text(
- "select query_users.user_id, query_users.user_name "
- "from query_users "
- "UNION select query_users.user_id, "
- "query_users.user_name from query_users",
- bind=testing.db).execution_options(sqlite_raw_colnames=True). \
- execute().first()
- assert 'user_id' not in r
- assert 'user_name' not in r
- eq_(r['query_users.user_id'], 1)
- eq_(r['query_users.user_name'], "john")
- eq_(list(r.keys()), ["query_users.user_id", "query_users.user_name"])
-
- @testing.only_on("sqlite", "sqlite specific feature")
- def test_column_accessor_sqlite_translated(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- )
-
- r = text(
- "select query_users.user_id, query_users.user_name "
- "from query_users "
- "UNION select query_users.user_id, "
- "query_users.user_name from query_users",
- bind=testing.db).execute().first()
- eq_(r['user_id'], 1)
- eq_(r['user_name'], "john")
- eq_(r['query_users.user_id'], 1)
- eq_(r['query_users.user_name'], "john")
- eq_(list(r.keys()), ["user_id", "user_name"])
-
- def test_column_accessor_labels_w_dots(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- )
- # test using literal tablename.colname
- r = text(
- 'select query_users.user_id AS "query_users.user_id", '
- 'query_users.user_name AS "query_users.user_name" '
- 'from query_users', bind=testing.db).\
- execution_options(sqlite_raw_colnames=True).execute().first()
- eq_(r['query_users.user_id'], 1)
- eq_(r['query_users.user_name'], "john")
- assert "user_name" not in r
- eq_(list(r.keys()), ["query_users.user_id", "query_users.user_name"])
-
- def test_column_accessor_unary(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- )
-
- # unary experssions
- r = select([users.c.user_name.distinct()]).order_by(
- users.c.user_name).execute().first()
- eq_(r[users.c.user_name], 'john')
- eq_(r.user_name, 'john')
-
- def test_column_accessor_err(self):
- r = testing.db.execute(select([1])).first()
- assert_raises_message(
- AttributeError,
- "Could not locate column in row for column 'foo'",
- getattr, r, "foo"
- )
- assert_raises_message(
- KeyError,
- "Could not locate column in row for column 'foo'",
- lambda: r['foo']
- )
-
- def test_graceful_fetch_on_non_rows(self):
- """test that calling fetchone() etc. on a result that doesn't
- return rows fails gracefully.
-
- """
-
- # these proxies don't work with no cursor.description present.
- # so they don't apply to this test at the moment.
- # result.FullyBufferedResultProxy,
- # result.BufferedRowResultProxy,
- # result.BufferedColumnResultProxy
-
- conn = testing.db.connect()
- for meth in ('fetchone', 'fetchall', 'first', 'scalar', 'fetchmany'):
- trans = conn.begin()
- result = conn.execute(users.insert(), user_id=1)
- assert_raises_message(
- exc.ResourceClosedError,
- "This result object does not return rows. "
- "It has been closed automatically.",
- getattr(result, meth),
- )
- trans.rollback()
-
- @testing.requires.empty_inserts
- @testing.requires.returning
- def test_no_inserted_pk_on_returning(self):
- result = testing.db.execute(users.insert().returning(
- users.c.user_id, users.c.user_name))
- assert_raises_message(
- exc.InvalidRequestError,
- r"Can't call inserted_primary_key when returning\(\) is used.",
- getattr, result, 'inserted_primary_key'
- )
-
- def test_fetchone_til_end(self):
- result = testing.db.execute("select * from query_users")
- eq_(result.fetchone(), None)
- eq_(result.fetchone(), None)
- eq_(result.fetchone(), None)
- result.close()
- assert_raises_message(
- exc.ResourceClosedError,
- "This result object is closed.",
- result.fetchone
- )
-
- def test_row_case_sensitive(self):
- row = testing.db.execute(
- select([
- literal_column("1").label("case_insensitive"),
- literal_column("2").label("CaseSensitive")
- ])
- ).first()
-
- eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
- eq_(row["case_insensitive"], 1)
- eq_(row["CaseSensitive"], 2)
-
- assert_raises(
- KeyError,
- lambda: row["Case_insensitive"]
- )
- assert_raises(
- KeyError,
- lambda: row["casesensitive"]
- )
-
- def test_row_case_insensitive(self):
- ins_db = engines.testing_engine(options={"case_sensitive": False})
- row = ins_db.execute(
- select([
- literal_column("1").label("case_insensitive"),
- literal_column("2").label("CaseSensitive")
- ])
- ).first()
-
- eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
- eq_(row["case_insensitive"], 1)
- eq_(row["CaseSensitive"], 2)
- eq_(row["Case_insensitive"], 1)
- eq_(row["casesensitive"], 2)
-
- def test_row_as_args(self):
- users.insert().execute(user_id=1, user_name='john')
- r = users.select(users.c.user_id == 1).execute().first()
- users.delete().execute()
- users.insert().execute(r)
- eq_(users.select().execute().fetchall(), [(1, 'john')])
-
- def test_result_as_args(self):
- users.insert().execute([
- dict(user_id=1, user_name='john'),
- dict(user_id=2, user_name='ed')])
- r = users.select().execute()
- users2.insert().execute(list(r))
- eq_(
- users2.select().order_by(users2.c.user_id).execute().fetchall(),
- [(1, 'john'), (2, 'ed')]
- )
-
- users2.delete().execute()
- r = users.select().execute()
- users2.insert().execute(*list(r))
- eq_(
- users2.select().order_by(users2.c.user_id).execute().fetchall(),
- [(1, 'john'), (2, 'ed')]
- )
-
- @testing.requires.duplicate_names_in_cursor_description
- def test_ambiguous_column(self):
- users.insert().execute(user_id=1, user_name='john')
- result = users.outerjoin(addresses).select().execute()
- r = result.first()
-
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: r['user_id']
- )
-
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: r[users.c.user_id]
- )
-
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: r[addresses.c.user_id]
- )
-
- # try to trick it - fake_table isn't in the result!
- # we get the correct error
- fake_table = Table('fake', MetaData(), Column('user_id', Integer))
- assert_raises_message(
- exc.InvalidRequestError,
- "Could not locate column in row for column 'fake.user_id'",
- lambda: r[fake_table.c.user_id]
- )
-
- r = util.pickle.loads(util.pickle.dumps(r))
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: r['user_id']
- )
-
- result = users.outerjoin(addresses).select().execute()
- result = _result.BufferedColumnResultProxy(result.context)
- r = result.first()
- assert isinstance(r, _result.BufferedColumnRow)
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: r['user_id']
- )
-
- @testing.requires.duplicate_names_in_cursor_description
- def test_ambiguous_column_by_col(self):
- users.insert().execute(user_id=1, user_name='john')
- ua = users.alias()
- u2 = users.alias()
- result = select([users.c.user_id, ua.c.user_id]).execute()
- row = result.first()
-
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: row[users.c.user_id]
- )
-
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: row[ua.c.user_id]
- )
-
- # Unfortunately, this fails -
- # we'd like
- # "Could not locate column in row"
- # to be raised here, but the check for
- # "common column" in _compare_name_for_result()
- # has other requirements to be more liberal.
- # Ultimately the
- # expression system would need a way to determine
- # if given two columns in a "proxy" relationship, if they
- # refer to a different parent table
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: row[u2.c.user_id]
- )
-
- @testing.requires.duplicate_names_in_cursor_description
- def test_ambiguous_column_contains(self):
- # ticket 2702. in 0.7 we'd get True, False.
- # in 0.8, both columns are present so it's True;
- # but when they're fetched you'll get the ambiguous error.
- users.insert().execute(user_id=1, user_name='john')
- result = select([users.c.user_id, addresses.c.user_id]).\
- select_from(users.outerjoin(addresses)).execute()
- row = result.first()
-
- eq_(
- set([users.c.user_id in row, addresses.c.user_id in row]),
- set([True])
- )
-
- def test_ambiguous_column_by_col_plus_label(self):
- users.insert().execute(user_id=1, user_name='john')
- result = select(
- [users.c.user_id,
- type_coerce(users.c.user_id, Integer).label('foo')]).execute()
- row = result.first()
- eq_(
- row[users.c.user_id], 1
- )
- eq_(
- row[1], 1
- )
-
- def test_fetch_partial_result_map(self):
- users.insert().execute(user_id=7, user_name='ed')
-
- t = text("select * from query_users").columns(
- user_name=String()
- )
- eq_(
- testing.db.execute(t).fetchall(), [(7, 'ed')]
- )
-
- def test_fetch_unordered_result_map(self):
- users.insert().execute(user_id=7, user_name='ed')
-
- class Goofy1(TypeDecorator):
- impl = String
-
- def process_result_value(self, value, dialect):
- return value + "a"
-
- class Goofy2(TypeDecorator):
- impl = String
-
- def process_result_value(self, value, dialect):
- return value + "b"
-
- class Goofy3(TypeDecorator):
- impl = String
-
- def process_result_value(self, value, dialect):
- return value + "c"
-
- t = text(
- "select user_name as a, user_name as b, "
- "user_name as c from query_users").columns(
- a=Goofy1(), b=Goofy2(), c=Goofy3()
- )
- eq_(
- testing.db.execute(t).fetchall(), [
- ('eda', 'edb', 'edc')
- ]
- )
-
- @testing.requires.subqueries
- def test_column_label_targeting(self):
- users.insert().execute(user_id=7, user_name='ed')
-
- for s in (
- users.select().alias('foo'),
- users.select().alias(users.name),
- ):
- row = s.select(use_labels=True).execute().first()
- assert row[s.c.user_id] == 7
- assert row[s.c.user_name] == 'ed'
-
- def test_keys(self):
- users.insert().execute(user_id=1, user_name='foo')
- r = users.select().execute()
- eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
- r = r.first()
- eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
-
- def test_items(self):
- users.insert().execute(user_id=1, user_name='foo')
- r = users.select().execute().first()
- eq_(
- [(x[0].lower(), x[1]) for x in list(r.items())],
- [('user_id', 1), ('user_name', 'foo')])
-
- def test_len(self):
- users.insert().execute(user_id=1, user_name='foo')
- r = users.select().execute().first()
- eq_(len(r), 2)
-
- r = testing.db.execute('select user_name, user_id from query_users'). \
- first()
- eq_(len(r), 2)
- r = testing.db.execute('select user_name from query_users').first()
- eq_(len(r), 1)
-
- def test_sorting_in_python(self):
- users.insert().execute(
- dict(user_id=1, user_name='foo'),
- dict(user_id=2, user_name='bar'),
- dict(user_id=3, user_name='def'),
- )
-
- rows = users.select().order_by(users.c.user_name).execute().fetchall()
-
- eq_(rows, [(2, 'bar'), (3, 'def'), (1, 'foo')])
-
- eq_(sorted(rows), [(1, 'foo'), (2, 'bar'), (3, 'def')])
-
- def test_column_order_with_simple_query(self):
- # should return values in column definition order
- users.insert().execute(user_id=1, user_name='foo')
- r = users.select(users.c.user_id == 1).execute().first()
- eq_(r[0], 1)
- eq_(r[1], 'foo')
- eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
- eq_(list(r.values()), [1, 'foo'])
-
- def test_column_order_with_text_query(self):
- # should return values in query order
- users.insert().execute(user_id=1, user_name='foo')
- r = testing.db.execute('select user_name, user_id from query_users'). \
- first()
- eq_(r[0], 'foo')
- eq_(r[1], 1)
- eq_([x.lower() for x in list(r.keys())], ['user_name', 'user_id'])
- eq_(list(r.values()), ['foo', 1])
-
- @testing.crashes('oracle', 'FIXME: unknown, varify not fails_on()')
- @testing.crashes('firebird', 'An identifier must begin with a letter')
- def test_column_accessor_shadow(self):
- meta = MetaData(testing.db)
- shadowed = Table(
- 'test_shadowed', meta,
- Column('shadow_id', INT, primary_key=True),
- Column('shadow_name', VARCHAR(20)),
- Column('parent', VARCHAR(20)),
- Column('row', VARCHAR(40)),
- Column('_parent', VARCHAR(20)),
- Column('_row', VARCHAR(20)),
- )
- shadowed.create(checkfirst=True)
- try:
- shadowed.insert().execute(
- shadow_id=1, shadow_name='The Shadow', parent='The Light',
- row='Without light there is no shadow',
- _parent='Hidden parent', _row='Hidden row')
- r = shadowed.select(shadowed.c.shadow_id == 1).execute().first()
- self.assert_(
- r.shadow_id == r['shadow_id'] == r[shadowed.c.shadow_id] == 1)
- self.assert_(
- r.shadow_name == r['shadow_name'] ==
- r[shadowed.c.shadow_name] == 'The Shadow')
- self.assert_(
- r.parent == r['parent'] == r[shadowed.c.parent] == 'The Light')
- self.assert_(
- r.row == r['row'] == r[shadowed.c.row] ==
- 'Without light there is no shadow')
- self.assert_(r['_parent'] == 'Hidden parent')
- self.assert_(r['_row'] == 'Hidden row')
- finally:
- shadowed.drop(checkfirst=True)
-
@testing.emits_warning('.*empty sequence.*')
def test_in_filtering(self):
"""test the behavior of the in_() function."""
@@ -1480,393 +553,6 @@ class RequiredBindTest(fixtures.TablesTest):
is_(bindparam('foo', callable_=c, required=False).required, False)
-class TableInsertTest(fixtures.TablesTest):
-
- """test for consistent insert behavior across dialects
- regarding the inline=True flag, lower-case 't' tables.
-
- """
- run_create_tables = 'each'
- __backend__ = True
-
- @classmethod
- def define_tables(cls, metadata):
- Table(
- 'foo', metadata,
- Column('id', Integer, Sequence('t_id_seq'), primary_key=True),
- Column('data', String(50)),
- Column('x', Integer)
- )
-
- def _fixture(self, types=True):
- if types:
- t = sql.table(
- 'foo', sql.column('id', Integer),
- sql.column('data', String),
- sql.column('x', Integer))
- else:
- t = sql.table(
- 'foo', sql.column('id'), sql.column('data'), sql.column('x'))
- return t
-
- def _test(self, stmt, row, returning=None, inserted_primary_key=False):
- r = testing.db.execute(stmt)
-
- if returning:
- returned = r.first()
- eq_(returned, returning)
- elif inserted_primary_key is not False:
- eq_(r.inserted_primary_key, inserted_primary_key)
-
- eq_(testing.db.execute(self.tables.foo.select()).first(), row)
-
- def _test_multi(self, stmt, rows, data):
- testing.db.execute(stmt, rows)
- eq_(
- testing.db.execute(
- self.tables.foo.select().
- order_by(self.tables.foo.c.id)).fetchall(),
- data)
-
- @testing.requires.sequences
- def test_expicit_sequence(self):
- t = self._fixture()
- self._test(
- t.insert().values(
- id=func.next_value(Sequence('t_id_seq')), data='data', x=5),
- (1, 'data', 5)
- )
-
- def test_uppercase(self):
- t = self.tables.foo
- self._test(
- t.insert().values(id=1, data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[1]
- )
-
- def test_uppercase_inline(self):
- t = self.tables.foo
- self._test(
- t.insert(inline=True).values(id=1, data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[1]
- )
-
- @testing.crashes(
- "mssql+pyodbc",
- "Pyodbc + SQL Server + Py3K, some decimal handling issue")
- def test_uppercase_inline_implicit(self):
- t = self.tables.foo
- self._test(
- t.insert(inline=True).values(data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[None]
- )
-
- def test_uppercase_implicit(self):
- t = self.tables.foo
- self._test(
- t.insert().values(data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[1]
- )
-
- def test_uppercase_direct_params(self):
- t = self.tables.foo
- self._test(
- t.insert().values(id=1, data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[1]
- )
-
- @testing.requires.returning
- def test_uppercase_direct_params_returning(self):
- t = self.tables.foo
- self._test(
- t.insert().values(id=1, data='data', x=5).returning(t.c.id, t.c.x),
- (1, 'data', 5),
- returning=(1, 5)
- )
-
- @testing.fails_on(
- 'mssql', "lowercase table doesn't support identity insert disable")
- def test_direct_params(self):
- t = self._fixture()
- self._test(
- t.insert().values(id=1, data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[]
- )
-
- @testing.fails_on(
- 'mssql', "lowercase table doesn't support identity insert disable")
- @testing.requires.returning
- def test_direct_params_returning(self):
- t = self._fixture()
- self._test(
- t.insert().values(id=1, data='data', x=5).returning(t.c.id, t.c.x),
- (1, 'data', 5),
- returning=(1, 5)
- )
-
- @testing.requires.emulated_lastrowid
- def test_implicit_pk(self):
- t = self._fixture()
- self._test(
- t.insert().values(data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[]
- )
-
- @testing.requires.emulated_lastrowid
- def test_implicit_pk_multi_rows(self):
- t = self._fixture()
- self._test_multi(
- t.insert(),
- [
- {'data': 'd1', 'x': 5},
- {'data': 'd2', 'x': 6},
- {'data': 'd3', 'x': 7},
- ],
- [
- (1, 'd1', 5),
- (2, 'd2', 6),
- (3, 'd3', 7)
- ],
- )
-
- @testing.requires.emulated_lastrowid
- def test_implicit_pk_inline(self):
- t = self._fixture()
- self._test(
- t.insert(inline=True).values(data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[]
- )
-
-
-class KeyTargetingTest(fixtures.TablesTest):
- run_inserts = 'once'
- run_deletes = None
- __backend__ = True
-
- @classmethod
- def define_tables(cls, metadata):
- Table(
- 'keyed1', metadata, Column("a", CHAR(2), key="b"),
- Column("c", CHAR(2), key="q")
- )
- Table('keyed2', metadata, Column("a", CHAR(2)), Column("b", CHAR(2)))
- Table('keyed3', metadata, Column("a", CHAR(2)), Column("d", CHAR(2)))
- Table('keyed4', metadata, Column("b", CHAR(2)), Column("q", CHAR(2)))
- Table('content', metadata, Column('t', String(30), key="type"))
- Table('bar', metadata, Column('ctype', String(30), key="content_type"))
-
- if testing.requires.schemas.enabled:
- Table(
- 'wschema', metadata,
- Column("a", CHAR(2), key="b"),
- Column("c", CHAR(2), key="q"),
- schema=testing.config.test_schema
- )
-
- @classmethod
- def insert_data(cls):
- cls.tables.keyed1.insert().execute(dict(b="a1", q="c1"))
- cls.tables.keyed2.insert().execute(dict(a="a2", b="b2"))
- cls.tables.keyed3.insert().execute(dict(a="a3", d="d3"))
- cls.tables.keyed4.insert().execute(dict(b="b4", q="q4"))
- cls.tables.content.insert().execute(type="t1")
-
- if testing.requires.schemas.enabled:
- cls.tables['%s.wschema' % testing.config.test_schema].insert().execute(
- dict(b="a1", q="c1"))
-
- @testing.requires.schemas
- def test_keyed_accessor_wschema(self):
- keyed1 = self.tables['%s.wschema' % testing.config.test_schema]
- row = testing.db.execute(keyed1.select()).first()
-
- eq_(row.b, "a1")
- eq_(row.q, "c1")
- eq_(row.a, "a1")
- eq_(row.c, "c1")
-
- def test_keyed_accessor_single(self):
- keyed1 = self.tables.keyed1
- row = testing.db.execute(keyed1.select()).first()
-
- eq_(row.b, "a1")
- eq_(row.q, "c1")
- eq_(row.a, "a1")
- eq_(row.c, "c1")
-
- def test_keyed_accessor_single_labeled(self):
- keyed1 = self.tables.keyed1
- row = testing.db.execute(keyed1.select().apply_labels()).first()
-
- eq_(row.keyed1_b, "a1")
- eq_(row.keyed1_q, "c1")
- eq_(row.keyed1_a, "a1")
- eq_(row.keyed1_c, "c1")
-
- @testing.requires.duplicate_names_in_cursor_description
- def test_keyed_accessor_composite_conflict_2(self):
- keyed1 = self.tables.keyed1
- keyed2 = self.tables.keyed2
-
- row = testing.db.execute(select([keyed1, keyed2])).first()
- # row.b is unambiguous
- eq_(row.b, "b2")
- # row.a is ambiguous
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambig",
- getattr, row, "a"
- )
-
- def test_keyed_accessor_composite_names_precedent(self):
- keyed1 = self.tables.keyed1
- keyed4 = self.tables.keyed4
-
- row = testing.db.execute(select([keyed1, keyed4])).first()
- eq_(row.b, "b4")
- eq_(row.q, "q4")
- eq_(row.a, "a1")
- eq_(row.c, "c1")
-
- @testing.requires.duplicate_names_in_cursor_description
- def test_keyed_accessor_composite_keys_precedent(self):
- keyed1 = self.tables.keyed1
- keyed3 = self.tables.keyed3
-
- row = testing.db.execute(select([keyed1, keyed3])).first()
- eq_(row.q, "c1")
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name 'b'",
- getattr, row, "b"
- )
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name 'a'",
- getattr, row, "a"
- )
- eq_(row.d, "d3")
-
- def test_keyed_accessor_composite_labeled(self):
- keyed1 = self.tables.keyed1
- keyed2 = self.tables.keyed2
-
- row = testing.db.execute(select([keyed1, keyed2]).apply_labels()). \
- first()
- eq_(row.keyed1_b, "a1")
- eq_(row.keyed1_a, "a1")
- eq_(row.keyed1_q, "c1")
- eq_(row.keyed1_c, "c1")
- eq_(row.keyed2_a, "a2")
- eq_(row.keyed2_b, "b2")
- assert_raises(KeyError, lambda: row['keyed2_c'])
- assert_raises(KeyError, lambda: row['keyed2_q'])
-
- def test_column_label_overlap_fallback(self):
- content, bar = self.tables.content, self.tables.bar
- row = testing.db.execute(
- select([content.c.type.label("content_type")])).first()
- assert content.c.type not in row
- assert bar.c.content_type not in row
- assert sql.column('content_type') in row
-
- row = testing.db.execute(select([func.now().label("content_type")])). \
- first()
- assert content.c.type not in row
- assert bar.c.content_type not in row
- assert sql.column('content_type') in row
-
- def test_column_label_overlap_fallback_2(self):
- content, bar = self.tables.content, self.tables.bar
- row = testing.db.execute(content.select(use_labels=True)).first()
- assert content.c.type in row
- assert bar.c.content_type not in row
- assert sql.column('content_type') not in row
-
- def test_columnclause_schema_column_one(self):
- keyed2 = self.tables.keyed2
-
- # this is addressed by [ticket:2932]
- # ColumnClause._compare_name_for_result allows the
- # columns which the statement is against to be lightweight
- # cols, which results in a more liberal comparison scheme
- a, b = sql.column('a'), sql.column('b')
- stmt = select([a, b]).select_from(table("keyed2"))
- row = testing.db.execute(stmt).first()
-
- assert keyed2.c.a in row
- assert keyed2.c.b in row
- assert a in row
- assert b in row
-
- def test_columnclause_schema_column_two(self):
- keyed2 = self.tables.keyed2
-
- a, b = sql.column('a'), sql.column('b')
- stmt = select([keyed2.c.a, keyed2.c.b])
- row = testing.db.execute(stmt).first()
-
- assert keyed2.c.a in row
- assert keyed2.c.b in row
- assert a in row
- assert b in row
-
- def test_columnclause_schema_column_three(self):
- keyed2 = self.tables.keyed2
-
- # this is also addressed by [ticket:2932]
-
- a, b = sql.column('a'), sql.column('b')
- stmt = text("select a, b from keyed2").columns(a=CHAR, b=CHAR)
- row = testing.db.execute(stmt).first()
-
- assert keyed2.c.a in row
- assert keyed2.c.b in row
- assert a in row
- assert b in row
- assert stmt.c.a in row
- assert stmt.c.b in row
-
- def test_columnclause_schema_column_four(self):
- keyed2 = self.tables.keyed2
-
- # this is also addressed by [ticket:2932]
-
- a, b = sql.column('keyed2_a'), sql.column('keyed2_b')
- stmt = text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns(
- a, b)
- row = testing.db.execute(stmt).first()
-
- assert keyed2.c.a in row
- assert keyed2.c.b in row
- assert a in row
- assert b in row
- assert stmt.c.keyed2_a in row
- assert stmt.c.keyed2_b in row
-
- def test_columnclause_schema_column_five(self):
- keyed2 = self.tables.keyed2
-
- # this is also addressed by [ticket:2932]
-
- stmt = text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns(
- keyed2_a=CHAR, keyed2_b=CHAR)
- row = testing.db.execute(stmt).first()
-
- assert keyed2.c.a in row
- assert keyed2.c.b in row
- assert stmt.c.keyed2_a in row
- assert stmt.c.keyed2_b in row
-
-
class LimitTest(fixtures.TestBase):
__backend__ = True
diff --git a/test/sql/test_resultset.py b/test/sql/test_resultset.py
new file mode 100644
index 000000000..8461996ea
--- /dev/null
+++ b/test/sql/test_resultset.py
@@ -0,0 +1,1136 @@
+from sqlalchemy.testing import eq_, assert_raises_message, assert_raises, \
+ in_, not_in_, is_, ne_
+from sqlalchemy import testing
+from sqlalchemy.testing import fixtures, engines
+from sqlalchemy import util
+from sqlalchemy import (
+ exc, sql, func, select, String, Integer, MetaData, ForeignKey,
+ VARCHAR, INT, CHAR, text, type_coerce, literal_column,
+ TypeDecorator, table, column)
+from sqlalchemy.engine import result as _result
+from sqlalchemy.testing.schema import Table, Column
+import operator
+
+
+class ResultProxyTest(fixtures.TablesTest):
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'users', metadata,
+ Column(
+ 'user_id', INT, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_name', VARCHAR(20)),
+ test_needs_acid=True
+ )
+ Table(
+ 'addresses', metadata,
+ Column(
+ 'address_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_id', Integer, ForeignKey('users.user_id')),
+ Column('address', String(30)),
+ test_needs_acid=True
+ )
+
+ Table(
+ 'users2', metadata,
+ Column('user_id', INT, primary_key=True),
+ Column('user_name', VARCHAR(20)),
+ test_needs_acid=True
+ )
+
+ def test_row_iteration(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ {'user_id': 7, 'user_name': 'jack'},
+ {'user_id': 8, 'user_name': 'ed'},
+ {'user_id': 9, 'user_name': 'fred'},
+ )
+ r = users.select().execute()
+ l = []
+ for row in r:
+ l.append(row)
+ eq_(len(l), 3)
+
+ @testing.requires.subqueries
+ def test_anonymous_rows(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ {'user_id': 7, 'user_name': 'jack'},
+ {'user_id': 8, 'user_name': 'ed'},
+ {'user_id': 9, 'user_name': 'fred'},
+ )
+
+ sel = select([users.c.user_id]).where(users.c.user_name == 'jack'). \
+ as_scalar()
+ for row in select([sel + 1, sel + 3], bind=users.bind).execute():
+ eq_(row['anon_1'], 8)
+ eq_(row['anon_2'], 10)
+
+ def test_row_comparison(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=7, user_name='jack')
+ rp = users.select().execute().first()
+
+ eq_(rp, rp)
+ is_(not(rp != rp), True)
+
+ equal = (7, 'jack')
+
+ eq_(rp, equal)
+ eq_(equal, rp)
+ is_((not (rp != equal)), True)
+ is_(not (equal != equal), True)
+
+ def endless():
+ while True:
+ yield 1
+ ne_(rp, endless())
+ ne_(endless(), rp)
+
+ # test that everything compares the same
+ # as it would against a tuple
+ for compare in [False, 8, endless(), 'xyz', (7, 'jack')]:
+ for op in [
+ operator.eq, operator.ne, operator.gt,
+ operator.lt, operator.ge, operator.le
+ ]:
+
+ try:
+ control = op(equal, compare)
+ except TypeError:
+ # Py3K raises TypeError for some invalid comparisons
+ assert_raises(TypeError, op, rp, compare)
+ else:
+ eq_(control, op(rp, compare))
+
+ try:
+ control = op(compare, equal)
+ except TypeError:
+ # Py3K raises TypeError for some invalid comparisons
+ assert_raises(TypeError, op, compare, rp)
+ else:
+ eq_(control, op(compare, rp))
+
+ @testing.provide_metadata
+ def test_column_label_overlap_fallback(self):
+ content = Table(
+ 'content', self.metadata,
+ Column('type', String(30)),
+ )
+ bar = Table(
+ 'bar', self.metadata,
+ Column('content_type', String(30))
+ )
+ self.metadata.create_all(testing.db)
+ testing.db.execute(content.insert().values(type="t1"))
+
+ row = testing.db.execute(content.select(use_labels=True)).first()
+ in_(content.c.type, row)
+ not_in_(bar.c.content_type, row)
+ in_(sql.column('content_type'), row)
+
+ row = testing.db.execute(
+ select([content.c.type.label("content_type")])).first()
+ in_(content.c.type, row)
+
+ not_in_(bar.c.content_type, row)
+
+ in_(sql.column('content_type'), row)
+
+ row = testing.db.execute(select([func.now().label("content_type")])). \
+ first()
+ not_in_(content.c.type, row)
+
+ not_in_(bar.c.content_type, row)
+
+ in_(sql.column('content_type'), row)
+
+ def test_pickled_rows(self):
+ users = self.tables.users
+ addresses = self.tables.addresses
+
+ users.insert().execute(
+ {'user_id': 7, 'user_name': 'jack'},
+ {'user_id': 8, 'user_name': 'ed'},
+ {'user_id': 9, 'user_name': 'fred'},
+ )
+
+ for pickle in False, True:
+ for use_labels in False, True:
+ result = users.select(use_labels=use_labels).order_by(
+ users.c.user_id).execute().fetchall()
+
+ if pickle:
+ result = util.pickle.loads(util.pickle.dumps(result))
+
+ eq_(
+ result,
+ [(7, "jack"), (8, "ed"), (9, "fred")]
+ )
+ if use_labels:
+ eq_(result[0]['users_user_id'], 7)
+ eq_(
+ list(result[0].keys()),
+ ["users_user_id", "users_user_name"])
+ else:
+ eq_(result[0]['user_id'], 7)
+ eq_(list(result[0].keys()), ["user_id", "user_name"])
+
+ eq_(result[0][0], 7)
+ eq_(result[0][users.c.user_id], 7)
+ eq_(result[0][users.c.user_name], 'jack')
+
+ if not pickle or use_labels:
+ assert_raises(
+ exc.NoSuchColumnError,
+ lambda: result[0][addresses.c.user_id])
+ else:
+ # test with a different table. name resolution is
+ # causing 'user_id' to match when use_labels wasn't used.
+ eq_(result[0][addresses.c.user_id], 7)
+
+ assert_raises(
+ exc.NoSuchColumnError, lambda: result[0]['fake key'])
+ assert_raises(
+ exc.NoSuchColumnError,
+ lambda: result[0][addresses.c.address_id])
+
+ def test_column_error_printing(self):
+ row = testing.db.execute(select([1])).first()
+
+ class unprintable(object):
+
+ def __str__(self):
+ raise ValueError("nope")
+
+ msg = r"Could not locate column in row for column '%s'"
+
+ for accessor, repl in [
+ ("x", "x"),
+ (Column("q", Integer), "q"),
+ (Column("q", Integer) + 12, r"q \+ :q_1"),
+ (unprintable(), "unprintable element.*"),
+ ]:
+ assert_raises_message(
+ exc.NoSuchColumnError,
+ msg % repl,
+ lambda: row[accessor]
+ )
+
+ def test_fetchmany(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=7, user_name='jack')
+ users.insert().execute(user_id=8, user_name='ed')
+ users.insert().execute(user_id=9, user_name='fred')
+ r = users.select().execute()
+ l = []
+ for row in r.fetchmany(size=2):
+ l.append(row)
+ eq_(len(l), 2)
+
+ def test_column_slices(self):
+ users = self.tables.users
+ addresses = self.tables.addresses
+
+ users.insert().execute(user_id=1, user_name='john')
+ users.insert().execute(user_id=2, user_name='jack')
+ addresses.insert().execute(
+ address_id=1, user_id=2, address='foo@bar.com')
+
+ r = text(
+ "select * from addresses", bind=testing.db).execute().first()
+ eq_(r[0:1], (1,))
+ eq_(r[1:], (2, 'foo@bar.com'))
+ eq_(r[:-1], (1, 2))
+
+ def test_column_accessor_basic_compiled(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ dict(user_id=2, user_name='jack')
+ )
+
+ r = users.select(users.c.user_id == 2).execute().first()
+ eq_(r.user_id, 2)
+ eq_(r['user_id'], 2)
+ eq_(r[users.c.user_id], 2)
+
+ eq_(r.user_name, 'jack')
+ eq_(r['user_name'], 'jack')
+ eq_(r[users.c.user_name], 'jack')
+
+ def test_column_accessor_basic_text(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ dict(user_id=2, user_name='jack')
+ )
+ r = testing.db.execute(
+ text("select * from users where user_id=2")).first()
+
+ eq_(r.user_id, 2)
+ eq_(r['user_id'], 2)
+ eq_(r[users.c.user_id], 2)
+
+ eq_(r.user_name, 'jack')
+ eq_(r['user_name'], 'jack')
+ eq_(r[users.c.user_name], 'jack')
+
+ def test_column_accessor_textual_select(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ dict(user_id=2, user_name='jack')
+ )
+ # this will create column() objects inside
+ # the select(), these need to match on name anyway
+ r = testing.db.execute(
+ select([
+ column('user_id'), column('user_name')
+ ]).select_from(table('users')).
+ where(text('user_id=2'))
+ ).first()
+
+ eq_(r.user_id, 2)
+ eq_(r['user_id'], 2)
+ eq_(r[users.c.user_id], 2)
+
+ eq_(r.user_name, 'jack')
+ eq_(r['user_name'], 'jack')
+ eq_(r[users.c.user_name], 'jack')
+
+ def test_column_accessor_dotted_union(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ )
+
+ # test a little sqlite weirdness - with the UNION,
+ # cols come back as "users.user_id" in cursor.description
+ r = testing.db.execute(
+ text(
+ "select users.user_id, users.user_name "
+ "from users "
+ "UNION select users.user_id, "
+ "users.user_name from users"
+ )
+ ).first()
+ eq_(r['user_id'], 1)
+ eq_(r['user_name'], "john")
+ eq_(list(r.keys()), ["user_id", "user_name"])
+
+ @testing.only_on("sqlite", "sqlite specific feature")
+ def test_column_accessor_sqlite_raw(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ )
+
+ r = text(
+ "select users.user_id, users.user_name "
+ "from users "
+ "UNION select users.user_id, "
+ "users.user_name from users",
+ bind=testing.db).execution_options(sqlite_raw_colnames=True). \
+ execute().first()
+ not_in_('user_id', r)
+ not_in_('user_name', r)
+ eq_(r['users.user_id'], 1)
+ eq_(r['users.user_name'], "john")
+ eq_(list(r.keys()), ["users.user_id", "users.user_name"])
+
+ @testing.only_on("sqlite", "sqlite specific feature")
+ def test_column_accessor_sqlite_translated(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ )
+
+ r = text(
+ "select users.user_id, users.user_name "
+ "from users "
+ "UNION select users.user_id, "
+ "users.user_name from users",
+ bind=testing.db).execute().first()
+ eq_(r['user_id'], 1)
+ eq_(r['user_name'], "john")
+ eq_(r['users.user_id'], 1)
+ eq_(r['users.user_name'], "john")
+ eq_(list(r.keys()), ["user_id", "user_name"])
+
+ def test_column_accessor_labels_w_dots(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ )
+ # test using literal tablename.colname
+ r = text(
+ 'select users.user_id AS "users.user_id", '
+ 'users.user_name AS "users.user_name" '
+ 'from users', bind=testing.db).\
+ execution_options(sqlite_raw_colnames=True).execute().first()
+ eq_(r['users.user_id'], 1)
+ eq_(r['users.user_name'], "john")
+ not_in_("user_name", r)
+ eq_(list(r.keys()), ["users.user_id", "users.user_name"])
+
+ def test_column_accessor_unary(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ )
+
+        # unary expressions
+ r = select([users.c.user_name.distinct()]).order_by(
+ users.c.user_name).execute().first()
+ eq_(r[users.c.user_name], 'john')
+ eq_(r.user_name, 'john')
+
+ def test_column_accessor_err(self):
+ r = testing.db.execute(select([1])).first()
+ assert_raises_message(
+ AttributeError,
+ "Could not locate column in row for column 'foo'",
+ getattr, r, "foo"
+ )
+ assert_raises_message(
+ KeyError,
+ "Could not locate column in row for column 'foo'",
+ lambda: r['foo']
+ )
+
+ def test_graceful_fetch_on_non_rows(self):
+ """test that calling fetchone() etc. on a result that doesn't
+ return rows fails gracefully.
+
+ """
+
+ # these proxies don't work with no cursor.description present.
+ # so they don't apply to this test at the moment.
+ # result.FullyBufferedResultProxy,
+ # result.BufferedRowResultProxy,
+ # result.BufferedColumnResultProxy
+
+ users = self.tables.users
+
+ conn = testing.db.connect()
+ for meth in [
+ lambda r: r.fetchone(),
+ lambda r: r.fetchall(),
+ lambda r: r.first(),
+ lambda r: r.scalar(),
+ lambda r: r.fetchmany(),
+ lambda r: r._getter('user'),
+ lambda r: r._has_key('user'),
+ ]:
+ trans = conn.begin()
+ result = conn.execute(users.insert(), user_id=1)
+ assert_raises_message(
+ exc.ResourceClosedError,
+ "This result object does not return rows. "
+ "It has been closed automatically.",
+ meth, result,
+ )
+ trans.rollback()
+
+ def test_fetchone_til_end(self):
+ result = testing.db.execute("select * from users")
+ eq_(result.fetchone(), None)
+ eq_(result.fetchone(), None)
+ eq_(result.fetchone(), None)
+ result.close()
+ assert_raises_message(
+ exc.ResourceClosedError,
+ "This result object is closed.",
+ result.fetchone
+ )
+
+ def test_row_case_sensitive(self):
+ row = testing.db.execute(
+ select([
+ literal_column("1").label("case_insensitive"),
+ literal_column("2").label("CaseSensitive")
+ ])
+ ).first()
+
+ eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
+
+ in_("case_insensitive", row._keymap)
+ in_("CaseSensitive", row._keymap)
+ not_in_("casesensitive", row._keymap)
+
+ eq_(row["case_insensitive"], 1)
+ eq_(row["CaseSensitive"], 2)
+
+ assert_raises(
+ KeyError,
+ lambda: row["Case_insensitive"]
+ )
+ assert_raises(
+ KeyError,
+ lambda: row["casesensitive"]
+ )
+
+ def test_row_case_sensitive_unoptimized(self):
+ ins_db = engines.testing_engine(options={"case_sensitive": True})
+ row = ins_db.execute(
+ select([
+ literal_column("1").label("case_insensitive"),
+ literal_column("2").label("CaseSensitive"),
+ text("3 AS screw_up_the_cols")
+ ])
+ ).first()
+
+ eq_(
+ list(row.keys()),
+ ["case_insensitive", "CaseSensitive", "screw_up_the_cols"])
+
+ in_("case_insensitive", row._keymap)
+ in_("CaseSensitive", row._keymap)
+ not_in_("casesensitive", row._keymap)
+
+ eq_(row["case_insensitive"], 1)
+ eq_(row["CaseSensitive"], 2)
+ eq_(row["screw_up_the_cols"], 3)
+
+ assert_raises(KeyError, lambda: row["Case_insensitive"])
+ assert_raises(KeyError, lambda: row["casesensitive"])
+ assert_raises(KeyError, lambda: row["screw_UP_the_cols"])
+
+ def test_row_case_insensitive(self):
+ ins_db = engines.testing_engine(options={"case_sensitive": False})
+ row = ins_db.execute(
+ select([
+ literal_column("1").label("case_insensitive"),
+ literal_column("2").label("CaseSensitive")
+ ])
+ ).first()
+
+ eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
+
+ in_("case_insensitive", row._keymap)
+ in_("CaseSensitive", row._keymap)
+ in_("casesensitive", row._keymap)
+
+ eq_(row["case_insensitive"], 1)
+ eq_(row["CaseSensitive"], 2)
+ eq_(row["Case_insensitive"], 1)
+ eq_(row["casesensitive"], 2)
+
+ def test_row_case_insensitive_unoptimized(self):
+ ins_db = engines.testing_engine(options={"case_sensitive": False})
+ row = ins_db.execute(
+ select([
+ literal_column("1").label("case_insensitive"),
+ literal_column("2").label("CaseSensitive"),
+ text("3 AS screw_up_the_cols")
+ ])
+ ).first()
+
+ eq_(
+ list(row.keys()),
+ ["case_insensitive", "CaseSensitive", "screw_up_the_cols"])
+
+ in_("case_insensitive", row._keymap)
+ in_("CaseSensitive", row._keymap)
+ in_("casesensitive", row._keymap)
+
+ eq_(row["case_insensitive"], 1)
+ eq_(row["CaseSensitive"], 2)
+ eq_(row["screw_up_the_cols"], 3)
+ eq_(row["Case_insensitive"], 1)
+ eq_(row["casesensitive"], 2)
+ eq_(row["screw_UP_the_cols"], 3)
+
+ def test_row_as_args(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='john')
+ r = users.select(users.c.user_id == 1).execute().first()
+ users.delete().execute()
+ users.insert().execute(r)
+ eq_(users.select().execute().fetchall(), [(1, 'john')])
+
+ def test_result_as_args(self):
+ users = self.tables.users
+ users2 = self.tables.users2
+
+ users.insert().execute([
+ dict(user_id=1, user_name='john'),
+ dict(user_id=2, user_name='ed')])
+ r = users.select().execute()
+ users2.insert().execute(list(r))
+ eq_(
+ users2.select().order_by(users2.c.user_id).execute().fetchall(),
+ [(1, 'john'), (2, 'ed')]
+ )
+
+ users2.delete().execute()
+ r = users.select().execute()
+ users2.insert().execute(*list(r))
+ eq_(
+ users2.select().order_by(users2.c.user_id).execute().fetchall(),
+ [(1, 'john'), (2, 'ed')]
+ )
+
+ @testing.requires.duplicate_names_in_cursor_description
+ def test_ambiguous_column(self):
+ users = self.tables.users
+ addresses = self.tables.addresses
+
+ users.insert().execute(user_id=1, user_name='john')
+ result = users.outerjoin(addresses).select().execute()
+ r = result.first()
+
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: r['user_id']
+ )
+
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: r[users.c.user_id]
+ )
+
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: r[addresses.c.user_id]
+ )
+
+ # try to trick it - fake_table isn't in the result!
+ # we get the correct error
+ fake_table = Table('fake', MetaData(), Column('user_id', Integer))
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Could not locate column in row for column 'fake.user_id'",
+ lambda: r[fake_table.c.user_id]
+ )
+
+ r = util.pickle.loads(util.pickle.dumps(r))
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: r['user_id']
+ )
+
+ result = users.outerjoin(addresses).select().execute()
+ result = _result.BufferedColumnResultProxy(result.context)
+ r = result.first()
+ assert isinstance(r, _result.BufferedColumnRow)
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: r['user_id']
+ )
+
+ @testing.requires.duplicate_names_in_cursor_description
+ def test_ambiguous_column_by_col(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='john')
+ ua = users.alias()
+ u2 = users.alias()
+ result = select([users.c.user_id, ua.c.user_id]).execute()
+ row = result.first()
+
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: row[users.c.user_id]
+ )
+
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: row[ua.c.user_id]
+ )
+
+ # Unfortunately, this fails -
+ # we'd like
+ # "Could not locate column in row"
+ # to be raised here, but the check for
+ # "common column" in _compare_name_for_result()
+ # has other requirements to be more liberal.
+ # Ultimately the
+ # expression system would need a way to determine
+ # if given two columns in a "proxy" relationship, if they
+ # refer to a different parent table
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: row[u2.c.user_id]
+ )
+
+ @testing.requires.duplicate_names_in_cursor_description
+ def test_ambiguous_column_contains(self):
+ users = self.tables.users
+ addresses = self.tables.addresses
+
+ # ticket 2702. in 0.7 we'd get True, False.
+ # in 0.8, both columns are present so it's True;
+ # but when they're fetched you'll get the ambiguous error.
+ users.insert().execute(user_id=1, user_name='john')
+ result = select([users.c.user_id, addresses.c.user_id]).\
+ select_from(users.outerjoin(addresses)).execute()
+ row = result.first()
+
+ eq_(
+ set([users.c.user_id in row, addresses.c.user_id in row]),
+ set([True])
+ )
+
+ def test_ambiguous_column_by_col_plus_label(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='john')
+ result = select(
+ [users.c.user_id,
+ type_coerce(users.c.user_id, Integer).label('foo')]).execute()
+ row = result.first()
+ eq_(
+ row[users.c.user_id], 1
+ )
+ eq_(
+ row[1], 1
+ )
+
+ def test_fetch_partial_result_map(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=7, user_name='ed')
+
+ t = text("select * from users").columns(
+ user_name=String()
+ )
+ eq_(
+ testing.db.execute(t).fetchall(), [(7, 'ed')]
+ )
+
+ def test_fetch_unordered_result_map(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=7, user_name='ed')
+
+ class Goofy1(TypeDecorator):
+ impl = String
+
+ def process_result_value(self, value, dialect):
+ return value + "a"
+
+ class Goofy2(TypeDecorator):
+ impl = String
+
+ def process_result_value(self, value, dialect):
+ return value + "b"
+
+ class Goofy3(TypeDecorator):
+ impl = String
+
+ def process_result_value(self, value, dialect):
+ return value + "c"
+
+ t = text(
+ "select user_name as a, user_name as b, "
+ "user_name as c from users").columns(
+ a=Goofy1(), b=Goofy2(), c=Goofy3()
+ )
+ eq_(
+ testing.db.execute(t).fetchall(), [
+ ('eda', 'edb', 'edc')
+ ]
+ )
+
+ @testing.requires.subqueries
+ def test_column_label_targeting(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=7, user_name='ed')
+
+ for s in (
+ users.select().alias('foo'),
+ users.select().alias(users.name),
+ ):
+ row = s.select(use_labels=True).execute().first()
+ eq_(row[s.c.user_id], 7)
+ eq_(row[s.c.user_name], 'ed')
+
+ def test_keys(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='foo')
+ result = users.select().execute()
+ eq_(
+ result.keys(),
+ ['user_id', 'user_name']
+ )
+ row = result.first()
+ eq_(
+ row.keys(),
+ ['user_id', 'user_name']
+ )
+
+ def test_keys_anon_labels(self):
+ """test [ticket:3483]"""
+
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='foo')
+ result = testing.db.execute(
+ select([
+ users.c.user_id,
+ users.c.user_name.label(None),
+ func.count(literal_column('1'))]).
+ group_by(users.c.user_id, users.c.user_name)
+ )
+
+ eq_(
+ result.keys(),
+ ['user_id', 'user_name_1', 'count_1']
+ )
+ row = result.first()
+ eq_(
+ row.keys(),
+ ['user_id', 'user_name_1', 'count_1']
+ )
+
+ def test_items(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='foo')
+ r = users.select().execute().first()
+ eq_(
+ [(x[0].lower(), x[1]) for x in list(r.items())],
+ [('user_id', 1), ('user_name', 'foo')])
+
+ def test_len(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='foo')
+ r = users.select().execute().first()
+ eq_(len(r), 2)
+
+ r = testing.db.execute('select user_name, user_id from users'). \
+ first()
+ eq_(len(r), 2)
+ r = testing.db.execute('select user_name from users').first()
+ eq_(len(r), 1)
+
+ def test_sorting_in_python(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='foo'),
+ dict(user_id=2, user_name='bar'),
+ dict(user_id=3, user_name='def'),
+ )
+
+ rows = users.select().order_by(users.c.user_name).execute().fetchall()
+
+ eq_(rows, [(2, 'bar'), (3, 'def'), (1, 'foo')])
+
+ eq_(sorted(rows), [(1, 'foo'), (2, 'bar'), (3, 'def')])
+
+ def test_column_order_with_simple_query(self):
+ # should return values in column definition order
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='foo')
+ r = users.select(users.c.user_id == 1).execute().first()
+ eq_(r[0], 1)
+ eq_(r[1], 'foo')
+ eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
+ eq_(list(r.values()), [1, 'foo'])
+
+ def test_column_order_with_text_query(self):
+ # should return values in query order
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='foo')
+ r = testing.db.execute('select user_name, user_id from users'). \
+ first()
+ eq_(r[0], 'foo')
+ eq_(r[1], 1)
+ eq_([x.lower() for x in list(r.keys())], ['user_name', 'user_id'])
+ eq_(list(r.values()), ['foo', 1])
+
+ @testing.crashes('oracle', 'FIXME: unknown, varify not fails_on()')
+ @testing.crashes('firebird', 'An identifier must begin with a letter')
+ @testing.provide_metadata
+ def test_column_accessor_shadow(self):
+ shadowed = Table(
+ 'test_shadowed', self.metadata,
+ Column('shadow_id', INT, primary_key=True),
+ Column('shadow_name', VARCHAR(20)),
+ Column('parent', VARCHAR(20)),
+ Column('row', VARCHAR(40)),
+ Column('_parent', VARCHAR(20)),
+ Column('_row', VARCHAR(20)),
+ )
+ self.metadata.create_all()
+ shadowed.insert().execute(
+ shadow_id=1, shadow_name='The Shadow', parent='The Light',
+ row='Without light there is no shadow',
+ _parent='Hidden parent', _row='Hidden row')
+ r = shadowed.select(shadowed.c.shadow_id == 1).execute().first()
+
+ eq_(r.shadow_id, 1)
+ eq_(r['shadow_id'], 1)
+ eq_(r[shadowed.c.shadow_id], 1)
+
+ eq_(r.shadow_name, 'The Shadow')
+ eq_(r['shadow_name'], 'The Shadow')
+ eq_(r[shadowed.c.shadow_name], 'The Shadow')
+
+ eq_(r.parent, 'The Light')
+ eq_(r['parent'], 'The Light')
+ eq_(r[shadowed.c.parent], 'The Light')
+
+ eq_(r.row, 'Without light there is no shadow')
+ eq_(r['row'], 'Without light there is no shadow')
+ eq_(r[shadowed.c.row], 'Without light there is no shadow')
+
+ eq_(r['_parent'], 'Hidden parent')
+ eq_(r['_row'], 'Hidden row')
+
+
+class KeyTargetingTest(fixtures.TablesTest):
+ run_inserts = 'once'
+ run_deletes = None
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'keyed1', metadata, Column("a", CHAR(2), key="b"),
+ Column("c", CHAR(2), key="q")
+ )
+ Table('keyed2', metadata, Column("a", CHAR(2)), Column("b", CHAR(2)))
+ Table('keyed3', metadata, Column("a", CHAR(2)), Column("d", CHAR(2)))
+ Table('keyed4', metadata, Column("b", CHAR(2)), Column("q", CHAR(2)))
+ Table('content', metadata, Column('t', String(30), key="type"))
+ Table('bar', metadata, Column('ctype', String(30), key="content_type"))
+
+ if testing.requires.schemas.enabled:
+ Table(
+ 'wschema', metadata,
+ Column("a", CHAR(2), key="b"),
+ Column("c", CHAR(2), key="q"),
+ schema=testing.config.test_schema
+ )
+
+ @classmethod
+ def insert_data(cls):
+ cls.tables.keyed1.insert().execute(dict(b="a1", q="c1"))
+ cls.tables.keyed2.insert().execute(dict(a="a2", b="b2"))
+ cls.tables.keyed3.insert().execute(dict(a="a3", d="d3"))
+ cls.tables.keyed4.insert().execute(dict(b="b4", q="q4"))
+ cls.tables.content.insert().execute(type="t1")
+
+ if testing.requires.schemas.enabled:
+ cls.tables[
+ '%s.wschema' % testing.config.test_schema].insert().execute(
+ dict(b="a1", q="c1"))
+
+ @testing.requires.schemas
+ def test_keyed_accessor_wschema(self):
+ keyed1 = self.tables['%s.wschema' % testing.config.test_schema]
+ row = testing.db.execute(keyed1.select()).first()
+
+ eq_(row.b, "a1")
+ eq_(row.q, "c1")
+ eq_(row.a, "a1")
+ eq_(row.c, "c1")
+
+ def test_keyed_accessor_single(self):
+ keyed1 = self.tables.keyed1
+ row = testing.db.execute(keyed1.select()).first()
+
+ eq_(row.b, "a1")
+ eq_(row.q, "c1")
+ eq_(row.a, "a1")
+ eq_(row.c, "c1")
+
+ def test_keyed_accessor_single_labeled(self):
+ keyed1 = self.tables.keyed1
+ row = testing.db.execute(keyed1.select().apply_labels()).first()
+
+ eq_(row.keyed1_b, "a1")
+ eq_(row.keyed1_q, "c1")
+ eq_(row.keyed1_a, "a1")
+ eq_(row.keyed1_c, "c1")
+
+ @testing.requires.duplicate_names_in_cursor_description
+ def test_keyed_accessor_composite_conflict_2(self):
+ keyed1 = self.tables.keyed1
+ keyed2 = self.tables.keyed2
+
+ row = testing.db.execute(select([keyed1, keyed2])).first()
+ # row.b is unambiguous
+ eq_(row.b, "b2")
+ # row.a is ambiguous
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambig",
+ getattr, row, "a"
+ )
+
+ def test_keyed_accessor_composite_names_precedent(self):
+ keyed1 = self.tables.keyed1
+ keyed4 = self.tables.keyed4
+
+ row = testing.db.execute(select([keyed1, keyed4])).first()
+ eq_(row.b, "b4")
+ eq_(row.q, "q4")
+ eq_(row.a, "a1")
+ eq_(row.c, "c1")
+
+ @testing.requires.duplicate_names_in_cursor_description
+ def test_keyed_accessor_composite_keys_precedent(self):
+ keyed1 = self.tables.keyed1
+ keyed3 = self.tables.keyed3
+
+ row = testing.db.execute(select([keyed1, keyed3])).first()
+ eq_(row.q, "c1")
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name 'b'",
+ getattr, row, "b"
+ )
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name 'a'",
+ getattr, row, "a"
+ )
+ eq_(row.d, "d3")
+
+ def test_keyed_accessor_composite_labeled(self):
+ keyed1 = self.tables.keyed1
+ keyed2 = self.tables.keyed2
+
+ row = testing.db.execute(select([keyed1, keyed2]).apply_labels()). \
+ first()
+ eq_(row.keyed1_b, "a1")
+ eq_(row.keyed1_a, "a1")
+ eq_(row.keyed1_q, "c1")
+ eq_(row.keyed1_c, "c1")
+ eq_(row.keyed2_a, "a2")
+ eq_(row.keyed2_b, "b2")
+ assert_raises(KeyError, lambda: row['keyed2_c'])
+ assert_raises(KeyError, lambda: row['keyed2_q'])
+
+ def test_column_label_overlap_fallback(self):
+ content, bar = self.tables.content, self.tables.bar
+ row = testing.db.execute(
+ select([content.c.type.label("content_type")])).first()
+
+ not_in_(content.c.type, row)
+ not_in_(bar.c.content_type, row)
+
+ in_(sql.column('content_type'), row)
+
+ row = testing.db.execute(select([func.now().label("content_type")])). \
+ first()
+ not_in_(content.c.type, row)
+ not_in_(bar.c.content_type, row)
+ in_(sql.column('content_type'), row)
+
+ def test_column_label_overlap_fallback_2(self):
+ content, bar = self.tables.content, self.tables.bar
+ row = testing.db.execute(content.select(use_labels=True)).first()
+ in_(content.c.type, row)
+ not_in_(bar.c.content_type, row)
+ not_in_(sql.column('content_type'), row)
+
+ def test_columnclause_schema_column_one(self):
+ keyed2 = self.tables.keyed2
+
+ # this is addressed by [ticket:2932]
+ # ColumnClause._compare_name_for_result allows the
+ # columns which the statement is against to be lightweight
+ # cols, which results in a more liberal comparison scheme
+ a, b = sql.column('a'), sql.column('b')
+ stmt = select([a, b]).select_from(table("keyed2"))
+ row = testing.db.execute(stmt).first()
+
+ in_(keyed2.c.a, row)
+ in_(keyed2.c.b, row)
+ in_(a, row)
+ in_(b, row)
+
+ def test_columnclause_schema_column_two(self):
+ keyed2 = self.tables.keyed2
+
+ a, b = sql.column('a'), sql.column('b')
+ stmt = select([keyed2.c.a, keyed2.c.b])
+ row = testing.db.execute(stmt).first()
+
+ in_(keyed2.c.a, row)
+ in_(keyed2.c.b, row)
+ in_(a, row)
+ in_(b, row)
+
+ def test_columnclause_schema_column_three(self):
+ keyed2 = self.tables.keyed2
+
+ # this is also addressed by [ticket:2932]
+
+ a, b = sql.column('a'), sql.column('b')
+ stmt = text("select a, b from keyed2").columns(a=CHAR, b=CHAR)
+ row = testing.db.execute(stmt).first()
+
+ in_(keyed2.c.a, row)
+ in_(keyed2.c.b, row)
+ in_(a, row)
+ in_(b, row)
+ in_(stmt.c.a, row)
+ in_(stmt.c.b, row)
+
+ def test_columnclause_schema_column_four(self):
+ keyed2 = self.tables.keyed2
+
+ # this is also addressed by [ticket:2932]
+
+ a, b = sql.column('keyed2_a'), sql.column('keyed2_b')
+ stmt = text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns(
+ a, b)
+ row = testing.db.execute(stmt).first()
+
+ in_(keyed2.c.a, row)
+ in_(keyed2.c.b, row)
+ in_(a, row)
+ in_(b, row)
+ in_(stmt.c.keyed2_a, row)
+ in_(stmt.c.keyed2_b, row)
+
+ def test_columnclause_schema_column_five(self):
+ keyed2 = self.tables.keyed2
+
+ # this is also addressed by [ticket:2932]
+
+ stmt = text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns(
+ keyed2_a=CHAR, keyed2_b=CHAR)
+ row = testing.db.execute(stmt).first()
+
+ in_(keyed2.c.a, row)
+ in_(keyed2.c.b, row)
+ in_(stmt.c.keyed2_a, row)
+ in_(stmt.c.keyed2_b, row)
diff --git a/test/sql/test_returning.py b/test/sql/test_returning.py
index cd9f632b9..8c189a0dd 100644
--- a/test/sql/test_returning.py
+++ b/test/sql/test_returning.py
@@ -387,6 +387,33 @@ class ReturnDefaultsTest(fixtures.TablesTest):
{"data": None, 'upddef': 1}
)
+ @testing.fails_on("oracle+cx_oracle", "seems like a cx_oracle bug")
+ def test_insert_all(self):
+ t1 = self.tables.t1
+ result = testing.db.execute(
+ t1.insert().values(upddef=1).return_defaults()
+ )
+ eq_(
+ dict(result.returned_defaults),
+ {"id": 1, "data": None, "insdef": 0}
+ )
+
+ @testing.fails_on("oracle+cx_oracle", "seems like a cx_oracle bug")
+ def test_update_all(self):
+ t1 = self.tables.t1
+ testing.db.execute(
+ t1.insert().values(upddef=1)
+ )
+ result = testing.db.execute(
+ t1.update().
+ values(insdef=2).return_defaults()
+ )
+ eq_(
+ dict(result.returned_defaults),
+ {'upddef': 1}
+ )
+
+
class ImplicitReturningFlag(fixtures.TestBase):
__backend__ = True
diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py
index 3390f4a77..b9cbbf480 100644
--- a/test/sql/test_selectable.py
+++ b/test/sql/test_selectable.py
@@ -458,6 +458,26 @@ class SelectableTest(
assert u1.corresponding_column(table2.c.col1) is u1.c._all_columns[0]
assert u1.corresponding_column(table2.c.col3) is u1.c._all_columns[2]
+ @testing.emits_warning("Column 'col1'")
+ def test_union_alias_dupe_keys_grouped(self):
+ s1 = select([table1.c.col1, table1.c.col2, table2.c.col1]).\
+ limit(1).alias()
+ s2 = select([table2.c.col1, table2.c.col2, table2.c.col3]).limit(1)
+ u1 = union(s1, s2)
+
+ assert u1.corresponding_column(
+ s1.c._all_columns[0]) is u1.c._all_columns[0]
+ assert u1.corresponding_column(s2.c.col1) is u1.c._all_columns[0]
+ assert u1.corresponding_column(s1.c.col2) is u1.c.col2
+ assert u1.corresponding_column(s2.c.col2) is u1.c.col2
+
+ assert u1.corresponding_column(s2.c.col3) is u1.c._all_columns[2]
+
+        # this differs from the non-alias test because table2.c.col1 is
+        # more directly at s2.c.col1 than it is at s1.c.col1.
+ assert u1.corresponding_column(table2.c.col1) is u1.c._all_columns[0]
+ assert u1.corresponding_column(table2.c.col3) is u1.c._all_columns[2]
+
def test_select_union(self):
# like testaliasunion, but off a Select off the union.
@@ -912,10 +932,10 @@ class AnonLabelTest(fixtures.TestBase):
c1 = func.count('*')
assert c1.label(None) is not c1
- eq_(str(select([c1])), "SELECT count(:param_1) AS count_1")
+ eq_(str(select([c1])), "SELECT count(:count_2) AS count_1")
c2 = select([c1]).compile()
- eq_(str(select([c1.label(None)])), "SELECT count(:param_1) AS count_1")
+ eq_(str(select([c1.label(None)])), "SELECT count(:count_2) AS count_1")
def test_named_labels_named_column(self):
c1 = column('x')
diff --git a/test/sql/test_types.py b/test/sql/test_types.py
index 2545dec59..f1fb611fb 100644
--- a/test/sql/test_types.py
+++ b/test/sql/test_types.py
@@ -1,5 +1,6 @@
# coding: utf-8
-from sqlalchemy.testing import eq_, assert_raises, assert_raises_message, expect_warnings
+from sqlalchemy.testing import eq_, is_, assert_raises, \
+ assert_raises_message, expect_warnings
import decimal
import datetime
import os
@@ -9,9 +10,10 @@ from sqlalchemy import (
and_, func, Date, LargeBinary, literal, cast, text, Enum,
type_coerce, VARCHAR, Time, DateTime, BigInteger, SmallInteger, BOOLEAN,
BLOB, NCHAR, NVARCHAR, CLOB, TIME, DATE, DATETIME, TIMESTAMP, SMALLINT,
- INTEGER, DECIMAL, NUMERIC, FLOAT, REAL)
+ INTEGER, DECIMAL, NUMERIC, FLOAT, REAL, Array)
from sqlalchemy.sql import ddl
-
+from sqlalchemy.sql import visitors
+from sqlalchemy import inspection
from sqlalchemy import exc, types, util, dialects
for name in dialects.__all__:
__import__("sqlalchemy.dialects.%s" % name)
@@ -25,6 +27,7 @@ from sqlalchemy.testing import AssertsCompiledSQL, AssertsExecutionResults, \
from sqlalchemy.testing.util import picklers
from sqlalchemy.testing.util import round_decimal
from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import mock
class AdaptTest(fixtures.TestBase):
@@ -137,7 +140,7 @@ class AdaptTest(fixtures.TestBase):
for is_down_adaption, typ, target_adaptions in adaptions():
if typ in (types.TypeDecorator, types.TypeEngine, types.Variant):
continue
- elif typ is dialects.postgresql.ARRAY:
+ elif issubclass(typ, Array):
t1 = typ(String)
else:
t1 = typ()
@@ -187,12 +190,28 @@ class AdaptTest(fixtures.TestBase):
for typ in self._all_types():
if typ in (types.TypeDecorator, types.TypeEngine, types.Variant):
continue
- elif typ is dialects.postgresql.ARRAY:
+ elif issubclass(typ, Array):
t1 = typ(String)
else:
t1 = typ()
repr(t1)
+ def test_adapt_constructor_copy_override_kw(self):
+ """test that adapt() can accept kw args that override
+ the state of the original object.
+
+ This essentially is testing the behavior of util.constructor_copy().
+
+ """
+ t1 = String(length=50, convert_unicode=False)
+ t2 = t1.adapt(Text, convert_unicode=True)
+ eq_(
+ t2.length, 50
+ )
+ eq_(
+ t2.convert_unicode, True
+ )
+
class TypeAffinityTest(fixtures.TestBase):
@@ -771,6 +790,68 @@ class TypeCoerceCastTest(fixtures.TablesTest):
[('BIND_INd1', 'BIND_INd1BIND_OUT')]
)
+ def test_cast_replace_col_w_bind(self):
+ self._test_replace_col_w_bind(cast)
+
+ def test_type_coerce_replace_col_w_bind(self):
+ self._test_replace_col_w_bind(type_coerce)
+
+ def _test_replace_col_w_bind(self, coerce_fn):
+ MyType = self.MyType
+
+ t = self.tables.t
+ t.insert().values(data=coerce_fn('d1', MyType)).execute()
+
+ stmt = select([t.c.data, coerce_fn(t.c.data, MyType)])
+
+ def col_to_bind(col):
+ if col is t.c.data:
+ return bindparam(None, "x", type_=col.type, unique=True)
+ return None
+
+        # ensure we evaluate the expression so that we can see
+ # the clone resets this info
+ stmt.compile()
+
+ new_stmt = visitors.replacement_traverse(stmt, {}, col_to_bind)
+
+ # original statement
+ eq_(
+ testing.db.execute(stmt).fetchall(),
+ [('BIND_INd1', 'BIND_INd1BIND_OUT')]
+ )
+
+ # replaced with binds; CAST can't affect the bound parameter
+ # on the way in here
+ eq_(
+ testing.db.execute(new_stmt).fetchall(),
+ [('x', 'BIND_INxBIND_OUT')] if coerce_fn is type_coerce
+ else [('x', 'xBIND_OUT')]
+ )
+
+ def test_cast_bind(self):
+ self._test_bind(cast)
+
+ def test_type_bind(self):
+ self._test_bind(type_coerce)
+
+ def _test_bind(self, coerce_fn):
+ MyType = self.MyType
+
+ t = self.tables.t
+ t.insert().values(data=coerce_fn('d1', MyType)).execute()
+
+ stmt = select([
+ bindparam(None, "x", String(50), unique=True),
+ coerce_fn(bindparam(None, "x", String(50), unique=True), MyType)
+ ])
+
+ eq_(
+ testing.db.execute(stmt).fetchall(),
+ [('x', 'BIND_INxBIND_OUT')] if coerce_fn is type_coerce
+ else [('x', 'xBIND_OUT')]
+ )
+
@testing.fails_on(
"oracle", "ORA-00906: missing left parenthesis - "
"seems to be CAST(:param AS type)")
@@ -804,6 +885,7 @@ class TypeCoerceCastTest(fixtures.TablesTest):
[('BIND_INd1BIND_OUT', )])
+
class VariantTest(fixtures.TestBase, AssertsCompiledSQL):
def setup(self):
@@ -1160,16 +1242,13 @@ class EnumTest(AssertsCompiledSQL, fixtures.TestBase):
def __init__(self, name):
self.name = name
- class MyEnum(types.SchemaType, TypeDecorator):
+ class MyEnum(TypeDecorator):
def __init__(self, values):
self.impl = Enum(
*[v.name for v in values], name="myenum",
native_enum=False)
- def _set_table(self, table, column):
- self.impl._set_table(table, column)
-
# future method
def process_literal_param(self, value, dialect):
return value.name
@@ -1326,6 +1405,68 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
with open(f, mode='rb') as o:
return o.read()
+
+class ArrayTest(fixtures.TestBase):
+
+ def _myarray_fixture(self):
+ class MyArray(Array):
+ pass
+ return MyArray
+
+ def test_array_index_map_dimensions(self):
+ col = column('x', Array(Integer, dimensions=3))
+ is_(
+ col[5].type._type_affinity, Array
+ )
+ eq_(
+ col[5].type.dimensions, 2
+ )
+ is_(
+ col[5][6].type._type_affinity, Array
+ )
+ eq_(
+ col[5][6].type.dimensions, 1
+ )
+ is_(
+ col[5][6][7].type._type_affinity, Integer
+ )
+
+ def test_array_getitem_single_type(self):
+ m = MetaData()
+ arrtable = Table(
+ 'arrtable', m,
+ Column('intarr', Array(Integer)),
+ Column('strarr', Array(String)),
+ )
+ is_(arrtable.c.intarr[1].type._type_affinity, Integer)
+ is_(arrtable.c.strarr[1].type._type_affinity, String)
+
+ def test_array_getitem_slice_type(self):
+ m = MetaData()
+ arrtable = Table(
+ 'arrtable', m,
+ Column('intarr', Array(Integer)),
+ Column('strarr', Array(String)),
+ )
+ is_(arrtable.c.intarr[1:3].type._type_affinity, Array)
+ is_(arrtable.c.strarr[1:3].type._type_affinity, Array)
+
+ def test_array_getitem_slice_type_dialect_level(self):
+ MyArray = self._myarray_fixture()
+ m = MetaData()
+ arrtable = Table(
+ 'arrtable', m,
+ Column('intarr', MyArray(Integer)),
+ Column('strarr', MyArray(String)),
+ )
+ is_(arrtable.c.intarr[1:3].type._type_affinity, Array)
+ is_(arrtable.c.strarr[1:3].type._type_affinity, Array)
+
+ # but the slice returns the actual type
+ assert isinstance(arrtable.c.intarr[1:3].type, MyArray)
+ assert isinstance(arrtable.c.strarr[1:3].type, MyArray)
+
+
test_table = meta = MyCustomType = MyTypeDec = None
@@ -1631,6 +1772,34 @@ class ExpressionTest(
assert distinct(test_table.c.data).type == test_table.c.data.type
assert test_table.c.data.distinct().type == test_table.c.data.type
+ def test_detect_coercion_of_builtins(self):
+ @inspection._self_inspects
+ class SomeSQLAThing(object):
+ def __repr__(self):
+ return "some_sqla_thing()"
+
+ class SomeOtherThing(object):
+ pass
+
+ assert_raises_message(
+ exc.ArgumentError,
+ r"Object some_sqla_thing\(\) is not legal as a SQL literal value",
+ lambda: column('a', String) == SomeSQLAThing()
+ )
+
+ is_(
+ bindparam('x', SomeOtherThing()).type,
+ types.NULLTYPE
+ )
+
+ def test_detect_coercion_not_fooled_by_mock(self):
+ m1 = mock.Mock()
+ is_(
+ bindparam('x', m1).type,
+ types.NULLTYPE
+ )
+
+
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -1899,12 +2068,9 @@ class BooleanTest(
def __init__(self, value):
self.value = value
- class MyBool(types.SchemaType, TypeDecorator):
+ class MyBool(TypeDecorator):
impl = Boolean()
- def _set_table(self, table, column):
- self.impl._set_table(table, column)
-
# future method
def process_literal_param(self, value, dialect):
return value.value
diff --git a/test/sql/test_update.py b/test/sql/test_update.py
index 58c86613b..3ab580b11 100644
--- a/test/sql/test_update.py
+++ b/test/sql/test_update.py
@@ -4,6 +4,7 @@ from sqlalchemy.dialects import mysql
from sqlalchemy.engine import default
from sqlalchemy.testing import AssertsCompiledSQL, eq_, fixtures
from sqlalchemy.testing.schema import Table, Column
+from sqlalchemy import util
class _UpdateFromTestBase(object):
@@ -32,6 +33,11 @@ class _UpdateFromTestBase(object):
test_needs_autoincrement=True),
Column('address_id', None, ForeignKey('addresses.id')),
Column('data', String(30)))
+ Table('update_w_default', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('x', Integer),
+ Column('ycol', Integer, key='y'),
+ Column('data', String(30), onupdate=lambda: "hi"))
@classmethod
def fixtures(cls):
@@ -165,6 +171,124 @@ class UpdateTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL):
table1.c.name: table1.c.name + 'lala',
table1.c.myid: func.do_stuff(table1.c.myid, literal('hoho'))
}
+
+ self.assert_compile(
+ update(
+ table1,
+ (table1.c.myid == func.hoho(4)) & (
+ table1.c.name == literal('foo') +
+ table1.c.name +
+ literal('lala')),
+ values=values),
+ 'UPDATE mytable '
+ 'SET '
+ 'myid=do_stuff(mytable.myid, :param_1), '
+ 'name=(mytable.name || :name_1) '
+ 'WHERE '
+ 'mytable.myid = hoho(:hoho_1) AND '
+ 'mytable.name = :param_2 || mytable.name || :param_3')
+
+ def test_update_ordered_parameters_1(self):
+ table1 = self.tables.mytable
+
+ # Confirm that we can pass values as list value pairs
+ # note these are ordered *differently* from table.c
+ values = [
+ (table1.c.name, table1.c.name + 'lala'),
+ (table1.c.myid, func.do_stuff(table1.c.myid, literal('hoho'))),
+ ]
+ self.assert_compile(
+ update(
+ table1,
+ (table1.c.myid == func.hoho(4)) & (
+ table1.c.name == literal('foo') +
+ table1.c.name +
+ literal('lala')),
+ preserve_parameter_order=True,
+ values=values),
+ 'UPDATE mytable '
+ 'SET '
+ 'name=(mytable.name || :name_1), '
+ 'myid=do_stuff(mytable.myid, :param_1) '
+ 'WHERE '
+ 'mytable.myid = hoho(:hoho_1) AND '
+ 'mytable.name = :param_2 || mytable.name || :param_3')
+
+ def test_update_ordered_parameters_2(self):
+ table1 = self.tables.mytable
+
+ # Confirm that we can pass values as list value pairs
+ # note these are ordered *differently* from table.c
+ values = [
+ (table1.c.name, table1.c.name + 'lala'),
+ ('description', 'some desc'),
+ (table1.c.myid, func.do_stuff(table1.c.myid, literal('hoho')))
+ ]
+ self.assert_compile(
+ update(
+ table1,
+ (table1.c.myid == func.hoho(4)) & (
+ table1.c.name == literal('foo') +
+ table1.c.name +
+ literal('lala')),
+ preserve_parameter_order=True).values(values),
+ 'UPDATE mytable '
+ 'SET '
+ 'name=(mytable.name || :name_1), '
+ 'description=:description, '
+ 'myid=do_stuff(mytable.myid, :param_1) '
+ 'WHERE '
+ 'mytable.myid = hoho(:hoho_1) AND '
+ 'mytable.name = :param_2 || mytable.name || :param_3')
+
+ def test_update_ordered_parameters_fire_onupdate(self):
+ table = self.tables.update_w_default
+
+ values = [
+ (table.c.y, table.c.x + 5),
+ ('x', 10)
+ ]
+
+ self.assert_compile(
+ table.update(preserve_parameter_order=True).values(values),
+ "UPDATE update_w_default SET ycol=(update_w_default.x + :x_1), "
+ "x=:x, data=:data"
+ )
+
+ def test_update_ordered_parameters_override_onupdate(self):
+ table = self.tables.update_w_default
+
+ values = [
+ (table.c.y, table.c.x + 5),
+ (table.c.data, table.c.x + 10),
+ ('x', 10)
+ ]
+
+ self.assert_compile(
+ table.update(preserve_parameter_order=True).values(values),
+ "UPDATE update_w_default SET ycol=(update_w_default.x + :x_1), "
+ "data=(update_w_default.x + :x_2), x=:x"
+ )
+
+ def test_update_preserve_order_reqs_listtups(self):
+ table1 = self.tables.mytable
+ testing.assert_raises_message(
+ ValueError,
+ "When preserve_parameter_order is True, values\(\) "
+ "only accepts a list of 2-tuples",
+ table1.update(preserve_parameter_order=True).values,
+ {"description": "foo", "name": "bar"}
+ )
+
+ def test_update_ordereddict(self):
+ table1 = self.tables.mytable
+
+ # Confirm that ordered dicts are treated as normal dicts,
+ # columns sorted in table order
+ values = util.OrderedDict((
+ (table1.c.name, table1.c.name + 'lala'),
+ (table1.c.myid, func.do_stuff(table1.c.myid, literal('hoho')))))
+
self.assert_compile(
update(
table1,