author     Scott Dugas <scott.dugas@foundationdb.com>   2014-11-03 14:54:51 -0500
committer  Scott Dugas <scott.dugas@foundationdb.com>   2014-11-03 14:54:51 -0500
commit     b31ab006897d2709442f9745faf0cac6e0de1713 (patch)
tree       a6b428e9ca7f1f67c5193581ecd82a83632eeb79 /test
parent     ebb9d57cb385f49becbf54c6f78647715ddd1c29 (diff)
parent     7bf5ac9c1e814c999d4930941935e1d5cfd236bf (diff)
download   sqlalchemy-b31ab006897d2709442f9745faf0cac6e0de1713.tar.gz
Merge branch 'master' into fdbsql-tests
Conflicts:
    lib/sqlalchemy/testing/exclusions.py
Diffstat (limited to 'test')
-rw-r--r--  test/base/test_events.py  165
-rw-r--r--  test/base/test_except.py  123
-rw-r--r--  test/base/test_utils.py  39
-rwxr-xr-x  test/conftest.py  20
-rw-r--r--  test/dialect/mssql/test_engine.py  20
-rw-r--r--  test/dialect/mysql/test_reflection.py  32
-rw-r--r--  test/dialect/mysql/test_types.py  6
-rw-r--r--  test/dialect/postgresql/test_reflection.py  180
-rw-r--r--  test/dialect/test_oracle.py  39
-rw-r--r--  test/dialect/test_sqlite.py  173
-rw-r--r--  test/engine/test_execute.py  94
-rw-r--r--  test/engine/test_logging.py  8
-rw-r--r--  test/engine/test_reconnect.py  50
-rw-r--r--  test/engine/test_transaction.py  218
-rw-r--r--  test/ext/declarative/test_basic.py  408
-rw-r--r--  test/ext/declarative/test_clsregistry.py  5
-rw-r--r--  test/ext/declarative/test_inheritance.py  394
-rw-r--r--  test/ext/declarative/test_mixin.py  285
-rw-r--r--  test/ext/declarative/test_reflection.py  193
-rw-r--r--  test/ext/test_automap.py  167
-rw-r--r--  test/ext/test_orderinglist.py  22
-rw-r--r--  test/orm/inheritance/test_single.py  206
-rw-r--r--  test/orm/test_assorted_eager.py  4
-rw-r--r--  test/orm/test_attributes.py  50
-rw-r--r--  test/orm/test_bind.py  413
-rw-r--r--  test/orm/test_cascade.py  8
-rw-r--r--  test/orm/test_collection.py  17
-rw-r--r--  test/orm/test_eager_relations.py  5
-rw-r--r--  test/orm/test_events.py  18
-rw-r--r--  test/orm/test_joins.py  39
-rw-r--r--  test/orm/test_query.py  44
-rw-r--r--  test/orm/test_rel_fn.py  20
-rw-r--r--  test/orm/test_relationships.py  2094
-rw-r--r--  test/orm/test_session.py  253
-rw-r--r--  test/orm/test_update_delete.py  183
-rw-r--r--  test/profiles.txt  146
-rw-r--r--  test/requirements.py  29
-rw-r--r--  test/sql/test_compiler.py  54
-rw-r--r--  test/sql/test_defaults.py  65
-rw-r--r--  test/sql/test_functions.py  112
-rw-r--r--  test/sql/test_generative.py  5
-rw-r--r--  test/sql/test_insert.py  82
-rw-r--r--  test/sql/test_metadata.py  86
-rw-r--r--  test/sql/test_operators.py  21
-rw-r--r--  test/sql/test_query.py  28
-rw-r--r--  test/sql/test_selectable.py  24
46 files changed, 4594 insertions, 2053 deletions
diff --git a/test/base/test_events.py b/test/base/test_events.py
index 30b728cd3..89379961e 100644
--- a/test/base/test_events.py
+++ b/test/base/test_events.py
@@ -192,7 +192,7 @@ class EventsTest(fixtures.TestBase):
class NamedCallTest(fixtures.TestBase):
- def setUp(self):
+ def _fixture(self):
class TargetEventsOne(event.Events):
def event_one(self, x, y):
pass
@@ -205,48 +205,104 @@ class NamedCallTest(fixtures.TestBase):
class TargetOne(object):
dispatch = event.dispatcher(TargetEventsOne)
- self.TargetOne = TargetOne
+ return TargetOne
- def tearDown(self):
- event.base._remove_dispatcher(
- self.TargetOne.__dict__['dispatch'].events)
+ def _wrapped_fixture(self):
+ class TargetEvents(event.Events):
+ @classmethod
+ def _listen(cls, event_key):
+ fn = event_key._listen_fn
+
+ def adapt(*args):
+ fn(*["adapted %s" % arg for arg in args])
+ event_key = event_key.with_wrapper(adapt)
+
+ event_key.base_listen()
+
+ def event_one(self, x, y):
+ pass
+
+ def event_five(self, x, y, z, q):
+ pass
+
+ class Target(object):
+ dispatch = event.dispatcher(TargetEvents)
+ return Target
def test_kw_accept(self):
+ TargetOne = self._fixture()
+
canary = Mock()
- @event.listens_for(self.TargetOne, "event_one", named=True)
+ @event.listens_for(TargetOne, "event_one", named=True)
def handler1(**kw):
canary(kw)
- self.TargetOne().dispatch.event_one(4, 5)
+ TargetOne().dispatch.event_one(4, 5)
eq_(
canary.mock_calls,
[call({"x": 4, "y": 5})]
)
+ def test_kw_accept_wrapped(self):
+ TargetOne = self._wrapped_fixture()
+
+ canary = Mock()
+
+ @event.listens_for(TargetOne, "event_one", named=True)
+ def handler1(**kw):
+ canary(kw)
+
+ TargetOne().dispatch.event_one(4, 5)
+
+ eq_(
+ canary.mock_calls,
+ [call({'y': 'adapted 5', 'x': 'adapted 4'})]
+ )
+
def test_partial_kw_accept(self):
+ TargetOne = self._fixture()
+
canary = Mock()
- @event.listens_for(self.TargetOne, "event_five", named=True)
+ @event.listens_for(TargetOne, "event_five", named=True)
def handler1(z, y, **kw):
canary(z, y, kw)
- self.TargetOne().dispatch.event_five(4, 5, 6, 7)
+ TargetOne().dispatch.event_five(4, 5, 6, 7)
eq_(
canary.mock_calls,
[call(6, 5, {"x": 4, "q": 7})]
)
+ def test_partial_kw_accept_wrapped(self):
+ TargetOne = self._wrapped_fixture()
+
+ canary = Mock()
+
+ @event.listens_for(TargetOne, "event_five", named=True)
+ def handler1(z, y, **kw):
+ canary(z, y, kw)
+
+ TargetOne().dispatch.event_five(4, 5, 6, 7)
+
+ eq_(
+ canary.mock_calls,
+ [call('adapted 6', 'adapted 5',
+ {'q': 'adapted 7', 'x': 'adapted 4'})]
+ )
+
def test_kw_accept_plus_kw(self):
+ TargetOne = self._fixture()
canary = Mock()
- @event.listens_for(self.TargetOne, "event_two", named=True)
+ @event.listens_for(TargetOne, "event_two", named=True)
def handler1(**kw):
canary(kw)
- self.TargetOne().dispatch.event_two(4, 5, z=8, q=5)
+ TargetOne().dispatch.event_two(4, 5, z=8, q=5)
eq_(
canary.mock_calls,
@@ -996,6 +1052,25 @@ class RemovalTest(fixtures.TestBase):
dispatch = event.dispatcher(TargetEvents)
return Target
+ def _wrapped_fixture(self):
+ class TargetEvents(event.Events):
+ @classmethod
+ def _listen(cls, event_key):
+ fn = event_key._listen_fn
+
+ def adapt(value):
+ fn("adapted " + value)
+ event_key = event_key.with_wrapper(adapt)
+
+ event_key.base_listen()
+
+ def event_one(self, x):
+ pass
+
+ class Target(object):
+ dispatch = event.dispatcher(TargetEvents)
+ return Target
+
def test_clslevel(self):
Target = self._fixture()
@@ -1194,3 +1269,71 @@ class RemovalTest(fixtures.TestBase):
"deque mutated during iteration",
t1.dispatch.event_one
)
+
+ def test_remove_plain_named(self):
+ Target = self._fixture()
+
+ listen_one = Mock()
+ t1 = Target()
+ event.listen(t1, "event_one", listen_one, named=True)
+ t1.dispatch.event_one("t1")
+
+ eq_(listen_one.mock_calls, [call(x="t1")])
+ event.remove(t1, "event_one", listen_one)
+ t1.dispatch.event_one("t2")
+
+ eq_(listen_one.mock_calls, [call(x="t1")])
+
+ def test_remove_wrapped_named(self):
+ Target = self._wrapped_fixture()
+
+ listen_one = Mock()
+ t1 = Target()
+ event.listen(t1, "event_one", listen_one, named=True)
+ t1.dispatch.event_one("t1")
+
+ eq_(listen_one.mock_calls, [call(x="adapted t1")])
+ event.remove(t1, "event_one", listen_one)
+ t1.dispatch.event_one("t2")
+
+ eq_(listen_one.mock_calls, [call(x="adapted t1")])
+
+ def test_double_event_nonwrapped(self):
+ Target = self._fixture()
+
+ listen_one = Mock()
+ t1 = Target()
+ event.listen(t1, "event_one", listen_one)
+ event.listen(t1, "event_one", listen_one)
+
+ t1.dispatch.event_one("t1")
+
+ # doubles are eliminated
+ eq_(listen_one.mock_calls, [call("t1")])
+
+ # only one remove needed
+ event.remove(t1, "event_one", listen_one)
+ t1.dispatch.event_one("t2")
+
+ eq_(listen_one.mock_calls, [call("t1")])
+
+ def test_double_event_wrapped(self):
+ # this is issue #3199
+ Target = self._wrapped_fixture()
+
+ listen_one = Mock()
+ t1 = Target()
+
+ event.listen(t1, "event_one", listen_one)
+ event.listen(t1, "event_one", listen_one)
+
+ t1.dispatch.event_one("t1")
+
+ # doubles are eliminated
+ eq_(listen_one.mock_calls, [call("adapted t1")])
+
+ # only one remove needed
+ event.remove(t1, "event_one", listen_one)
+ t1.dispatch.event_one("t2")
+
+ eq_(listen_one.mock_calls, [call("adapted t1")])
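
The fixtures above exercise two event-system behaviors: with `named=True`, a listener receives the event's positional arguments as keyword arguments, and a custom `_listen` classmethod can wrap the listener so the arguments are adapted before it is called (issue #3199 concerned `event.remove()` locating such wrapped listeners). A minimal standalone sketch of the named-listener case, reusing the fixture pattern from these tests:

    from sqlalchemy import event
    from sqlalchemy.testing import eq_
    from sqlalchemy.testing.mock import Mock, call

    class TargetEvents(event.Events):
        # the argument names declared here become the keywords
        # that a named=True listener receives
        def event_one(self, x, y):
            pass

    class Target(object):
        dispatch = event.dispatcher(TargetEvents)

    canary = Mock()

    @event.listens_for(Target, "event_one", named=True)
    def handler(**kw):
        canary(kw)

    Target().dispatch.event_one(4, 5)
    eq_(canary.mock_calls, [call({"x": 4, "y": 5})])
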
diff --git a/test/base/test_except.py b/test/base/test_except.py
index a62382725..918e7a042 100644
--- a/test/base/test_except.py
+++ b/test/base/test_except.py
@@ -2,19 +2,12 @@
from sqlalchemy import exc as sa_exceptions
-from sqlalchemy import util
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import eq_
-if util.py2k:
- from exceptions import StandardError, KeyboardInterrupt, SystemExit
-else:
- Exception = BaseException
-
class Error(Exception):
- """This class will be old-style on <= 2.4 and new-style on >=
- 2.5."""
+ pass
class DatabaseError(Error):
@@ -26,6 +19,7 @@ class OperationalError(DatabaseError):
class ProgrammingError(DatabaseError):
+
def __str__(self):
return '<%s>' % self.bogus
@@ -38,89 +32,110 @@ class WrapTest(fixtures.TestBase):
def test_db_error_normal(self):
try:
- raise sa_exceptions.DBAPIError.instance('', [],
- OperationalError(), DatabaseError)
+ raise sa_exceptions.DBAPIError.instance(
+ '', [],
+ OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError:
self.assert_(True)
def test_tostring(self):
try:
- raise sa_exceptions.DBAPIError.instance('this is a message'
- , None, OperationalError(), DatabaseError)
+ raise sa_exceptions.DBAPIError.instance(
+ 'this is a message',
+ None, OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError as exc:
- assert str(exc) \
- == "(OperationalError) 'this is a message' None"
+ eq_(
+ str(exc),
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message']")
def test_tostring_large_dict(self):
try:
- raise sa_exceptions.DBAPIError.instance('this is a message'
- ,
- {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 6, 'g': 7, 'h':
- 8, 'i': 9, 'j': 10, 'k': 11,
- }, OperationalError(), DatabaseError)
+ raise sa_exceptions.DBAPIError.instance(
+ 'this is a message',
+ {
+ 'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 6, 'g': 7,
+ 'h': 8, 'i': 9, 'j': 10, 'k': 11
+ },
+ OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError as exc:
- assert str(exc).startswith("(OperationalError) 'this is a "
- "message' {")
+ assert str(exc).startswith(
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message'] [parameters: {")
def test_tostring_large_list(self):
try:
- raise sa_exceptions.DBAPIError.instance('this is a message',
- [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,],
+ raise sa_exceptions.DBAPIError.instance(
+ 'this is a message',
+ [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError as exc:
- assert str(exc).startswith("(OperationalError) 'this is a "
- "message' [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]")
+ assert str(exc).startswith(
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message'] [parameters: "
+ "[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]]")
def test_tostring_large_executemany(self):
try:
- raise sa_exceptions.DBAPIError.instance('this is a message',
+ raise sa_exceptions.DBAPIError.instance(
+ 'this is a message',
[{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
- {1: 1}, {1:1}, {1: 1}, {1: 1},],
+ {1: 1}, {1: 1}, {1: 1}, {1: 1}, ],
OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError as exc:
- eq_(str(exc) ,
- "(OperationalError) 'this is a message' [{1: 1}, "\
- "{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: "\
- "1}, {1: 1}, {1: 1}]")
+ eq_(
+ str(exc),
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message'] [parameters: [{1: 1}, "
+ "{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: "
+ "1}, {1: 1}, {1: 1}]]"
+ )
try:
raise sa_exceptions.DBAPIError.instance('this is a message', [
{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
- {1:1}, {1: 1}, {1: 1}, {1: 1},
- ], OperationalError(), DatabaseError)
+ {1: 1}, {1: 1}, {1: 1}, {1: 1},
+ ], OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError as exc:
- eq_(str(exc) ,
- "(OperationalError) 'this is a message' [{1: 1}, "
+ eq_(str(exc),
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message'] [parameters: [{1: 1}, "
"{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, "
"{1: 1}, {1: 1} ... displaying 10 of 11 total "
- "bound parameter sets ... {1: 1}, {1: 1}]"
- )
+ "bound parameter sets ... {1: 1}, {1: 1}]]"
+ )
try:
- raise sa_exceptions.DBAPIError.instance('this is a message',
+ raise sa_exceptions.DBAPIError.instance(
+ 'this is a message',
[
- (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ),
- (1, ),
+ (1, ), (1, ), (1, ), (1, ), (1, ), (1, ),
+ (1, ), (1, ), (1, ), (1, ),
], OperationalError(), DatabaseError)
+
except sa_exceptions.DBAPIError as exc:
- eq_(str(exc),
- "(OperationalError) 'this is a message' [(1,), "\
- "(1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,)]")
+ eq_(
+ str(exc),
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message'] [parameters: [(1,), "
+ "(1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,)]]")
try:
raise sa_exceptions.DBAPIError.instance('this is a message', [
(1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ),
(1, ), (1, ),
- ], OperationalError(), DatabaseError)
+ ], OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError as exc:
eq_(str(exc),
- "(OperationalError) 'this is a message' [(1,), "
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message'] [parameters: [(1,), "
"(1,), (1,), (1,), (1,), (1,), (1,), (1,) "
"... displaying 10 of 11 total bound "
- "parameter sets ... (1,), (1,)]"
- )
+ "parameter sets ... (1,), (1,)]]"
+ )
def test_db_error_busted_dbapi(self):
try:
- raise sa_exceptions.DBAPIError.instance('', [],
- ProgrammingError(), DatabaseError)
+ raise sa_exceptions.DBAPIError.instance(
+ '', [],
+ ProgrammingError(), DatabaseError)
except sa_exceptions.DBAPIError as e:
self.assert_(True)
self.assert_('Error in str() of DB-API' in e.args[0])
@@ -147,8 +162,9 @@ class WrapTest(fixtures.TestBase):
def test_db_error_keyboard_interrupt(self):
try:
- raise sa_exceptions.DBAPIError.instance('', [],
- KeyboardInterrupt(), DatabaseError)
+ raise sa_exceptions.DBAPIError.instance(
+ '', [],
+ KeyboardInterrupt(), DatabaseError)
except sa_exceptions.DBAPIError:
self.assert_(False)
except KeyboardInterrupt:
@@ -156,8 +172,9 @@ class WrapTest(fixtures.TestBase):
def test_db_error_system_exit(self):
try:
- raise sa_exceptions.DBAPIError.instance('', [],
- SystemExit(), DatabaseError)
+ raise sa_exceptions.DBAPIError.instance(
+ '', [],
+ SystemExit(), DatabaseError)
except sa_exceptions.DBAPIError:
self.assert_(False)
except SystemExit:
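
The rewritten assertions track the updated `DBAPIError` string format: the wrapped DB-API exception class is shown with its fully qualified name, the statement appears in a `[SQL: ...]` section, and bound parameters (when present) in a separate `[parameters: ...]` section, with large executemany lists truncated to ten displayed sets. A small sketch of the format using the stub exception classes defined at the top of this module; the module prefix in the output depends on where the stubs are defined:

    from sqlalchemy import exc as sa_exceptions

    class Error(Exception):
        pass

    class DatabaseError(Error):
        pass

    class OperationalError(DatabaseError):
        pass

    err = sa_exceptions.DBAPIError.instance(
        'this is a message', {'a': 1}, OperationalError(), DatabaseError)

    # prints roughly:
    # (<module>.OperationalError) [SQL: 'this is a message'] [parameters: {'a': 1}]
    print(str(err))
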
diff --git a/test/base/test_utils.py b/test/base/test_utils.py
index a378b0160..f75c5cbe9 100644
--- a/test/base/test_utils.py
+++ b/test/base/test_utils.py
@@ -6,7 +6,7 @@ from sqlalchemy.testing import eq_, is_, ne_, fails_if
from sqlalchemy.testing.util import picklers, gc_collect
from sqlalchemy.util import classproperty, WeakSequence, get_callable_argspec
from sqlalchemy.sql import column
-
+from sqlalchemy.util import langhelpers
class _KeyedTupleTest(object):
@@ -1274,6 +1274,43 @@ class DuckTypeCollectionTest(fixtures.TestBase):
is_(util.duck_type_collection(instance), None)
+class PublicFactoryTest(fixtures.TestBase):
+
+ def _fixture(self):
+ class Thingy(object):
+ def __init__(self, value):
+ "make a thingy"
+ self.value = value
+
+ @classmethod
+ def foobar(cls, x, y):
+ "do the foobar"
+ return Thingy(x + y)
+
+ return Thingy
+
+ def test_classmethod(self):
+ Thingy = self._fixture()
+ foob = langhelpers.public_factory(
+ Thingy.foobar, ".sql.elements.foob")
+ eq_(foob(3, 4).value, 7)
+ eq_(foob(x=3, y=4).value, 7)
+ eq_(foob.__doc__, "do the foobar")
+ eq_(foob.__module__, "sqlalchemy.sql.elements")
+ assert Thingy.foobar.__doc__.startswith("This function is mirrored;")
+
+ def test_constructor(self):
+ Thingy = self._fixture()
+ foob = langhelpers.public_factory(
+ Thingy, ".sql.elements.foob")
+ eq_(foob(7).value, 7)
+ eq_(foob(value=7).value, 7)
+ eq_(foob.__doc__, "make a thingy")
+ eq_(foob.__module__, "sqlalchemy.sql.elements")
+ assert Thingy.__init__.__doc__.startswith(
+ "Construct a new :class:`.Thingy` object.")
+
+
class ArgInspectionTest(fixtures.TestBase):
def test_get_cls_kwargs(self):
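
The new `PublicFactoryTest` covers `langhelpers.public_factory`, the internal helper that exposes a class constructor or classmethod as a module-level function while carrying over the docstring and rewriting `__module__`. A short sketch of the behavior asserted above (`Thingy` is the test's own stand-in class):

    from sqlalchemy.util import langhelpers

    class Thingy(object):
        def __init__(self, value):
            "make a thingy"
            self.value = value

    # expose Thingy under a dotted path as if it lived in sqlalchemy.sql.elements
    foob = langhelpers.public_factory(Thingy, ".sql.elements.foob")

    assert foob(7).value == 7
    assert foob.__doc__ == "make a thingy"
    assert foob.__module__ == "sqlalchemy.sql.elements"
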
diff --git a/test/conftest.py b/test/conftest.py
index 1dd442309..c697085ee 100755
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -7,9 +7,23 @@ installs SQLAlchemy's testing plugin into the local environment.
"""
import sys
+import os
-from os import path
for pth in ['../lib']:
- sys.path.insert(0, path.join(path.dirname(path.abspath(__file__)), pth))
+ sys.path.insert(
+ 0,
+ os.path.join(os.path.dirname(os.path.abspath(__file__)), pth))
-from sqlalchemy.testing.plugin.pytestplugin import *
+
+# use bootstrapping so that test plugins are loaded
+# without touching the main library before coverage starts
+bootstrap_file = os.path.join(
+ os.path.dirname(__file__), "..", "lib", "sqlalchemy",
+ "testing", "plugin", "bootstrap.py"
+)
+
+with open(bootstrap_file) as f:
+ code = compile(f.read(), "bootstrap.py", 'exec')
+ to_bootstrap = "pytest"
+ exec(code, globals(), locals())
+ from pytestplugin import * # noqa
diff --git a/test/dialect/mssql/test_engine.py b/test/dialect/mssql/test_engine.py
index 8ac9c6c16..4b4780d43 100644
--- a/test/dialect/mssql/test_engine.py
+++ b/test/dialect/mssql/test_engine.py
@@ -7,6 +7,8 @@ from sqlalchemy.engine import url
from sqlalchemy.testing import fixtures
from sqlalchemy import testing
from sqlalchemy.testing import assert_raises_message, assert_warnings
+from sqlalchemy.testing.mock import Mock
+
class ParseConnectTest(fixtures.TestBase):
@@ -167,3 +169,21 @@ class ParseConnectTest(fixtures.TestBase):
assert_raises_message(exc.SAWarning,
'Unrecognized server version info',
engine.connect)
+
+
+class VersionDetectionTest(fixtures.TestBase):
+ def test_pymssql_version(self):
+ dialect = pymssql.MSDialect_pymssql()
+
+ for vers in [
+ "Microsoft SQL Server Blah - 11.0.9216.62",
+ "Microsoft SQL Server (XYZ) - 11.0.9216.62 \n"
+ "Jul 18 2014 22:00:21 \nCopyright (c) Microsoft Corporation",
+ "Microsoft SQL Azure (RTM) - 11.0.9216.62 \n"
+ "Jul 18 2014 22:00:21 \nCopyright (c) Microsoft Corporation"
+ ]:
+ conn = Mock(scalar=Mock(return_value=vers))
+ eq_(
+ dialect._get_server_version_info(conn),
+ (11, 0, 9216, 62)
+        )
\ No newline at end of file
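
`VersionDetectionTest` feeds several pymssql server banners through `_get_server_version_info()` and expects the dotted version to come back as a tuple of integers regardless of the surrounding text. A rough equivalent of that parsing, shown only as an illustration (the dialect's actual implementation may differ):

    import re

    def version_info(banner):
        # pull the first dotted run of four integers out of the banner
        m = re.search(r'(\d+)\.(\d+)\.(\d+)\.(\d+)', banner)
        return tuple(int(g) for g in m.groups()) if m else None

    assert version_info(
        "Microsoft SQL Server Blah - 11.0.9216.62") == (11, 0, 9216, 62)
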
diff --git a/test/dialect/mysql/test_reflection.py b/test/dialect/mysql/test_reflection.py
index bf35a2c6b..99733e397 100644
--- a/test/dialect/mysql/test_reflection.py
+++ b/test/dialect/mysql/test_reflection.py
@@ -283,6 +283,38 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
view_names = dialect.get_view_names(connection, "information_schema")
self.assert_('TABLES' in view_names)
+ @testing.provide_metadata
+ def test_reflection_with_unique_constraint(self):
+ insp = inspect(testing.db)
+
+ meta = self.metadata
+ uc_table = Table('mysql_uc', meta,
+ Column('a', String(10)),
+ UniqueConstraint('a', name='uc_a'))
+
+ uc_table.create()
+
+ # MySQL converts unique constraints into unique indexes.
+ # separately we get both
+ indexes = dict((i['name'], i) for i in insp.get_indexes('mysql_uc'))
+ constraints = set(i['name']
+ for i in insp.get_unique_constraints('mysql_uc'))
+
+ self.assert_('uc_a' in indexes)
+ self.assert_(indexes['uc_a']['unique'])
+ self.assert_('uc_a' in constraints)
+
+ # reflection here favors the unique index, as that's the
+ # more "official" MySQL construct
+ reflected = Table('mysql_uc', MetaData(testing.db), autoload=True)
+
+ indexes = dict((i.name, i) for i in reflected.indexes)
+ constraints = set(uc.name for uc in reflected.constraints)
+
+ self.assert_('uc_a' in indexes)
+ self.assert_(indexes['uc_a'].unique)
+ self.assert_('uc_a' not in constraints)
+
class RawReflectionTest(fixtures.TestBase):
def setup(self):
diff --git a/test/dialect/mysql/test_types.py b/test/dialect/mysql/test_types.py
index 75dbe15e0..e65acc6db 100644
--- a/test/dialect/mysql/test_types.py
+++ b/test/dialect/mysql/test_types.py
@@ -154,10 +154,8 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
res
)
- @testing.fails_if(
- lambda: testing.against("mysql+mysqlconnector")
- and not util.py3k,
- "bug in mysqlconnector; http://bugs.mysql.com/bug.php?id=73266")
+ # fixed in mysql-connector as of 2.0.1,
+ # see http://bugs.mysql.com/bug.php?id=73266
@testing.provide_metadata
def test_precision_float_roundtrip(self):
t = Table('t', self.metadata,
diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py
index bab41b0f7..8de71216e 100644
--- a/test/dialect/postgresql/test_reflection.py
+++ b/test/dialect/postgresql/test_reflection.py
@@ -7,14 +7,130 @@ from sqlalchemy.testing import fixtures
from sqlalchemy import testing
from sqlalchemy import inspect
from sqlalchemy import Table, Column, MetaData, Integer, String, \
- PrimaryKeyConstraint, ForeignKey, join, Sequence
+ PrimaryKeyConstraint, ForeignKey, join, Sequence, UniqueConstraint, \
+ Index
from sqlalchemy import exc
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import base as postgresql
-class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):
+class ForeignTableReflectionTest(fixtures.TablesTest, AssertsExecutionResults):
+ """Test reflection on foreign tables"""
+
+ __requires__ = 'postgresql_test_dblink',
+ __only_on__ = 'postgresql >= 9.3'
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ from sqlalchemy.testing import config
+ dblink = config.file_config.get(
+ 'sqla_testing', 'postgres_test_db_link')
+
+ testtable = Table(
+ 'testtable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(30)))
+
+ for ddl in [
+ "CREATE SERVER test_server FOREIGN DATA WRAPPER postgres_fdw "
+ "OPTIONS (dbname 'test', host '%s')" % dblink,
+ "CREATE USER MAPPING FOR public \
+ SERVER test_server options (user 'scott', password 'tiger')",
+ "CREATE FOREIGN TABLE test_foreigntable ( "
+ " id INT, "
+ " data VARCHAR(30) "
+ ") SERVER test_server OPTIONS (table_name 'testtable')",
+ ]:
+ sa.event.listen(metadata, "after_create", sa.DDL(ddl))
+
+ for ddl in [
+ 'DROP FOREIGN TABLE test_foreigntable',
+ 'DROP USER MAPPING FOR public SERVER test_server',
+ "DROP SERVER test_server"
+ ]:
+ sa.event.listen(metadata, "before_drop", sa.DDL(ddl))
+
+ def test_foreign_table_is_reflected(self):
+ metadata = MetaData(testing.db)
+ table = Table('test_foreigntable', metadata, autoload=True)
+ eq_(set(table.columns.keys()), set(['id', 'data']),
+ "Columns of reflected foreign table didn't equal expected columns")
+
+ def test_get_foreign_table_names(self):
+ inspector = inspect(testing.db)
+ with testing.db.connect() as conn:
+ ft_names = inspector.get_foreign_table_names()
+ eq_(ft_names, ['test_foreigntable'])
+
+ def test_get_table_names_no_foreign(self):
+ inspector = inspect(testing.db)
+ with testing.db.connect() as conn:
+ names = inspector.get_table_names()
+ eq_(names, ['testtable'])
+
+
+class MaterialiedViewReflectionTest(
+ fixtures.TablesTest, AssertsExecutionResults):
+ """Test reflection on materialized views"""
+
+ __only_on__ = 'postgresql >= 9.3'
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ testtable = Table(
+ 'testtable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(30)))
+
+ # insert data before we create the view
+ @sa.event.listens_for(testtable, "after_create")
+ def insert_data(target, connection, **kw):
+ connection.execute(
+ target.insert(),
+ {"id": 89, "data": 'd1'}
+ )
+
+ materialized_view = sa.DDL(
+ "CREATE MATERIALIZED VIEW test_mview AS "
+ "SELECT * FROM testtable")
+
+ plain_view = sa.DDL(
+ "CREATE VIEW test_regview AS "
+ "SELECT * FROM testtable")
+
+ sa.event.listen(testtable, 'after_create', plain_view)
+ sa.event.listen(testtable, 'after_create', materialized_view)
+ sa.event.listen(
+ testtable, 'before_drop',
+ sa.DDL("DROP MATERIALIZED VIEW test_mview")
+ )
+ sa.event.listen(
+ testtable, 'before_drop',
+ sa.DDL("DROP VIEW test_regview")
+ )
+ def test_mview_is_reflected(self):
+ metadata = MetaData(testing.db)
+ table = Table('test_mview', metadata, autoload=True)
+ eq_(set(table.columns.keys()), set(['id', 'data']),
+ "Columns of reflected mview didn't equal expected columns")
+
+ def test_mview_select(self):
+ metadata = MetaData(testing.db)
+ table = Table('test_mview', metadata, autoload=True)
+ eq_(
+ table.select().execute().fetchall(),
+ [(89, 'd1',)]
+ )
+
+ def test_get_view_names(self):
+ insp = inspect(testing.db)
+ eq_(set(insp.get_view_names()), set(['test_mview', 'test_regview']))
+
+
+class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):
"""Test PostgreSQL domains"""
__only_on__ = 'postgresql > 8.3'
@@ -688,6 +804,66 @@ class ReflectionTest(fixtures.TestBase):
'labels': ['sad', 'ok', 'happy']
}])
+ @testing.provide_metadata
+ def test_reflection_with_unique_constraint(self):
+ insp = inspect(testing.db)
+
+ meta = self.metadata
+ uc_table = Table('pgsql_uc', meta,
+ Column('a', String(10)),
+ UniqueConstraint('a', name='uc_a'))
+
+ uc_table.create()
+
+ # PostgreSQL will create an implicit index for a unique
+ # constraint. Separately we get both
+ indexes = set(i['name'] for i in insp.get_indexes('pgsql_uc'))
+ constraints = set(i['name']
+ for i in insp.get_unique_constraints('pgsql_uc'))
+
+ self.assert_('uc_a' in indexes)
+ self.assert_('uc_a' in constraints)
+
+ # reflection corrects for the dupe
+ reflected = Table('pgsql_uc', MetaData(testing.db), autoload=True)
+
+ indexes = set(i.name for i in reflected.indexes)
+ constraints = set(uc.name for uc in reflected.constraints)
+
+ self.assert_('uc_a' not in indexes)
+ self.assert_('uc_a' in constraints)
+
+ @testing.provide_metadata
+ def test_reflect_unique_index(self):
+ insp = inspect(testing.db)
+
+ meta = self.metadata
+
+ # a unique index OTOH we are able to detect is an index
+ # and not a unique constraint
+ uc_table = Table('pgsql_uc', meta,
+ Column('a', String(10)),
+ Index('ix_a', 'a', unique=True))
+
+ uc_table.create()
+
+ indexes = dict((i['name'], i) for i in insp.get_indexes('pgsql_uc'))
+ constraints = set(i['name']
+ for i in insp.get_unique_constraints('pgsql_uc'))
+
+ self.assert_('ix_a' in indexes)
+ assert indexes['ix_a']['unique']
+ self.assert_('ix_a' not in constraints)
+
+ reflected = Table('pgsql_uc', MetaData(testing.db), autoload=True)
+
+ indexes = dict((i.name, i) for i in reflected.indexes)
+ constraints = set(uc.name for uc in reflected.constraints)
+
+ self.assert_('ix_a' in indexes)
+ assert indexes['ix_a'].unique
+ self.assert_('ix_a' not in constraints)
+
class CustomTypeReflectionTest(fixtures.TestBase):
diff --git a/test/dialect/test_oracle.py b/test/dialect/test_oracle.py
index 187042036..a771c5d80 100644
--- a/test/dialect/test_oracle.py
+++ b/test/dialect/test_oracle.py
@@ -104,6 +104,28 @@ class QuotedBindRoundTripTest(fixtures.TestBase):
(2, 2, 2)
)
+ def test_numeric_bind_round_trip(self):
+ eq_(
+ testing.db.scalar(
+ select([
+ literal_column("2", type_=Integer()) +
+ bindparam("2_1", value=2)])
+ ),
+ 4
+ )
+
+ @testing.provide_metadata
+ def test_numeric_bind_in_crud(self):
+ t = Table(
+ "asfd", self.metadata,
+ Column("100K", Integer)
+ )
+ t.create()
+
+ testing.db.execute(t.insert(), {"100K": 10})
+ eq_(
+ testing.db.scalar(t.select()), 10
+ )
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = "oracle" #oracle.dialect()
@@ -648,6 +670,23 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"CREATE INDEX bar ON foo (x > 5)"
)
+ def test_table_options(self):
+ m = MetaData()
+
+ t = Table(
+ 'foo', m,
+ Column('x', Integer),
+ prefixes=["GLOBAL TEMPORARY"],
+ oracle_on_commit="PRESERVE ROWS"
+ )
+
+ self.assert_compile(
+ schema.CreateTable(t),
+ "CREATE GLOBAL TEMPORARY TABLE "
+ "foo (x INTEGER) ON COMMIT PRESERVE ROWS"
+ )
+
+
class CompatFlagsTest(fixtures.TestBase, AssertsCompiledSQL):
def _dialect(self, server_version, **kw):
diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py
index e77a03980..124208dbe 100644
--- a/test/dialect/test_sqlite.py
+++ b/test/dialect/test_sqlite.py
@@ -11,7 +11,7 @@ from sqlalchemy import Table, select, bindparam, Column,\
UniqueConstraint
from sqlalchemy.types import Integer, String, Boolean, DateTime, Date, Time
from sqlalchemy import types as sqltypes
-from sqlalchemy import event
+from sqlalchemy import event, inspect
from sqlalchemy.util import u, ue
from sqlalchemy import exc, sql, schema, pool, util
from sqlalchemy.dialects.sqlite import base as sqlite, \
@@ -480,57 +480,6 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
assert u('méil') in result.keys()
assert ue('\u6e2c\u8a66') in result.keys()
- def test_attached_as_schema(self):
- cx = testing.db.connect()
- try:
- cx.execute('ATTACH DATABASE ":memory:" AS test_schema')
- dialect = cx.dialect
- assert dialect.get_table_names(cx, 'test_schema') == []
- meta = MetaData(cx)
- Table('created', meta, Column('id', Integer),
- schema='test_schema')
- alt_master = Table('sqlite_master', meta, autoload=True,
- schema='test_schema')
- meta.create_all(cx)
- eq_(dialect.get_table_names(cx, 'test_schema'), ['created'])
- assert len(alt_master.c) > 0
- meta.clear()
- reflected = Table('created', meta, autoload=True,
- schema='test_schema')
- assert len(reflected.c) == 1
- cx.execute(reflected.insert(), dict(id=1))
- r = cx.execute(reflected.select()).fetchall()
- assert list(r) == [(1, )]
- cx.execute(reflected.update(), dict(id=2))
- r = cx.execute(reflected.select()).fetchall()
- assert list(r) == [(2, )]
- cx.execute(reflected.delete(reflected.c.id == 2))
- r = cx.execute(reflected.select()).fetchall()
- assert list(r) == []
-
- # note that sqlite_master is cleared, above
-
- meta.drop_all()
- assert dialect.get_table_names(cx, 'test_schema') == []
- finally:
- cx.execute('DETACH DATABASE test_schema')
-
- @testing.exclude('sqlite', '<', (2, 6), 'no database support')
- def test_temp_table_reflection(self):
- cx = testing.db.connect()
- try:
- cx.execute('CREATE TEMPORARY TABLE tempy (id INT)')
- assert 'tempy' in cx.dialect.get_table_names(cx, None)
- meta = MetaData(cx)
- tempy = Table('tempy', meta, autoload=True)
- assert len(tempy.c) == 1
- meta.drop_all()
- except:
- try:
- cx.execute('DROP TABLE tempy')
- except exc.DBAPIError:
- pass
- raise
def test_file_path_is_absolute(self):
d = pysqlite_dialect.dialect()
@@ -549,7 +498,6 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
e = create_engine('sqlite+pysqlite:///foo.db')
assert e.pool.__class__ is pool.NullPool
-
def test_dont_reflect_autoindex(self):
meta = MetaData(testing.db)
t = Table('foo', meta, Column('bar', String, primary_key=True))
@@ -575,6 +523,125 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
finally:
meta.drop_all()
+ def test_get_unique_constraints(self):
+ meta = MetaData(testing.db)
+ t1 = Table('foo', meta, Column('f', Integer),
+ UniqueConstraint('f', name='foo_f'))
+ t2 = Table('bar', meta, Column('b', Integer),
+ UniqueConstraint('b', name='bar_b'),
+ prefixes=['TEMPORARY'])
+ meta.create_all()
+ from sqlalchemy.engine.reflection import Inspector
+ try:
+ inspector = Inspector(testing.db)
+ eq_(inspector.get_unique_constraints('foo'),
+ [{'column_names': [u'f'], 'name': u'foo_f'}])
+ eq_(inspector.get_unique_constraints('bar'),
+ [{'column_names': [u'b'], 'name': u'bar_b'}])
+ finally:
+ meta.drop_all()
+
+
+class AttachedMemoryDBTest(fixtures.TestBase):
+ __only_on__ = 'sqlite'
+
+ dbname = None
+
+ def setUp(self):
+ self.conn = conn = testing.db.connect()
+ if self.dbname is None:
+ dbname = ':memory:'
+ else:
+ dbname = self.dbname
+ conn.execute('ATTACH DATABASE "%s" AS test_schema' % dbname)
+ self.metadata = MetaData()
+
+ def tearDown(self):
+ self.metadata.drop_all(self.conn)
+ self.conn.execute('DETACH DATABASE test_schema')
+ if self.dbname:
+ os.remove(self.dbname)
+
+ def _fixture(self):
+ meta = self.metadata
+ ct = Table(
+ 'created', meta,
+ Column('id', Integer),
+ Column('name', String),
+ schema='test_schema')
+
+ meta.create_all(self.conn)
+ return ct
+
+ def test_no_tables(self):
+ insp = inspect(self.conn)
+ eq_(insp.get_table_names("test_schema"), [])
+
+ def test_table_names_present(self):
+ self._fixture()
+ insp = inspect(self.conn)
+ eq_(insp.get_table_names("test_schema"), ["created"])
+
+ def test_table_names_system(self):
+ self._fixture()
+ insp = inspect(self.conn)
+ eq_(insp.get_table_names("test_schema"), ["created"])
+
+ def test_reflect_system_table(self):
+ meta = MetaData(self.conn)
+ alt_master = Table(
+ 'sqlite_master', meta, autoload=True,
+ autoload_with=self.conn,
+ schema='test_schema')
+ assert len(alt_master.c) > 0
+
+ def test_reflect_user_table(self):
+ self._fixture()
+
+ m2 = MetaData()
+ c2 = Table('created', m2, autoload=True, autoload_with=self.conn)
+ eq_(len(c2.c), 2)
+
+ def test_crud(self):
+ ct = self._fixture()
+
+ self.conn.execute(ct.insert(), {'id': 1, 'name': 'foo'})
+ eq_(
+ self.conn.execute(ct.select()).fetchall(),
+ [(1, 'foo')]
+ )
+
+ self.conn.execute(ct.update(), {'id': 2, 'name': 'bar'})
+ eq_(
+ self.conn.execute(ct.select()).fetchall(),
+ [(2, 'bar')]
+ )
+ self.conn.execute(ct.delete())
+ eq_(
+ self.conn.execute(ct.select()).fetchall(),
+ []
+ )
+
+ def test_col_targeting(self):
+ ct = self._fixture()
+
+ self.conn.execute(ct.insert(), {'id': 1, 'name': 'foo'})
+ row = self.conn.execute(ct.select()).first()
+ eq_(row['id'], 1)
+ eq_(row['name'], 'foo')
+
+ def test_col_targeting_union(self):
+ ct = self._fixture()
+
+ self.conn.execute(ct.insert(), {'id': 1, 'name': 'foo'})
+ row = self.conn.execute(ct.select().union(ct.select())).first()
+ eq_(row['id'], 1)
+ eq_(row['name'], 'foo')
+
+
+class AttachedFileDBTest(AttachedMemoryDBTest):
+ dbname = 'attached_db.db'
+
class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
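
The `AttachedMemoryDBTest` / `AttachedFileDBTest` pair replaces the old monolithic `test_attached_as_schema`: a database attached with `ATTACH DATABASE ... AS test_schema` behaves as a schema, so schema-qualified tables can be created, reflected and queried through the ordinary APIs. A condensed sketch of the flow these tests exercise:

    import sqlalchemy as sa
    from sqlalchemy import create_engine, inspect

    engine = create_engine("sqlite://")
    conn = engine.connect()
    conn.execute('ATTACH DATABASE ":memory:" AS test_schema')

    meta = sa.MetaData()
    sa.Table('created', meta,
             sa.Column('id', sa.Integer),
             sa.Column('name', sa.String),
             schema='test_schema')
    meta.create_all(conn)

    # the attached database shows up as a schema to the inspector
    assert inspect(conn).get_table_names("test_schema") == ["created"]
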
diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py
index d8e1c655e..5c3279ba9 100644
--- a/test/engine/test_execute.py
+++ b/test/engine/test_execute.py
@@ -25,6 +25,10 @@ from sqlalchemy.util import nested
users, metadata, users_autoinc = None, None, None
+class SomeException(Exception):
+ pass
+
+
class ExecuteTest(fixtures.TestBase):
__backend__ = True
@@ -280,12 +284,13 @@ class ExecuteTest(fixtures.TestBase):
impl = Integer
def process_bind_param(self, value, dialect):
- raise Exception("nope")
+ raise SomeException("nope")
def _go(conn):
assert_raises_message(
tsa.exc.StatementError,
- r"nope \(original cause: Exception: nope\) u?'SELECT 1 ",
+ r"\(test.engine.test_execute.SomeException\) "
+ "nope \[SQL\: u?'SELECT 1 ",
conn.execute,
select([1]).
where(
@@ -479,6 +484,26 @@ class ExecuteTest(fixtures.TestBase):
eq_(canary, ["l1", "l2", "l3", "l1", "l2"])
@testing.requires.ad_hoc_engines
+ def test_autocommit_option_no_issue_first_connect(self):
+ eng = create_engine(testing.db.url)
+ eng.update_execution_options(autocommit=True)
+ conn = eng.connect()
+ eq_(conn._execution_options, {"autocommit": True})
+ conn.close()
+
+ @testing.requires.ad_hoc_engines
+ def test_dialect_init_uses_options(self):
+ eng = create_engine(testing.db.url)
+
+ def my_init(connection):
+ connection.execution_options(foo='bar').execute(select([1]))
+
+ with patch.object(eng.dialect, "initialize", my_init):
+ conn = eng.connect()
+ eq_(conn._execution_options, {})
+ conn.close()
+
+ @testing.requires.ad_hoc_engines
def test_generative_engine_event_dispatch_hasevents(self):
def l1(*arg, **kw):
pass
@@ -541,7 +566,7 @@ class ConvenienceExecuteTest(fixtures.TablesTest):
if is_transaction:
conn = conn.connection
conn.execute(self.table.insert().values(a=x, b=value))
- raise Exception("breakage")
+ raise SomeException("breakage")
return go
def _assert_no_data(self):
@@ -982,6 +1007,17 @@ class ExecutionOptionsTest(fixtures.TestBase):
eq_(c1._execution_options, {"foo": "bar"})
eq_(c2._execution_options, {"foo": "bar", "bat": "hoho"})
+ def test_branched_connection_execution_options(self):
+ engine = testing_engine("sqlite://")
+
+ conn = engine.connect()
+ c2 = conn.execution_options(foo="bar")
+ c2_branch = c2.connect()
+ eq_(
+ c2_branch._execution_options,
+ {"foo": "bar"}
+ )
+
class AlternateResultProxyTest(fixtures.TestBase):
__requires__ = ('sqlite', )
@@ -1440,6 +1476,48 @@ class EngineEventsTest(fixtures.TestBase):
'begin', 'execute', 'cursor_execute', 'commit',
])
+ def test_transactional_named(self):
+ canary = []
+
+ def tracker(name):
+ def go(*args, **kw):
+ canary.append((name, set(kw)))
+ return go
+
+ engine = engines.testing_engine()
+ event.listen(engine, 'before_execute', tracker('execute'), named=True)
+ event.listen(
+ engine, 'before_cursor_execute',
+ tracker('cursor_execute'), named=True)
+ event.listen(engine, 'begin', tracker('begin'), named=True)
+ event.listen(engine, 'commit', tracker('commit'), named=True)
+ event.listen(engine, 'rollback', tracker('rollback'), named=True)
+
+ conn = engine.connect()
+ trans = conn.begin()
+ conn.execute(select([1]))
+ trans.rollback()
+ trans = conn.begin()
+ conn.execute(select([1]))
+ trans.commit()
+
+ eq_(
+ canary, [
+ ('begin', set(['conn', ])),
+ ('execute', set([
+ 'conn', 'clauseelement', 'multiparams', 'params'])),
+ ('cursor_execute', set([
+ 'conn', 'cursor', 'executemany',
+ 'statement', 'parameters', 'context'])),
+ ('rollback', set(['conn', ])), ('begin', set(['conn', ])),
+ ('execute', set([
+ 'conn', 'clauseelement', 'multiparams', 'params'])),
+ ('cursor_execute', set([
+ 'conn', 'cursor', 'executemany', 'statement',
+ 'parameters', 'context'])),
+ ('commit', set(['conn', ]))]
+ )
+
@testing.requires.savepoints
@testing.requires.two_phase_transactions
def test_transactional_advanced(self):
@@ -1524,7 +1602,7 @@ class HandleErrorTest(fixtures.TestBase):
listener = Mock(return_value=None)
event.listen(engine, 'dbapi_error', listener)
- nope = Exception("nope")
+ nope = SomeException("nope")
class MyType(TypeDecorator):
impl = Integer
@@ -1535,7 +1613,8 @@ class HandleErrorTest(fixtures.TestBase):
with engine.connect() as conn:
assert_raises_message(
tsa.exc.StatementError,
- r"nope \(original cause: Exception: nope\) u?'SELECT 1 ",
+ r"\(test.engine.test_execute.SomeException\) "
+ "nope \[SQL\: u?'SELECT 1 ",
conn.execute,
select([1]).where(
column('foo') == literal('bar', MyType()))
@@ -1715,7 +1794,7 @@ class HandleErrorTest(fixtures.TestBase):
listener = Mock(return_value=None)
event.listen(engine, 'handle_error', listener)
- nope = Exception("nope")
+ nope = SomeException("nope")
class MyType(TypeDecorator):
impl = Integer
@@ -1726,7 +1805,8 @@ class HandleErrorTest(fixtures.TestBase):
with engine.connect() as conn:
assert_raises_message(
tsa.exc.StatementError,
- r"nope \(original cause: Exception: nope\) u?'SELECT 1 ",
+ r"\(test.engine.test_execute.SomeException\) "
+ "nope \[SQL\: u?'SELECT 1 ",
conn.execute,
select([1]).where(
column('foo') == literal('bar', MyType()))
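
`test_branched_connection_execution_options`, together with the new branch tests in test_reconnect.py and test_transaction.py below, covers calling `.connect()` on an existing `Connection`: the result is a "branch" that shares the parent's DBAPI connection, transaction state, invalidation state and execution options. A minimal sketch of the branching behavior being asserted:

    from sqlalchemy import create_engine, select

    engine = create_engine("sqlite://")
    conn = engine.connect().execution_options(foo="bar")

    # a branch is a second Connection riding on the same DBAPI connection
    branch = conn.connect()
    assert branch._execution_options["foo"] == "bar"
    assert branch.execute(select([1])).scalar() == 1
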
diff --git a/test/engine/test_logging.py b/test/engine/test_logging.py
index 1432a0f7b..180ea9388 100644
--- a/test/engine/test_logging.py
+++ b/test/engine/test_logging.py
@@ -56,7 +56,8 @@ class LogParamsTest(fixtures.TestBase):
def test_error_large_dict(self):
assert_raises_message(
tsa.exc.DBAPIError,
- r".*'INSERT INTO nonexistent \(data\) values \(:data\)' "
+ r".*'INSERT INTO nonexistent \(data\) values \(:data\)'\] "
+ "\[parameters: "
"\[{'data': '0'}, {'data': '1'}, {'data': '2'}, "
"{'data': '3'}, {'data': '4'}, {'data': '5'}, "
"{'data': '6'}, {'data': '7'} ... displaying 10 of "
@@ -71,8 +72,9 @@ class LogParamsTest(fixtures.TestBase):
assert_raises_message(
tsa.exc.DBAPIError,
r".*INSERT INTO nonexistent \(data\) values "
- "\(\?\)' \[\('0',\), \('1',\), \('2',\), \('3',\), "
- "\('4',\), \('5',\), \('6',\), \('7',\) ... displaying "
+ "\(\?\)'\] \[parameters: \[\('0',\), \('1',\), \('2',\), \('3',\), "
+ "\('4',\), \('5',\), \('6',\), \('7',\) "
+ "... displaying "
"10 of 100 total bound parameter sets ... "
"\('98',\), \('99',\)\]",
lambda: self.eng.execute(
diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py
index c82cca5a1..4500ada6a 100644
--- a/test/engine/test_reconnect.py
+++ b/test/engine/test_reconnect.py
@@ -8,7 +8,7 @@ from sqlalchemy import testing
from sqlalchemy.testing import engines
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.engines import testing_engine
-from sqlalchemy.testing.mock import Mock, call
+from sqlalchemy.testing.mock import Mock, call, patch
class MockError(Exception):
@@ -504,6 +504,54 @@ class RealReconnectTest(fixtures.TestBase):
# pool isn't replaced
assert self.engine.pool is p2
+ def test_branched_invalidate_branch_to_parent(self):
+ c1 = self.engine.connect()
+
+ with patch.object(self.engine.pool, "logger") as logger:
+ c1_branch = c1.connect()
+ eq_(c1_branch.execute(select([1])).scalar(), 1)
+
+ self.engine.test_shutdown()
+
+ _assert_invalidated(c1_branch.execute, select([1]))
+ assert c1.invalidated
+ assert c1_branch.invalidated
+
+ c1_branch._revalidate_connection()
+ assert not c1.invalidated
+ assert not c1_branch.invalidated
+
+ assert "Invalidate connection" in logger.mock_calls[0][1][0]
+
+ def test_branched_invalidate_parent_to_branch(self):
+ c1 = self.engine.connect()
+
+ c1_branch = c1.connect()
+ eq_(c1_branch.execute(select([1])).scalar(), 1)
+
+ self.engine.test_shutdown()
+
+ _assert_invalidated(c1.execute, select([1]))
+ assert c1.invalidated
+ assert c1_branch.invalidated
+
+ c1._revalidate_connection()
+ assert not c1.invalidated
+ assert not c1_branch.invalidated
+
+ def test_branch_invalidate_state(self):
+ c1 = self.engine.connect()
+
+ c1_branch = c1.connect()
+
+ eq_(c1_branch.execute(select([1])).scalar(), 1)
+
+ self.engine.test_shutdown()
+
+ _assert_invalidated(c1_branch.execute, select([1]))
+ assert not c1_branch.closed
+ assert not c1_branch._connection_is_valid
+
def test_ensure_is_disconnect_gets_connection(self):
def is_disconnect(e, conn, cursor):
# connection is still present
diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py
index 8a5303642..b3b17e75a 100644
--- a/test/engine/test_transaction.py
+++ b/test/engine/test_transaction.py
@@ -133,6 +133,91 @@ class TransactionTest(fixtures.TestBase):
finally:
connection.close()
+ def test_branch_nested_rollback(self):
+ connection = testing.db.connect()
+ try:
+ connection.begin()
+ branched = connection.connect()
+ assert branched.in_transaction()
+ branched.execute(users.insert(), user_id=1, user_name='user1')
+ nested = branched.begin()
+ branched.execute(users.insert(), user_id=2, user_name='user2')
+ nested.rollback()
+ assert not connection.in_transaction()
+ eq_(connection.scalar("select count(*) from query_users"), 0)
+
+ finally:
+ connection.close()
+
+ def test_branch_autorollback(self):
+ connection = testing.db.connect()
+ try:
+ branched = connection.connect()
+ branched.execute(users.insert(), user_id=1, user_name='user1')
+ try:
+ branched.execute(users.insert(), user_id=1, user_name='user1')
+ except exc.DBAPIError:
+ pass
+ finally:
+ connection.close()
+
+ def test_branch_orig_rollback(self):
+ connection = testing.db.connect()
+ try:
+ branched = connection.connect()
+ branched.execute(users.insert(), user_id=1, user_name='user1')
+ nested = branched.begin()
+ assert branched.in_transaction()
+ branched.execute(users.insert(), user_id=2, user_name='user2')
+ nested.rollback()
+ eq_(connection.scalar("select count(*) from query_users"), 1)
+
+ finally:
+ connection.close()
+
+ def test_branch_autocommit(self):
+ connection = testing.db.connect()
+ try:
+ branched = connection.connect()
+ branched.execute(users.insert(), user_id=1, user_name='user1')
+ finally:
+ connection.close()
+ eq_(testing.db.scalar("select count(*) from query_users"), 1)
+
+ @testing.requires.savepoints
+ def test_branch_savepoint_rollback(self):
+ connection = testing.db.connect()
+ try:
+ trans = connection.begin()
+ branched = connection.connect()
+ assert branched.in_transaction()
+ branched.execute(users.insert(), user_id=1, user_name='user1')
+ nested = branched.begin_nested()
+ branched.execute(users.insert(), user_id=2, user_name='user2')
+ nested.rollback()
+ assert connection.in_transaction()
+ trans.commit()
+ eq_(connection.scalar("select count(*) from query_users"), 1)
+
+ finally:
+ connection.close()
+
+ @testing.requires.two_phase_transactions
+ def test_branch_twophase_rollback(self):
+ connection = testing.db.connect()
+ try:
+ branched = connection.connect()
+ assert not branched.in_transaction()
+ branched.execute(users.insert(), user_id=1, user_name='user1')
+ nested = branched.begin_twophase()
+ branched.execute(users.insert(), user_id=2, user_name='user2')
+ nested.rollback()
+ assert not connection.in_transaction()
+ eq_(connection.scalar("select count(*) from query_users"), 1)
+
+ finally:
+ connection.close()
+
def test_retains_through_options(self):
connection = testing.db.connect()
try:
@@ -1126,139 +1211,6 @@ class TLTransactionTest(fixtures.TestBase):
order_by(users.c.user_id)).fetchall(),
[(1, ), (2, )])
-counters = None
-
-
-class ForUpdateTest(fixtures.TestBase):
- __requires__ = 'ad_hoc_engines',
- __backend__ = True
-
- @classmethod
- def setup_class(cls):
- global counters, metadata
- metadata = MetaData()
- counters = Table('forupdate_counters', metadata,
- Column('counter_id', INT, primary_key=True),
- Column('counter_value', INT),
- test_needs_acid=True)
- counters.create(testing.db)
-
- def teardown(self):
- testing.db.execute(counters.delete()).close()
-
- @classmethod
- def teardown_class(cls):
- counters.drop(testing.db)
-
- def increment(self, count, errors, update_style=True, delay=0.005):
- con = testing.db.connect()
- sel = counters.select(for_update=update_style,
- whereclause=counters.c.counter_id == 1)
- for i in range(count):
- trans = con.begin()
- try:
- existing = con.execute(sel).first()
- incr = existing['counter_value'] + 1
- time.sleep(delay)
- con.execute(counters.update(counters.c.counter_id == 1,
- values={'counter_value': incr}))
- time.sleep(delay)
- readback = con.execute(sel).first()
- if readback['counter_value'] != incr:
- raise AssertionError('Got %s post-update, expected '
- '%s' % (readback['counter_value'], incr))
- trans.commit()
- except Exception as e:
- trans.rollback()
- errors.append(e)
- break
- con.close()
-
- @testing.crashes('mssql', 'FIXME: unknown')
- @testing.crashes('firebird', 'FIXME: unknown')
- @testing.crashes('sybase', 'FIXME: unknown')
- @testing.requires.independent_connections
- def test_queued_update(self):
- """Test SELECT FOR UPDATE with concurrent modifications.
-
- Runs concurrent modifications on a single row in the users
- table, with each mutator trying to increment a value stored in
- user_name.
-
- """
-
- db = testing.db
- db.execute(counters.insert(), counter_id=1, counter_value=0)
- iterations, thread_count = 10, 5
- threads, errors = [], []
- for i in range(thread_count):
- thrd = threading.Thread(target=self.increment,
- args=(iterations, ),
- kwargs={'errors': errors,
- 'update_style': True})
- thrd.start()
- threads.append(thrd)
- for thrd in threads:
- thrd.join()
- assert not errors
- sel = counters.select(whereclause=counters.c.counter_id == 1)
- final = db.execute(sel).first()
- eq_(final['counter_value'], iterations * thread_count)
-
- def overlap(self, ids, errors, update_style):
-
- sel = counters.select(for_update=update_style,
- whereclause=counters.c.counter_id.in_(ids))
- con = testing.db.connect()
- trans = con.begin()
- try:
- rows = con.execute(sel).fetchall()
- time.sleep(0.50)
- trans.commit()
- except Exception as e:
- trans.rollback()
- errors.append(e)
- con.close()
-
- def _threaded_overlap(self, thread_count, groups, update_style=True, pool=5):
- db = testing.db
- for cid in range(pool - 1):
- db.execute(counters.insert(), counter_id=cid + 1,
- counter_value=0)
- errors, threads = [], []
- for i in range(thread_count):
- thrd = threading.Thread(target=self.overlap,
- args=(groups.pop(0), errors,
- update_style))
- time.sleep(0.20) # give the previous thread a chance to start
- # to ensure it gets a lock
- thrd.start()
- threads.append(thrd)
- for thrd in threads:
- thrd.join()
- return errors
-
- @testing.crashes('mssql', 'FIXME: unknown')
- @testing.crashes('firebird', 'FIXME: unknown')
- @testing.crashes('sybase', 'FIXME: unknown')
- @testing.requires.independent_connections
- def test_queued_select(self):
- """Simple SELECT FOR UPDATE conflict test"""
-
- errors = self._threaded_overlap(2, [(1, 2, 3), (3, 4, 5)])
- assert not errors
-
- @testing.crashes('mssql', 'FIXME: unknown')
- @testing.fails_on('mysql', 'No support for NOWAIT')
- @testing.crashes('firebird', 'FIXME: unknown')
- @testing.crashes('sybase', 'FIXME: unknown')
- @testing.requires.independent_connections
- def test_nowait_select(self):
- """Simple SELECT FOR UPDATE NOWAIT conflict test"""
-
- errors = self._threaded_overlap(2, [(1, 2, 3), (3, 4, 5)],
- update_style='nowait')
- assert errors
class IsolationLevelTest(fixtures.TestBase):
__requires__ = ('isolation_level', 'ad_hoc_engines')
diff --git a/test/ext/declarative/test_basic.py b/test/ext/declarative/test_basic.py
index e2c2af679..3fac39cac 100644
--- a/test/ext/declarative/test_basic.py
+++ b/test/ext/declarative/test_basic.py
@@ -1,6 +1,6 @@
from sqlalchemy.testing import eq_, assert_raises, \
- assert_raises_message, is_
+ assert_raises_message
from sqlalchemy.ext import declarative as decl
from sqlalchemy import exc
import sqlalchemy as sa
@@ -10,21 +10,21 @@ from sqlalchemy import MetaData, Integer, String, ForeignKey, \
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import relationship, create_session, class_mapper, \
joinedload, configure_mappers, backref, clear_mappers, \
- deferred, column_property, composite,\
- Session, properties
-from sqlalchemy.testing import eq_
-from sqlalchemy.util import classproperty, with_metaclass
-from sqlalchemy.ext.declarative import declared_attr, AbstractConcreteBase, \
- ConcreteBase, synonym_for
+ column_property, composite, Session, properties
+from sqlalchemy.util import with_metaclass
+from sqlalchemy.ext.declarative import declared_attr, synonym_for
from sqlalchemy.testing import fixtures
-from sqlalchemy.testing.util import gc_collect
Base = None
+User = Address = None
+
+
class DeclarativeTestBase(fixtures.TestBase,
- testing.AssertsExecutionResults,
- testing.AssertsCompiledSQL):
+ testing.AssertsExecutionResults,
+ testing.AssertsCompiledSQL):
__dialect__ = 'default'
+
def setup(self):
global Base
Base = decl.declarative_base(testing.db)
@@ -34,13 +34,15 @@ class DeclarativeTestBase(fixtures.TestBase,
clear_mappers()
Base.metadata.drop_all()
+
class DeclarativeTest(DeclarativeTestBase):
+
def test_basic(self):
class User(Base, fixtures.ComparableEntity):
__tablename__ = 'users'
id = Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column('name', String(50))
addresses = relationship("Address", backref="user")
@@ -48,7 +50,7 @@ class DeclarativeTest(DeclarativeTestBase):
__tablename__ = 'addresses'
id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
email = Column(String(50), key='_email')
user_id = Column('user_id', Integer, ForeignKey('users.id'),
key='_user_id')
@@ -82,7 +84,7 @@ class DeclarativeTest(DeclarativeTestBase):
__tablename__ = 'users'
id = Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column('name', String(50))
addresses = relationship(util.u("Address"), backref="user")
@@ -90,7 +92,7 @@ class DeclarativeTest(DeclarativeTestBase):
__tablename__ = 'addresses'
id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
email = Column(String(50), key='_email')
user_id = Column('user_id', Integer, ForeignKey('users.id'),
key='_user_id')
@@ -120,8 +122,10 @@ class DeclarativeTest(DeclarativeTestBase):
__table_args__ = ()
def test_cant_add_columns(self):
- t = Table('t', Base.metadata, Column('id', Integer,
- primary_key=True), Column('data', String))
+ t = Table(
+ 't', Base.metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String))
def go():
class User(Base):
@@ -158,7 +162,6 @@ class DeclarativeTest(DeclarativeTestBase):
go
)
-
def test_column_repeated_under_prop(self):
def go():
class Foo(Base):
@@ -180,6 +183,7 @@ class DeclarativeTest(DeclarativeTestBase):
class A(Base):
__tablename__ = 'a'
id = Column(Integer, primary_key=True)
+
class B(Base):
__tablename__ = 'b'
id = Column(Integer, primary_key=True)
@@ -196,6 +200,7 @@ class DeclarativeTest(DeclarativeTestBase):
class A(Base):
__tablename__ = 'a'
id = Column(Integer, primary_key=True)
+
class B(Base):
__tablename__ = 'b'
id = Column(Integer, primary_key=True)
@@ -213,11 +218,12 @@ class DeclarativeTest(DeclarativeTestBase):
# metaclass to mock the way zope.interface breaks getattr()
class BrokenMeta(type):
+
def __getattribute__(self, attr):
if attr == 'xyzzy':
raise AttributeError('xyzzy')
else:
- return object.__getattribute__(self,attr)
+ return object.__getattribute__(self, attr)
# even though this class has an xyzzy attribute, getattr(cls,"xyzzy")
# fails
@@ -225,13 +231,13 @@ class DeclarativeTest(DeclarativeTestBase):
xyzzy = "magic"
# _as_declarative() inspects obj.__class__.__bases__
- class User(BrokenParent,fixtures.ComparableEntity):
+ class User(BrokenParent, fixtures.ComparableEntity):
__tablename__ = 'users'
id = Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column('name', String(50))
- decl.instrument_declarative(User,{},Base.metadata)
+ decl.instrument_declarative(User, {}, Base.metadata)
def test_reserved_identifiers(self):
def go1():
@@ -285,29 +291,28 @@ class DeclarativeTest(DeclarativeTestBase):
email = Column('email', String(50))
user_id = Column('user_id', Integer, ForeignKey('users.id'))
user = relationship("User", primaryjoin=user_id == User.id,
- backref="addresses")
+ backref="addresses")
assert mapperlib.Mapper._new_mappers is True
- u = User()
+ u = User() # noqa
assert User.addresses
assert mapperlib.Mapper._new_mappers is False
def test_string_dependency_resolution(self):
- from sqlalchemy.sql import desc
-
class User(Base, fixtures.ComparableEntity):
__tablename__ = 'users'
id = Column(Integer, primary_key=True,
test_needs_autoincrement=True)
name = Column(String(50))
- addresses = relationship('Address',
- order_by='desc(Address.email)',
- primaryjoin='User.id==Address.user_id',
- foreign_keys='[Address.user_id]',
- backref=backref('user',
- primaryjoin='User.id==Address.user_id',
- foreign_keys='[Address.user_id]'))
+ addresses = relationship(
+ 'Address',
+ order_by='desc(Address.email)',
+ primaryjoin='User.id==Address.user_id',
+ foreign_keys='[Address.user_id]',
+ backref=backref('user',
+ primaryjoin='User.id==Address.user_id',
+ foreign_keys='[Address.user_id]'))
class Address(Base, fixtures.ComparableEntity):
@@ -319,14 +324,17 @@ class DeclarativeTest(DeclarativeTestBase):
Base.metadata.create_all()
sess = create_session()
- u1 = User(name='ed', addresses=[Address(email='abc'),
- Address(email='def'), Address(email='xyz')])
+ u1 = User(
+ name='ed', addresses=[
+ Address(email='abc'),
+ Address(email='def'), Address(email='xyz')])
sess.add(u1)
sess.flush()
sess.expunge_all()
eq_(sess.query(User).filter(User.name == 'ed').one(),
- User(name='ed', addresses=[Address(email='xyz'),
- Address(email='def'), Address(email='abc')]))
+ User(name='ed', addresses=[
+ Address(email='xyz'),
+ Address(email='def'), Address(email='abc')]))
class Foo(Base, fixtures.ComparableEntity):
@@ -340,7 +348,6 @@ class DeclarativeTest(DeclarativeTestBase):
"ColumnProperty", configure_mappers)
def test_string_dependency_resolution_synonym(self):
- from sqlalchemy.sql import desc
class User(Base, fixtures.ComparableEntity):
@@ -416,12 +423,13 @@ class DeclarativeTest(DeclarativeTestBase):
id = Column(Integer, primary_key=True)
b_id = Column(ForeignKey('b.id'))
- d = relationship("D",
- secondary="join(B, D, B.d_id == D.id)."
- "join(C, C.d_id == D.id)",
- primaryjoin="and_(A.b_id == B.id, A.id == C.a_id)",
- secondaryjoin="D.id == B.d_id",
- )
+ d = relationship(
+ "D",
+ secondary="join(B, D, B.d_id == D.id)."
+ "join(C, C.d_id == D.id)",
+ primaryjoin="and_(A.b_id == B.id, A.id == C.a_id)",
+ secondaryjoin="D.id == B.d_id",
+ )
class B(Base):
__tablename__ = 'b'
@@ -444,9 +452,9 @@ class DeclarativeTest(DeclarativeTestBase):
self.assert_compile(
s.query(A).join(A.d),
"SELECT a.id AS a_id, a.b_id AS a_b_id FROM a JOIN "
- "(b AS b_1 JOIN d AS d_1 ON b_1.d_id = d_1.id "
- "JOIN c AS c_1 ON c_1.d_id = d_1.id) ON a.b_id = b_1.id "
- "AND a.id = c_1.a_id JOIN d ON d.id = b_1.d_id",
+ "(b AS b_1 JOIN d AS d_1 ON b_1.d_id = d_1.id "
+ "JOIN c AS c_1 ON c_1.d_id = d_1.id) ON a.b_id = b_1.id "
+ "AND a.id = c_1.a_id JOIN d ON d.id = b_1.d_id",
)
def test_string_dependency_resolution_no_table(self):
@@ -474,6 +482,7 @@ class DeclarativeTest(DeclarativeTestBase):
id = Column(Integer, primary_key=True,
test_needs_autoincrement=True)
name = Column(String(50))
+
class Address(Base, fixtures.ComparableEntity):
__tablename__ = 'addresses'
@@ -481,7 +490,8 @@ class DeclarativeTest(DeclarativeTestBase):
test_needs_autoincrement=True)
email = Column(String(50))
user_id = Column(Integer)
- user = relationship("User",
+ user = relationship(
+ "User",
primaryjoin="remote(User.id)==foreign(Address.user_id)"
)
@@ -497,9 +507,9 @@ class DeclarativeTest(DeclarativeTestBase):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
- addresses = relationship('Address',
- primaryjoin='User.id==Address.user_id.prop.columns['
- '0]')
+ addresses = relationship(
+ 'Address',
+ primaryjoin='User.id==Address.user_id.prop.columns[0]')
class Address(Base, fixtures.ComparableEntity):
@@ -516,9 +526,10 @@ class DeclarativeTest(DeclarativeTestBase):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
- addresses = relationship('%s.Address' % __name__,
- primaryjoin='%s.User.id==%s.Address.user_id.prop.columns['
- '0]' % (__name__, __name__))
+ addresses = relationship(
+ '%s.Address' % __name__,
+ primaryjoin='%s.User.id==%s.Address.user_id.prop.columns[0]'
+ % (__name__, __name__))
class Address(Base, fixtures.ComparableEntity):
@@ -538,8 +549,8 @@ class DeclarativeTest(DeclarativeTestBase):
id = Column(Integer, primary_key=True)
name = Column(String(50))
addresses = relationship('Address',
- primaryjoin='User.id==Address.user_id',
- backref='user')
+ primaryjoin='User.id==Address.user_id',
+ backref='user')
class Address(Base, fixtures.ComparableEntity):
@@ -571,10 +582,11 @@ class DeclarativeTest(DeclarativeTestBase):
id = Column(Integer, primary_key=True)
name = Column(String(50))
- user_to_prop = Table('user_to_prop', Base.metadata,
- Column('user_id', Integer,
- ForeignKey('users.id')), Column('prop_id',
- Integer, ForeignKey('props.id')))
+ user_to_prop = Table(
+ 'user_to_prop', Base.metadata,
+ Column('user_id', Integer, ForeignKey('users.id')),
+ Column('prop_id', Integer, ForeignKey('props.id')))
+
configure_mappers()
assert class_mapper(User).get_property('props').secondary \
is user_to_prop
@@ -585,27 +597,29 @@ class DeclarativeTest(DeclarativeTestBase):
class User(Base):
__tablename__ = 'users'
- __table_args__ = {'schema':'fooschema'}
+ __table_args__ = {'schema': 'fooschema'}
id = Column(Integer, primary_key=True)
name = Column(String(50))
- props = relationship('Prop', secondary='fooschema.user_to_prop',
- primaryjoin='User.id==fooschema.user_to_prop.c.user_id',
- secondaryjoin='fooschema.user_to_prop.c.prop_id==Prop.id',
- backref='users')
+ props = relationship(
+ 'Prop', secondary='fooschema.user_to_prop',
+ primaryjoin='User.id==fooschema.user_to_prop.c.user_id',
+ secondaryjoin='fooschema.user_to_prop.c.prop_id==Prop.id',
+ backref='users')
class Prop(Base):
__tablename__ = 'props'
- __table_args__ = {'schema':'fooschema'}
+ __table_args__ = {'schema': 'fooschema'}
id = Column(Integer, primary_key=True)
name = Column(String(50))
- user_to_prop = Table('user_to_prop', Base.metadata,
- Column('user_id', Integer, ForeignKey('fooschema.users.id')),
- Column('prop_id',Integer, ForeignKey('fooschema.props.id')),
- schema='fooschema')
+ user_to_prop = Table(
+ 'user_to_prop', Base.metadata,
+ Column('user_id', Integer, ForeignKey('fooschema.users.id')),
+ Column('prop_id', Integer, ForeignKey('fooschema.props.id')),
+ schema='fooschema')
configure_mappers()
assert class_mapper(User).get_property('props').secondary \
@@ -618,9 +632,11 @@ class DeclarativeTest(DeclarativeTestBase):
__tablename__ = 'parent'
id = Column(Integer, primary_key=True)
name = Column(String)
- children = relationship("Child",
- primaryjoin="Parent.name==remote(foreign(func.lower(Child.name_upper)))"
- )
+ children = relationship(
+ "Child",
+ primaryjoin="Parent.name=="
+ "remote(foreign(func.lower(Child.name_upper)))"
+ )
class Child(Base):
__tablename__ = 'child'
@@ -667,8 +683,8 @@ class DeclarativeTest(DeclarativeTestBase):
test_needs_autoincrement=True)
name = Column(String(50))
addresses = relationship('Address', order_by=Address.email,
- foreign_keys=Address.user_id,
- remote_side=Address.user_id)
+ foreign_keys=Address.user_id,
+ remote_side=Address.user_id)
# get the mapper for User. User mapper will compile,
# "addresses" relationship will call upon Address.user_id for
@@ -681,14 +697,16 @@ class DeclarativeTest(DeclarativeTestBase):
class_mapper(User)
Base.metadata.create_all()
sess = create_session()
- u1 = User(name='ed', addresses=[Address(email='abc'),
- Address(email='xyz'), Address(email='def')])
+ u1 = User(name='ed', addresses=[
+ Address(email='abc'),
+ Address(email='xyz'), Address(email='def')])
sess.add(u1)
sess.flush()
sess.expunge_all()
eq_(sess.query(User).filter(User.name == 'ed').one(),
- User(name='ed', addresses=[Address(email='abc'),
- Address(email='def'), Address(email='xyz')]))
+ User(name='ed', addresses=[
+ Address(email='abc'),
+ Address(email='def'), Address(email='xyz')]))
def test_nice_dependency_error(self):
@@ -726,14 +744,16 @@ class DeclarativeTest(DeclarativeTestBase):
# the exception is preserved. Remains the
# same through repeated calls.
for i in range(3):
- assert_raises_message(sa.exc.InvalidRequestError,
- "^One or more mappers failed to initialize - "
- "can't proceed with initialization of other "
- "mappers. Original exception was: When initializing.*",
- configure_mappers)
+ assert_raises_message(
+ sa.exc.InvalidRequestError,
+ "^One or more mappers failed to initialize - "
+ "can't proceed with initialization of other "
+ "mappers. Original exception was: When initializing.*",
+ configure_mappers)
def test_custom_base(self):
class MyBase(object):
+
def foobar(self):
return "foobar"
Base = decl.declarative_base(cls=MyBase)
@@ -761,7 +781,7 @@ class DeclarativeTest(DeclarativeTestBase):
Base.metadata.create_all()
configure_mappers()
assert class_mapper(Detail).get_property('master'
- ).strategy.use_get
+ ).strategy.use_get
m1 = Master()
d1 = Detail(master=m1)
sess = create_session()
@@ -821,13 +841,15 @@ class DeclarativeTest(DeclarativeTestBase):
eq_(Address.__table__.c['_email'].name, 'email')
eq_(Address.__table__.c['_user_id'].name, 'user_id')
u1 = User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1',
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(
+ name='u1',
+ addresses=[Address(email='one'), Address(email='two')])])
a1 = sess.query(Address).filter(Address.email == 'two').one()
eq_(a1, Address(email='two'))
eq_(a1.user, User(name='u1'))
@@ -842,7 +864,8 @@ class DeclarativeTest(DeclarativeTestBase):
class ASub(A):
brap = A.data
assert ASub.brap.property is A.data.property
- assert isinstance(ASub.brap.original_property, properties.SynonymProperty)
+ assert isinstance(
+ ASub.brap.original_property, properties.SynonymProperty)
def test_alt_name_attr_subclass_relationship_inline(self):
# [ticket:2900]
@@ -857,10 +880,12 @@ class DeclarativeTest(DeclarativeTestBase):
id = Column('id', Integer, primary_key=True)
configure_mappers()
+
class ASub(A):
brap = A.b
assert ASub.brap.property is A.b.property
- assert isinstance(ASub.brap.original_property, properties.SynonymProperty)
+ assert isinstance(
+ ASub.brap.original_property, properties.SynonymProperty)
ASub(brap=B())
def test_alt_name_attr_subclass_column_attrset(self):
@@ -881,6 +906,7 @@ class DeclarativeTest(DeclarativeTestBase):
b_id = Column(Integer, ForeignKey('b.id'))
b = relationship("B", backref="as_")
A.brap = A.b
+
class B(Base):
__tablename__ = 'b'
id = Column('id', Integer, primary_key=True)
@@ -889,7 +915,6 @@ class DeclarativeTest(DeclarativeTestBase):
assert isinstance(A.brap.original_property, properties.SynonymProperty)
A(brap=B())
-
def test_eager_order_by(self):
class Address(Base, fixtures.ComparableEntity):
@@ -910,14 +935,14 @@ class DeclarativeTest(DeclarativeTestBase):
Base.metadata.create_all()
u1 = User(name='u1', addresses=[Address(email='two'),
- Address(email='one')])
+ Address(email='one')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
eq_(sess.query(User).options(joinedload(User.addresses)).all(),
[User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])])
+ Address(email='two')])])
def test_order_by_multi(self):
@@ -936,17 +961,17 @@ class DeclarativeTest(DeclarativeTestBase):
test_needs_autoincrement=True)
name = Column('name', String(50))
addresses = relationship('Address',
- order_by=(Address.email, Address.id))
+ order_by=(Address.email, Address.id))
Base.metadata.create_all()
u1 = User(name='u1', addresses=[Address(email='two'),
- Address(email='one')])
+ Address(email='one')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
u = sess.query(User).filter(User.name == 'u1').one()
- a = u.addresses
+ u.addresses
def test_as_declarative(self):
@@ -971,13 +996,15 @@ class DeclarativeTest(DeclarativeTestBase):
decl.instrument_declarative(Address, reg, Base.metadata)
Base.metadata.create_all()
u1 = User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1',
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(
+ name='u1',
+ addresses=[Address(email='one'), Address(email='two')])])
def test_custom_mapper_attribute(self):
@@ -1045,7 +1072,7 @@ class DeclarativeTest(DeclarativeTestBase):
__tablename__ = 'foo'
__table_args__ = ForeignKeyConstraint(['id'], ['foo.id'
- ])
+ ])
id = Column('id', Integer, primary_key=True)
assert_raises_message(sa.exc.ArgumentError,
'__table_args__ value must be a tuple, ', err)
@@ -1107,17 +1134,18 @@ class DeclarativeTest(DeclarativeTestBase):
User.address_count = \
sa.orm.column_property(sa.select([sa.func.count(Address.id)]).
- where(Address.user_id
- == User.id).as_scalar())
+ where(Address.user_id
+ == User.id).as_scalar())
Base.metadata.create_all()
u1 = User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1', address_count=2,
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(name='u1', address_count=2,
+ addresses=[Address(email='one'), Address(email='two')])])
def test_useless_declared_attr(self):
class Address(Base, fixtures.ComparableEntity):
@@ -1140,23 +1168,26 @@ class DeclarativeTest(DeclarativeTestBase):
def address_count(cls):
# this doesn't really gain us anything. but if
# one is used, lets have it function as expected...
- return sa.orm.column_property(sa.select([sa.func.count(Address.id)]).
- where(Address.user_id == cls.id))
+ return sa.orm.column_property(
+ sa.select([sa.func.count(Address.id)]).
+ where(Address.user_id == cls.id))
Base.metadata.create_all()
u1 = User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1', address_count=2,
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(name='u1', address_count=2,
+ addresses=[Address(email='one'), Address(email='two')])])
def test_declared_on_base_class(self):
class MyBase(Base):
__tablename__ = 'foo'
id = Column(Integer, primary_key=True)
+
@declared_attr
def somecol(cls):
return Column(Integer)
@@ -1213,18 +1244,19 @@ class DeclarativeTest(DeclarativeTestBase):
adr_count = \
sa.orm.column_property(
sa.select([sa.func.count(Address.id)],
- Address.user_id == id).as_scalar())
+ Address.user_id == id).as_scalar())
addresses = relationship(Address)
Base.metadata.create_all()
u1 = User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1', adr_count=2,
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(name='u1', adr_count=2,
+ addresses=[Address(email='one'), Address(email='two')])])
def test_column_properties_2(self):
@@ -1248,7 +1280,7 @@ class DeclarativeTest(DeclarativeTestBase):
eq_(set(User.__table__.c.keys()), set(['id', 'name']))
eq_(set(Address.__table__.c.keys()), set(['id', 'email',
- 'user_id']))
+ 'user_id']))
def test_deferred(self):
@@ -1274,86 +1306,91 @@ class DeclarativeTest(DeclarativeTestBase):
def test_composite_inline(self):
class AddressComposite(fixtures.ComparableEntity):
+
def __init__(self, street, state):
self.street = street
self.state = state
+
def __composite_values__(self):
return [self.street, self.state]
class User(Base, fixtures.ComparableEntity):
__tablename__ = 'user'
id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
address = composite(AddressComposite,
- Column('street', String(50)),
- Column('state', String(2)),
- )
+ Column('street', String(50)),
+ Column('state', String(2)),
+ )
Base.metadata.create_all()
sess = Session()
sess.add(User(
- address=AddressComposite('123 anywhere street',
- 'MD')
- ))
+ address=AddressComposite('123 anywhere street',
+ 'MD')
+ ))
sess.commit()
eq_(
sess.query(User).all(),
[User(address=AddressComposite('123 anywhere street',
- 'MD'))]
+ 'MD'))]
)
def test_composite_separate(self):
class AddressComposite(fixtures.ComparableEntity):
+
def __init__(self, street, state):
self.street = street
self.state = state
+
def __composite_values__(self):
return [self.street, self.state]
class User(Base, fixtures.ComparableEntity):
__tablename__ = 'user'
id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
street = Column(String(50))
state = Column(String(2))
address = composite(AddressComposite,
- street, state)
+ street, state)
Base.metadata.create_all()
sess = Session()
sess.add(User(
- address=AddressComposite('123 anywhere street',
- 'MD')
- ))
+ address=AddressComposite('123 anywhere street',
+ 'MD')
+ ))
sess.commit()
eq_(
sess.query(User).all(),
[User(address=AddressComposite('123 anywhere street',
- 'MD'))]
+ 'MD'))]
)
def test_mapping_to_join(self):
users = Table('users', Base.metadata,
- Column('id', Integer, primary_key=True)
- )
+ Column('id', Integer, primary_key=True)
+ )
addresses = Table('addresses', Base.metadata,
- Column('id', Integer, primary_key=True),
- Column('user_id', Integer, ForeignKey('users.id'))
- )
+ Column('id', Integer, primary_key=True),
+ Column('user_id', Integer, ForeignKey('users.id'))
+ )
usersaddresses = sa.join(users, addresses, users.c.id
== addresses.c.user_id)
+
class User(Base):
__table__ = usersaddresses
- __table_args__ = {'primary_key':[users.c.id]}
+ __table_args__ = {'primary_key': [users.c.id]}
# need to use column_property for now
user_id = column_property(users.c.id, addresses.c.user_id)
address_id = addresses.c.id
assert User.__mapper__.get_property('user_id').columns[0] \
- is users.c.id
+ is users.c.id
assert User.__mapper__.get_property('user_id').columns[1] \
- is addresses.c.user_id
+ is addresses.c.user_id
def test_synonym_inline(self):
@@ -1372,7 +1409,7 @@ class DeclarativeTest(DeclarativeTestBase):
name = sa.orm.synonym('_name',
descriptor=property(_get_name,
- _set_name))
+ _set_name))
Base.metadata.create_all()
sess = create_session()
@@ -1381,7 +1418,7 @@ class DeclarativeTest(DeclarativeTestBase):
sess.add(u1)
sess.flush()
eq_(sess.query(User).filter(User.name == 'SOMENAME someuser'
- ).one(), u1)
+ ).one(), u1)
def test_synonym_no_descriptor(self):
from sqlalchemy.orm.properties import ColumnProperty
@@ -1434,7 +1471,7 @@ class DeclarativeTest(DeclarativeTestBase):
sess.add(u1)
sess.flush()
eq_(sess.query(User).filter(User.name == 'SOMENAME someuser'
- ).one(), u1)
+ ).one(), u1)
def test_reentrant_compile_via_foreignkey(self):
@@ -1465,13 +1502,14 @@ class DeclarativeTest(DeclarativeTestBase):
)
Base.metadata.create_all()
u1 = User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1',
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(name='u1',
+ addresses=[Address(email='one'), Address(email='two')])])
def test_relationship_reference(self):
@@ -1490,21 +1528,22 @@ class DeclarativeTest(DeclarativeTestBase):
test_needs_autoincrement=True)
name = Column('name', String(50))
addresses = relationship('Address', backref='user',
- primaryjoin=id == Address.user_id)
+ primaryjoin=id == Address.user_id)
User.address_count = \
sa.orm.column_property(sa.select([sa.func.count(Address.id)]).
- where(Address.user_id
- == User.id).as_scalar())
+ where(Address.user_id
+ == User.id).as_scalar())
Base.metadata.create_all()
u1 = User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1', address_count=2,
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(name='u1', address_count=2,
+ addresses=[Address(email='one'), Address(email='two')])])
def test_pk_with_fk_init(self):
@@ -1526,9 +1565,11 @@ class DeclarativeTest(DeclarativeTestBase):
def test_with_explicit_autoloaded(self):
meta = MetaData(testing.db)
- t1 = Table('t1', meta, Column('id', String(50),
+ t1 = Table(
+ 't1', meta,
+ Column('id', String(50),
primary_key=True, test_needs_autoincrement=True),
- Column('data', String(50)))
+ Column('data', String(50)))
meta.create_all()
try:
@@ -1541,7 +1582,7 @@ class DeclarativeTest(DeclarativeTestBase):
sess.add(m)
sess.flush()
eq_(t1.select().execute().fetchall(), [('someid', 'somedata'
- )])
+ )])
finally:
meta.drop_all()
@@ -1584,7 +1625,7 @@ class DeclarativeTest(DeclarativeTestBase):
op,
other,
**kw
- ):
+ ):
return op(self.upperself, other, **kw)
class User(Base, fixtures.ComparableEntity):
@@ -1612,7 +1653,7 @@ class DeclarativeTest(DeclarativeTestBase):
eq_(rt, u1)
sess.expunge_all()
rt = sess.query(User).filter(User.uc_name.startswith('SOMEUSE'
- )).one()
+ )).one()
eq_(rt, u1)
def test_duplicate_classes_in_base(self):
@@ -1631,7 +1672,6 @@ class DeclarativeTest(DeclarativeTestBase):
)
-
def _produce_test(inline, stringbased):
class ExplicitJoinTest(fixtures.MappedTest):
@@ -1657,35 +1697,43 @@ def _produce_test(inline, stringbased):
user_id = Column(Integer, ForeignKey('users.id'))
if inline:
if stringbased:
- user = relationship('User',
- primaryjoin='User.id==Address.user_id',
- backref='addresses')
+ user = relationship(
+ 'User',
+ primaryjoin='User.id==Address.user_id',
+ backref='addresses')
else:
user = relationship(User, primaryjoin=User.id
- == user_id, backref='addresses')
+ == user_id, backref='addresses')
if not inline:
configure_mappers()
if stringbased:
- Address.user = relationship('User',
- primaryjoin='User.id==Address.user_id',
- backref='addresses')
+ Address.user = relationship(
+ 'User',
+ primaryjoin='User.id==Address.user_id',
+ backref='addresses')
else:
- Address.user = relationship(User,
- primaryjoin=User.id == Address.user_id,
- backref='addresses')
+ Address.user = relationship(
+ User,
+ primaryjoin=User.id == Address.user_id,
+ backref='addresses')
@classmethod
def insert_data(cls):
- params = [dict(list(zip(('id', 'name'), column_values)))
- for column_values in [(7, 'jack'), (8, 'ed'), (9,
- 'fred'), (10, 'chuck')]]
+ params = [
+ dict(list(zip(('id', 'name'), column_values)))
+ for column_values in [
+ (7, 'jack'), (8, 'ed'),
+ (9, 'fred'), (10, 'chuck')]]
+
User.__table__.insert().execute(params)
- Address.__table__.insert().execute([dict(list(zip(('id',
- 'user_id', 'email'), column_values)))
- for column_values in [(1, 7, 'jack@bean.com'), (2,
- 8, 'ed@wood.com'), (3, 8, 'ed@bettyboop.com'), (4,
- 8, 'ed@lala.com'), (5, 9, 'fred@fred.com')]])
+ Address.__table__.insert().execute([
+ dict(list(zip(('id', 'user_id', 'email'), column_values)))
+ for column_values in [
+ (1, 7, 'jack@bean.com'),
+ (2, 8, 'ed@wood.com'),
+ (3, 8, 'ed@bettyboop.com'),
+ (4, 8, 'ed@lala.com'), (5, 9, 'fred@fred.com')]])
def test_aliased_join(self):
@@ -1699,13 +1747,14 @@ def _produce_test(inline, stringbased):
sess = create_session()
eq_(sess.query(User).join(User.addresses,
- aliased=True).filter(Address.email == 'ed@wood.com'
- ).filter(User.addresses.any(Address.email
- == 'jack@bean.com')).all(), [])
-
- ExplicitJoinTest.__name__ = 'ExplicitJoinTest%s%s' % (inline
- and 'Inline' or 'Separate', stringbased and 'String'
- or 'Literal')
+ aliased=True).filter(
+ Address.email == 'ed@wood.com').filter(
+ User.addresses.any(Address.email == 'jack@bean.com')).all(),
+ [])
+
+ ExplicitJoinTest.__name__ = 'ExplicitJoinTest%s%s' % (
+ inline and 'Inline' or 'Separate',
+ stringbased and 'String' or 'Literal')
return ExplicitJoinTest
for inline in True, False:
@@ -1713,4 +1762,3 @@ for inline in True, False:
testclass = _produce_test(inline, stringbased)
exec('%s = testclass' % testclass.__name__)
del testclass
-
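Reference sketch only, not part of the patch: the hunks above largely reflow relationship() calls that are configured with string expressions. Assuming illustrative table and class names, the pattern being exercised looks like this when written standalone:

# minimal sketch of string-resolved declarative relationships; names illustrative
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, backref, relationship

Base = declarative_base()


class User(Base):
    __tablename__ = 'users'
    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.String(50))
    # string arguments are resolved lazily at mapper configuration time,
    # so Address need not exist yet when this class body executes
    addresses = relationship(
        'Address',
        order_by='desc(Address.email)',
        primaryjoin='User.id == Address.user_id',
        foreign_keys='[Address.user_id]',
        backref=backref('user'))


class Address(Base):
    __tablename__ = 'addresses'
    id = sa.Column(sa.Integer, primary_key=True)
    email = sa.Column(sa.String(50))
    user_id = sa.Column(sa.Integer, sa.ForeignKey('users.id'))


engine = sa.create_engine('sqlite://')
Base.metadata.create_all(engine)
session = Session(bind=engine)
session.add(User(name='ed', addresses=[Address(email='abc'),
                                        Address(email='xyz')]))
session.commit()
# ordered by desc(Address.email), per the string order_by above
print(session.query(User).filter_by(name='ed').one().addresses)
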
diff --git a/test/ext/declarative/test_clsregistry.py b/test/ext/declarative/test_clsregistry.py
index e78a1abbe..535fd00b3 100644
--- a/test/ext/declarative/test_clsregistry.py
+++ b/test/ext/declarative/test_clsregistry.py
@@ -5,7 +5,9 @@ from sqlalchemy import exc, MetaData
from sqlalchemy.ext.declarative import clsregistry
import weakref
+
class MockClass(object):
+
def __init__(self, base, name):
self._decl_class_registry = base
tokens = name.split(".")
@@ -183,7 +185,7 @@ class ClsRegistryTest(fixtures.TestBase):
f1 = MockClass(base, "foo.bar.Foo")
clsregistry.add_class("Foo", f1)
reg = base['_sa_module_registry']
- mod_entry = reg['foo']['bar']
+ mod_entry = reg['foo']['bar'] # noqa
resolver = clsregistry._resolver(f1, MockProp())
resolver = resolver("foo")
assert_raises_message(
@@ -232,4 +234,3 @@ class ClsRegistryTest(fixtures.TestBase):
del f4
gc_collect()
assert 'single' not in reg
-
diff --git a/test/ext/declarative/test_inheritance.py b/test/ext/declarative/test_inheritance.py
index edff4421e..6ea37e4d3 100644
--- a/test/ext/declarative/test_inheritance.py
+++ b/test/ext/declarative/test_inheritance.py
@@ -10,12 +10,14 @@ from sqlalchemy.orm import relationship, create_session, class_mapper, \
configure_mappers, clear_mappers, \
polymorphic_union, deferred, Session
from sqlalchemy.ext.declarative import declared_attr, AbstractConcreteBase, \
- ConcreteBase, has_inherited_table
-from sqlalchemy.testing import fixtures
+ ConcreteBase, has_inherited_table
+from sqlalchemy.testing import fixtures, mock
Base = None
+
class DeclarativeTestBase(fixtures.TestBase, testing.AssertsExecutionResults):
+
def setup(self):
global Base
Base = decl.declarative_base(testing.db)
@@ -25,6 +27,7 @@ class DeclarativeTestBase(fixtures.TestBase, testing.AssertsExecutionResults):
clear_mappers()
Base.metadata.drop_all()
+
class DeclarativeInheritanceTest(DeclarativeTestBase):
def test_we_must_copy_mapper_args(self):
@@ -65,7 +68,6 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
assert class_mapper(Person).version_id_col == 'a'
assert class_mapper(Person).include_properties == set(['id', 'a', 'b'])
-
def test_custom_join_condition(self):
class Foo(Base):
@@ -123,21 +125,23 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
Base.metadata.create_all()
sess = create_session()
- c1 = Company(name='MegaCorp, Inc.',
- employees=[Engineer(name='dilbert',
- primary_language='java'), Engineer(name='wally',
- primary_language='c++'), Manager(name='dogbert',
- golf_swing='fore!')])
+ c1 = Company(
+ name='MegaCorp, Inc.',
+ employees=[
+ Engineer(name='dilbert', primary_language='java'),
+ Engineer(name='wally', primary_language='c++'),
+ Manager(name='dogbert', golf_swing='fore!')])
+
c2 = Company(name='Elbonia, Inc.',
employees=[Engineer(name='vlad',
- primary_language='cobol')])
+ primary_language='cobol')])
sess.add(c1)
sess.add(c2)
sess.flush()
sess.expunge_all()
eq_(sess.query(Company).filter(Company.employees.of_type(Engineer).
- any(Engineer.primary_language
- == 'cobol')).first(), c2)
+ any(Engineer.primary_language
+ == 'cobol')).first(), c2)
# ensure that the Manager mapper was compiled with the Manager id
# column as higher priority. this ensures that "Manager.id"
@@ -145,8 +149,8 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
# table (reversed from 0.6's behavior.)
eq_(
- Manager.id.property.columns,
- [Manager.__table__.c.id, Person.__table__.c.id]
+ Manager.id.property.columns,
+ [Manager.__table__.c.id, Person.__table__.c.id]
)
# assert that the "id" column is available without a second
@@ -157,13 +161,13 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
def go():
assert sess.query(Manager).filter(Manager.name == 'dogbert'
- ).one().id
+ ).one().id
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
def go():
assert sess.query(Person).filter(Manager.name == 'dogbert'
- ).one().id
+ ).one().id
self.assert_sql_count(testing.db, go, 1)
@@ -186,7 +190,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
primary_key=True)
Engineer.primary_language = Column('primary_language',
- String(50))
+ String(50))
Base.metadata.create_all()
sess = create_session()
e1 = Engineer(primary_language='java', name='dilbert')
@@ -194,7 +198,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
sess.flush()
sess.expunge_all()
eq_(sess.query(Person).first(),
- Engineer(primary_language='java', name='dilbert'))
+ Engineer(primary_language='java', name='dilbert'))
def test_add_parentcol_after_the_fact(self):
@@ -258,8 +262,8 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
sess.add(e1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(Person).first(), Admin(primary_language='java',
- name='dilbert', workstation='foo'))
+ eq_(sess.query(Person).first(),
+ Admin(primary_language='java', name='dilbert', workstation='foo'))
def test_subclass_mixin(self):
@@ -331,26 +335,25 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
class PlanBooking(Booking):
__tablename__ = 'plan_booking'
id = Column(Integer, ForeignKey(Booking.id),
- primary_key=True)
+ primary_key=True)
# referencing PlanBooking.id gives us the column
# on plan_booking, not booking
class FeatureBooking(Booking):
__tablename__ = 'feature_booking'
id = Column(Integer, ForeignKey(Booking.id),
- primary_key=True)
+ primary_key=True)
plan_booking_id = Column(Integer,
- ForeignKey(PlanBooking.id))
+ ForeignKey(PlanBooking.id))
plan_booking = relationship(PlanBooking,
- backref='feature_bookings')
+ backref='feature_bookings')
assert FeatureBooking.__table__.c.plan_booking_id.\
- references(PlanBooking.__table__.c.id)
+ references(PlanBooking.__table__.c.id)
assert FeatureBooking.__table__.c.id.\
- references(Booking.__table__.c.id)
-
+ references(Booking.__table__.c.id)
def test_single_colsonbase(self):
"""test single inheritance where all the columns are on the base
@@ -387,23 +390,26 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
Base.metadata.create_all()
sess = create_session()
- c1 = Company(name='MegaCorp, Inc.',
- employees=[Engineer(name='dilbert',
- primary_language='java'), Engineer(name='wally',
- primary_language='c++'), Manager(name='dogbert',
- golf_swing='fore!')])
+ c1 = Company(
+ name='MegaCorp, Inc.',
+ employees=[
+ Engineer(name='dilbert', primary_language='java'),
+ Engineer(name='wally', primary_language='c++'),
+ Manager(name='dogbert', golf_swing='fore!')])
+
c2 = Company(name='Elbonia, Inc.',
employees=[Engineer(name='vlad',
- primary_language='cobol')])
+ primary_language='cobol')])
sess.add(c1)
sess.add(c2)
sess.flush()
sess.expunge_all()
eq_(sess.query(Person).filter(Engineer.primary_language
- == 'cobol').first(), Engineer(name='vlad'))
+ == 'cobol').first(),
+ Engineer(name='vlad'))
eq_(sess.query(Company).filter(Company.employees.of_type(Engineer).
- any(Engineer.primary_language
- == 'cobol')).first(), c2)
+ any(Engineer.primary_language
+ == 'cobol')).first(), c2)
def test_single_colsonsub(self):
"""test single inheritance where the columns are local to their
@@ -470,15 +476,17 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
sess.flush()
sess.expunge_all()
eq_(sess.query(Person).filter(Engineer.primary_language
- == 'cobol').first(), Engineer(name='vlad'))
+ == 'cobol').first(),
+ Engineer(name='vlad'))
eq_(sess.query(Company).filter(Company.employees.of_type(Engineer).
- any(Engineer.primary_language
- == 'cobol')).first(), c2)
+ any(Engineer.primary_language
+ == 'cobol')).first(), c2)
eq_(sess.query(Engineer).filter_by(primary_language='cobol'
- ).one(), Engineer(name='vlad', primary_language='cobol'))
+ ).one(),
+ Engineer(name='vlad', primary_language='cobol'))
@testing.skip_if(lambda: testing.against('oracle'),
- "Test has an empty insert in it at the moment")
+ "Test has an empty insert in it at the moment")
def test_columns_single_inheritance_conflict_resolution(self):
"""Test that a declared_attr can return the existing column and it will
be ignored. this allows conditional columns to be added.
@@ -491,25 +499,29 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
id = Column(Integer, primary_key=True)
class Engineer(Person):
+
"""single table inheritance"""
@declared_attr
def target_id(cls):
- return cls.__table__.c.get('target_id',
- Column(Integer, ForeignKey('other.id'))
- )
+ return cls.__table__.c.get(
+ 'target_id',
+ Column(Integer, ForeignKey('other.id')))
+
@declared_attr
def target(cls):
return relationship("Other")
class Manager(Person):
+
"""single table inheritance"""
@declared_attr
def target_id(cls):
- return cls.__table__.c.get('target_id',
- Column(Integer, ForeignKey('other.id'))
- )
+ return cls.__table__.c.get(
+ 'target_id',
+ Column(Integer, ForeignKey('other.id')))
+
@declared_attr
def target(cls):
return relationship("Other")
@@ -534,11 +546,10 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
Engineer(target=o1),
Manager(target=o2),
Manager(target=o1)
- ])
+ ])
session.commit()
eq_(session.query(Engineer).first().target, o1)
-
def test_joined_from_single(self):
class Company(Base, fixtures.ComparableEntity):
@@ -595,12 +606,13 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
sess.expunge_all()
eq_(sess.query(Person).with_polymorphic(Engineer).
filter(Engineer.primary_language
- == 'cobol').first(), Engineer(name='vlad'))
+ == 'cobol').first(), Engineer(name='vlad'))
eq_(sess.query(Company).filter(Company.employees.of_type(Engineer).
- any(Engineer.primary_language
- == 'cobol')).first(), c2)
+ any(Engineer.primary_language
+ == 'cobol')).first(), c2)
eq_(sess.query(Engineer).filter_by(primary_language='cobol'
- ).one(), Engineer(name='vlad', primary_language='cobol'))
+ ).one(),
+ Engineer(name='vlad', primary_language='cobol'))
def test_single_from_joined_colsonsub(self):
class Person(Base, fixtures.ComparableEntity):
@@ -661,7 +673,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
eq_(sess.query(Person).all(), [Person(name='ratbert')])
sess.expunge_all()
person = sess.query(Person).filter(Person.name == 'ratbert'
- ).one()
+ ).one()
assert 'name' not in person.__dict__
def test_single_fksonsub(self):
@@ -683,7 +695,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
__mapper_args__ = {'polymorphic_identity': 'engineer'}
primary_language_id = Column(Integer,
- ForeignKey('languages.id'))
+ ForeignKey('languages.id'))
primary_language = relationship('Language')
class Language(Base, fixtures.ComparableEntity):
@@ -706,19 +718,19 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
sess.expunge_all()
eq_(sess.query(Person).filter(Engineer.primary_language.has(
Language.name
- == 'cobol')).first(), Engineer(name='vlad',
- primary_language=Language(name='cobol')))
+ == 'cobol')).first(),
+ Engineer(name='vlad', primary_language=Language(name='cobol')))
eq_(sess.query(Engineer).filter(Engineer.primary_language.has(
Language.name
- == 'cobol')).one(), Engineer(name='vlad',
- primary_language=Language(name='cobol')))
+ == 'cobol')).one(),
+ Engineer(name='vlad', primary_language=Language(name='cobol')))
eq_(sess.query(Person).join(Engineer.primary_language).order_by(
Language.name).all(),
[Engineer(name='vlad',
- primary_language=Language(name='cobol')),
- Engineer(name='wally', primary_language=Language(name='cpp'
- )), Engineer(name='dilbert',
- primary_language=Language(name='java'))])
+ primary_language=Language(name='cobol')),
+ Engineer(name='wally', primary_language=Language(name='cpp'
+ )),
+ Engineer(name='dilbert', primary_language=Language(name='java'))])
def test_single_three_levels(self):
@@ -810,11 +822,11 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
__mapper_args__ = {'polymorphic_identity': 'engineer'}
primary_language = Column('primary_language',
- String(50))
+ String(50))
foo_bar = Column(Integer, primary_key=True)
assert_raises_message(sa.exc.ArgumentError,
- 'place primary key', go)
+ 'place primary key', go)
def test_single_no_table_args(self):
@@ -832,7 +844,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
__mapper_args__ = {'polymorphic_identity': 'engineer'}
primary_language = Column('primary_language',
- String(50))
+ String(50))
# this should be on the Person class, as this is single
# table inheritance, which is why we test that this
@@ -849,6 +861,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
__tablename__ = "a"
id = Column(Integer, primary_key=True)
a_1 = A
+
class A(a_1):
__tablename__ = 'b'
id = Column(Integer(), ForeignKey(a_1.id), primary_key=True)
@@ -857,6 +870,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
class OverlapColPrecedenceTest(DeclarativeTestBase):
+
"""test #1892 cases when declarative does column precedence."""
def _run_test(self, Engineer, e_id, p_id):
@@ -895,7 +909,7 @@ class OverlapColPrecedenceTest(DeclarativeTestBase):
class Engineer(Person):
__tablename__ = 'engineer'
id = Column("eid", Integer, ForeignKey('person.id'),
- primary_key=True)
+ primary_key=True)
self._run_test(Engineer, "eid", "id")
@@ -907,15 +921,18 @@ class OverlapColPrecedenceTest(DeclarativeTestBase):
class Engineer(Person):
__tablename__ = 'engineer'
id = Column("eid", Integer, ForeignKey('person.pid'),
- primary_key=True)
+ primary_key=True)
self._run_test(Engineer, "eid", "pid")
from test.orm.test_events import _RemoveListeners
+
+
class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
+
def _roundtrip(self, Employee, Manager, Engineer, Boss,
- polymorphic=True, explicit_type=False):
+ polymorphic=True, explicit_type=False):
Base.metadata.create_all()
sess = create_session()
e1 = Engineer(name='dilbert', primary_language='java')
@@ -932,7 +949,7 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
assert_raises_message(
AttributeError,
"does not implement attribute .?'type' "
- "at the instance level.",
+ "at the instance level.",
getattr, obj, "type"
)
else:
@@ -946,37 +963,38 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
if polymorphic:
eq_(sess.query(Employee).order_by(Employee.name).all(),
[Engineer(name='dilbert'), Manager(name='dogbert'),
- Boss(name='pointy haired'), Engineer(name='vlad'), Engineer(name='wally')])
+ Boss(name='pointy haired'),
+ Engineer(name='vlad'), Engineer(name='wally')])
else:
eq_(sess.query(Engineer).order_by(Engineer.name).all(),
[Engineer(name='dilbert'), Engineer(name='vlad'),
- Engineer(name='wally')])
+ Engineer(name='wally')])
eq_(sess.query(Manager).all(), [Manager(name='dogbert')])
eq_(sess.query(Boss).all(), [Boss(name='pointy haired')])
-
def test_explicit(self):
- engineers = Table('engineers', Base.metadata, Column('id',
- Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('primary_language', String(50)))
+ engineers = Table(
+ 'engineers', Base.metadata,
+ Column('id',
+ Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('primary_language', String(50)))
managers = Table('managers', Base.metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('golf_swing', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('golf_swing', String(50))
+ )
boss = Table('boss', Base.metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('golf_swing', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('golf_swing', String(50))
+ )
punion = polymorphic_union({
- 'engineer': engineers,
- 'manager': managers,
- 'boss': boss}, 'type', 'punion')
+ 'engineer': engineers,
+ 'manager': managers,
+ 'boss': boss}, 'type', 'punion')
class Employee(Base, fixtures.ComparableEntity):
@@ -1047,31 +1065,31 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
class Manager(Employee):
__tablename__ = 'manager'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
golf_swing = Column(String(40))
__mapper_args__ = {
- 'polymorphic_identity': 'manager',
- 'concrete': True}
+ 'polymorphic_identity': 'manager',
+ 'concrete': True}
class Boss(Manager):
__tablename__ = 'boss'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
golf_swing = Column(String(40))
__mapper_args__ = {
- 'polymorphic_identity': 'boss',
- 'concrete': True}
+ 'polymorphic_identity': 'boss',
+ 'concrete': True}
class Engineer(Employee):
__tablename__ = 'engineer'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
primary_language = Column(String(40))
__mapper_args__ = {'polymorphic_identity': 'engineer',
- 'concrete': True}
+ 'concrete': True}
self._roundtrip(Employee, Manager, Engineer, Boss)
@@ -1079,42 +1097,42 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
class Employee(ConcreteBase, Base, fixtures.ComparableEntity):
__tablename__ = 'employee'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
__mapper_args__ = {
- 'polymorphic_identity': 'employee',
- 'concrete': True}
+ 'polymorphic_identity': 'employee',
+ 'concrete': True}
+
class Manager(Employee):
__tablename__ = 'manager'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
golf_swing = Column(String(40))
__mapper_args__ = {
- 'polymorphic_identity': 'manager',
- 'concrete': True}
+ 'polymorphic_identity': 'manager',
+ 'concrete': True}
class Boss(Manager):
__tablename__ = 'boss'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
golf_swing = Column(String(40))
__mapper_args__ = {
- 'polymorphic_identity': 'boss',
- 'concrete': True}
+ 'polymorphic_identity': 'boss',
+ 'concrete': True}
class Engineer(Employee):
__tablename__ = 'engineer'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
primary_language = Column(String(40))
__mapper_args__ = {'polymorphic_identity': 'engineer',
- 'concrete': True}
+ 'concrete': True}
self._roundtrip(Employee, Manager, Engineer, Boss)
-
def test_has_inherited_table_doesnt_consider_base(self):
class A(Base):
__tablename__ = 'a'
@@ -1140,7 +1158,7 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
ret = {
'polymorphic_identity': 'default',
'polymorphic_on': cls.type,
- }
+ }
else:
ret = {'polymorphic_identity': cls.__name__}
return ret
@@ -1161,7 +1179,7 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
class Manager(Employee):
__tablename__ = 'manager'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
golf_swing = Column(String(40))
@@ -1170,13 +1188,13 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
return "manager"
__mapper_args__ = {
- 'polymorphic_identity': "manager",
- 'concrete': True}
+ 'polymorphic_identity': "manager",
+ 'concrete': True}
class Boss(Manager):
__tablename__ = 'boss'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
golf_swing = Column(String(40))
@@ -1185,13 +1203,13 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
return "boss"
__mapper_args__ = {
- 'polymorphic_identity': "boss",
- 'concrete': True}
+ 'polymorphic_identity': "boss",
+ 'concrete': True}
class Engineer(Employee):
__tablename__ = 'engineer'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
primary_language = Column(String(40))
@@ -1199,26 +1217,30 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
def type(self):
return "engineer"
__mapper_args__ = {'polymorphic_identity': "engineer",
- 'concrete': True}
+ 'concrete': True}
self._roundtrip(Employee, Manager, Engineer, Boss, explicit_type=True)
-class ConcreteExtensionConfigTest(_RemoveListeners, testing.AssertsCompiledSQL, DeclarativeTestBase):
+
+class ConcreteExtensionConfigTest(
+ _RemoveListeners, testing.AssertsCompiledSQL, DeclarativeTestBase):
__dialect__ = 'default'
def test_classreg_setup(self):
class A(Base, fixtures.ComparableEntity):
__tablename__ = 'a'
- id = Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ id = Column(Integer,
+ primary_key=True, test_needs_autoincrement=True)
data = Column(String(50))
collection = relationship("BC", primaryjoin="BC.a_id == A.id",
- collection_class=set)
+ collection_class=set)
class BC(AbstractConcreteBase, Base, fixtures.ComparableEntity):
pass
class B(BC):
__tablename__ = 'b'
- id = Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ id = Column(Integer,
+ primary_key=True, test_needs_autoincrement=True)
a_id = Column(Integer, ForeignKey('a.id'))
data = Column(String(50))
@@ -1230,7 +1252,8 @@ class ConcreteExtensionConfigTest(_RemoveListeners, testing.AssertsCompiledSQL,
class C(BC):
__tablename__ = 'c'
- id = Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ id = Column(Integer,
+ primary_key=True, test_needs_autoincrement=True)
a_id = Column(Integer, ForeignKey('a.id'))
data = Column(String(50))
c_data = Column(String(50))
@@ -1274,8 +1297,123 @@ class ConcreteExtensionConfigTest(_RemoveListeners, testing.AssertsCompiledSQL,
sess.query(A).join(A.collection),
"SELECT a.id AS a_id, a.data AS a_data FROM a JOIN "
"(SELECT c.id AS id, c.a_id AS a_id, c.data AS data, "
- "c.c_data AS c_data, CAST(NULL AS VARCHAR(50)) AS b_data, "
- "'c' AS type FROM c UNION ALL SELECT b.id AS id, b.a_id AS a_id, "
- "b.data AS data, CAST(NULL AS VARCHAR(50)) AS c_data, "
- "b.b_data AS b_data, 'b' AS type FROM b) AS pjoin ON pjoin.a_id = a.id"
+ "c.c_data AS c_data, CAST(NULL AS VARCHAR(50)) AS b_data, "
+ "'c' AS type FROM c UNION ALL SELECT b.id AS id, b.a_id AS a_id, "
+ "b.data AS data, CAST(NULL AS VARCHAR(50)) AS c_data, "
+ "b.b_data AS b_data, 'b' AS type FROM b) AS pjoin "
+ "ON pjoin.a_id = a.id"
)
+
+ def test_prop_on_base(self):
+ """test [ticket:2670] """
+
+ counter = mock.Mock()
+
+ class Something(Base):
+ __tablename__ = 'something'
+ id = Column(Integer, primary_key=True)
+
+ class AbstractConcreteAbstraction(AbstractConcreteBase, Base):
+ id = Column(Integer, primary_key=True)
+ x = Column(Integer)
+ y = Column(Integer)
+
+ @declared_attr
+ def something_id(cls):
+ return Column(ForeignKey(Something.id))
+
+ @declared_attr
+ def something(cls):
+ counter(cls, "something")
+ return relationship("Something")
+
+ @declared_attr
+ def something_else(cls):
+ counter(cls, "something_else")
+ return relationship("Something")
+
+ class ConcreteConcreteAbstraction(AbstractConcreteAbstraction):
+ __tablename__ = 'cca'
+ __mapper_args__ = {
+ 'polymorphic_identity': 'ccb',
+ 'concrete': True}
+
+ # concrete is mapped, the abstract base is not (yet)
+ assert ConcreteConcreteAbstraction.__mapper__
+ assert not hasattr(AbstractConcreteAbstraction, '__mapper__')
+
+ session = Session()
+ self.assert_compile(
+ session.query(ConcreteConcreteAbstraction).filter(
+ ConcreteConcreteAbstraction.something.has(id=1)),
+ "SELECT cca.id AS cca_id, cca.x AS cca_x, cca.y AS cca_y, "
+ "cca.something_id AS cca_something_id FROM cca WHERE EXISTS "
+ "(SELECT 1 FROM something WHERE something.id = cca.something_id "
+ "AND something.id = :id_1)"
+ )
+
+ # now it is
+ assert AbstractConcreteAbstraction.__mapper__
+
+ self.assert_compile(
+ session.query(ConcreteConcreteAbstraction).filter(
+ ConcreteConcreteAbstraction.something_else.has(id=1)),
+ "SELECT cca.id AS cca_id, cca.x AS cca_x, cca.y AS cca_y, "
+ "cca.something_id AS cca_something_id FROM cca WHERE EXISTS "
+ "(SELECT 1 FROM something WHERE something.id = cca.something_id "
+ "AND something.id = :id_1)"
+ )
+
+ self.assert_compile(
+ session.query(AbstractConcreteAbstraction).filter(
+ AbstractConcreteAbstraction.something.has(id=1)),
+ "SELECT pjoin.id AS pjoin_id, pjoin.x AS pjoin_x, "
+ "pjoin.y AS pjoin_y, pjoin.something_id AS pjoin_something_id, "
+ "pjoin.type AS pjoin_type FROM "
+ "(SELECT cca.id AS id, cca.x AS x, cca.y AS y, "
+ "cca.something_id AS something_id, 'ccb' AS type FROM cca) "
+ "AS pjoin WHERE EXISTS (SELECT 1 FROM something "
+ "WHERE something.id = pjoin.something_id AND something.id = :id_1)"
+ )
+
+ self.assert_compile(
+ session.query(AbstractConcreteAbstraction).filter(
+ AbstractConcreteAbstraction.something_else.has(id=1)),
+ "SELECT pjoin.id AS pjoin_id, pjoin.x AS pjoin_x, "
+ "pjoin.y AS pjoin_y, pjoin.something_id AS pjoin_something_id, "
+ "pjoin.type AS pjoin_type FROM "
+ "(SELECT cca.id AS id, cca.x AS x, cca.y AS y, "
+ "cca.something_id AS something_id, 'ccb' AS type FROM cca) "
+ "AS pjoin WHERE EXISTS (SELECT 1 FROM something "
+ "WHERE something.id = pjoin.something_id AND something.id = :id_1)"
+ )
+
+ def test_abstract_in_hierarchy(self):
+ class Document(Base, AbstractConcreteBase):
+ doctype = Column(String)
+
+ class ContactDocument(Document):
+ __abstract__ = True
+
+ send_method = Column(String)
+
+ class ActualDocument(ContactDocument):
+ __tablename__ = 'actual_documents'
+ __mapper_args__ = {
+ 'concrete': True,
+ 'polymorphic_identity': 'actual'}
+
+ id = Column(Integer, primary_key=True)
+
+ configure_mappers()
+ session = Session()
+ self.assert_compile(
+ session.query(Document),
+ "SELECT pjoin.doctype AS pjoin_doctype, "
+ "pjoin.send_method AS pjoin_send_method, "
+ "pjoin.id AS pjoin_id, pjoin.type AS pjoin_type "
+ "FROM (SELECT actual_documents.doctype AS doctype, "
+ "actual_documents.send_method AS send_method, "
+ "actual_documents.id AS id, 'actual' AS type "
+ "FROM actual_documents) AS pjoin"
+ )
\ No newline at end of file
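
For orientation only, not part of the patch: the tests added above for [ticket:2670] compile queries against AbstractConcreteBase hierarchies, where the abstract base is mapped to a generated "pjoin" UNION of its concrete subclasses once mappers are configured. A minimal runnable sketch of that pattern, with illustrative names:

# sketch of the AbstractConcreteBase pattern the new tests exercise
import sqlalchemy as sa
from sqlalchemy.ext.declarative import AbstractConcreteBase, declarative_base
from sqlalchemy.orm import Session, configure_mappers

Base = declarative_base()


class Document(AbstractConcreteBase, Base):
    # the abstract base gets no table of its own; columns declared here
    # are copied onto each concrete subclass table
    doctype = sa.Column(sa.String(30))


class ActualDocument(Document):
    __tablename__ = 'actual_documents'
    __mapper_args__ = {'concrete': True, 'polymorphic_identity': 'actual'}
    id = sa.Column(sa.Integer, primary_key=True)
    send_method = sa.Column(sa.String(30))


# mapping the abstract base (and building its pjoin selectable) happens here
configure_mappers()

engine = sa.create_engine('sqlite://')
Base.metadata.create_all(engine)
session = Session(bind=engine)
session.add(ActualDocument(doctype='letter', send_method='email'))
session.commit()
# querying the abstract base selects from the generated pjoin subquery
print(session.query(Document).all())
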
diff --git a/test/ext/declarative/test_mixin.py b/test/ext/declarative/test_mixin.py
index d3c2ff982..db86927a1 100644
--- a/test/ext/declarative/test_mixin.py
+++ b/test/ext/declarative/test_mixin.py
@@ -3,19 +3,21 @@ from sqlalchemy.testing import eq_, assert_raises, \
from sqlalchemy.ext import declarative as decl
import sqlalchemy as sa
from sqlalchemy import testing
-from sqlalchemy import Integer, String, ForeignKey
+from sqlalchemy import Integer, String, ForeignKey, select, func
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import relationship, create_session, class_mapper, \
configure_mappers, clear_mappers, \
- deferred, column_property, \
- Session
+ deferred, column_property, Session, base as orm_base
from sqlalchemy.util import classproperty
from sqlalchemy.ext.declarative import declared_attr
-from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import fixtures, mock
+from sqlalchemy.testing.util import gc_collect
Base = None
+
class DeclarativeTestBase(fixtures.TestBase, testing.AssertsExecutionResults):
+
def setup(self):
global Base
Base = decl.declarative_base(testing.db)
@@ -25,6 +27,7 @@ class DeclarativeTestBase(fixtures.TestBase, testing.AssertsExecutionResults):
clear_mappers()
Base.metadata.drop_all()
+
class DeclarativeMixinTest(DeclarativeTestBase):
def test_simple(self):
@@ -157,6 +160,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_table_name_inherited(self):
class MyMixin:
+
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
@@ -169,6 +173,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_classproperty_still_works(self):
class MyMixin(object):
+
@classproperty
def __tablename__(cls):
return cls.__name__.lower()
@@ -182,6 +187,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_table_name_not_inherited(self):
class MyMixin:
+
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
@@ -195,11 +201,13 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_table_name_inheritance_order(self):
class MyMixin1:
+
@declared_attr
def __tablename__(cls):
return cls.__name__.lower() + '1'
class MyMixin2:
+
@declared_attr
def __tablename__(cls):
return cls.__name__.lower() + '2'
@@ -212,6 +220,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_table_name_dependent_on_subclass(self):
class MyHistoryMixin:
+
@declared_attr
def __tablename__(cls):
return cls.parent_name + '_changelog'
@@ -236,6 +245,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_table_args_inherited_descriptor(self):
class MyMixin:
+
@declared_attr
def __table_args__(cls):
return {'info': cls.__name__}
@@ -289,7 +299,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
assert Specific.bar.prop is General.bar.prop
@testing.skip_if(lambda: testing.against('oracle'),
- "Test has an empty insert in it at the moment")
+ "Test has an empty insert in it at the moment")
def test_columns_single_inheritance_conflict_resolution(self):
"""Test that a declared_attr can return the existing column and it will
be ignored. this allows conditional columns to be added.
@@ -302,20 +312,24 @@ class DeclarativeMixinTest(DeclarativeTestBase):
id = Column(Integer, primary_key=True)
class Mixin(object):
+
@declared_attr
def target_id(cls):
- return cls.__table__.c.get('target_id',
- Column(Integer, ForeignKey('other.id'))
- )
+ return cls.__table__.c.get(
+ 'target_id',
+ Column(Integer, ForeignKey('other.id'))
+ )
@declared_attr
def target(cls):
return relationship("Other")
class Engineer(Mixin, Person):
+
"""single table inheritance"""
class Manager(Mixin, Person):
+
"""single table inheritance"""
class Other(Base):
@@ -338,11 +352,10 @@ class DeclarativeMixinTest(DeclarativeTestBase):
Engineer(target=o1),
Manager(target=o2),
Manager(target=o1)
- ])
+ ])
session.commit()
eq_(session.query(Engineer).first().target, o1)
-
def test_columns_joined_table_inheritance(self):
"""Test a column on a mixin with an alternate attribute name,
mapped to a superclass and joined-table inheritance subclass.
@@ -428,6 +441,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_mapper_args_declared_attr(self):
class ComputedMapperArgs:
+
@declared_attr
def __mapper_args__(cls):
if cls.__name__ == 'Person':
@@ -454,6 +468,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
# ComputedMapperArgs on both classes for no apparent reason.
class ComputedMapperArgs:
+
@declared_attr
def __mapper_args__(cls):
if cls.__name__ == 'Person':
@@ -612,7 +627,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
@declared_attr
def __table_args__(cls):
- return {'mysql_engine':'InnoDB'}
+ return {'mysql_engine': 'InnoDB'}
@declared_attr
def __mapper_args__(cls):
@@ -640,13 +655,14 @@ class DeclarativeMixinTest(DeclarativeTestBase):
"""test the @declared_attr approach from a custom base."""
class Base(object):
+
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
@declared_attr
def __table_args__(cls):
- return {'mysql_engine':'InnoDB'}
+ return {'mysql_engine': 'InnoDB'}
@declared_attr
def id(self):
@@ -714,7 +730,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
eq_(Generic.__table__.name, 'generic')
eq_(Specific.__table__.name, 'specific')
eq_(list(Generic.__table__.c.keys()), ['timestamp', 'id',
- 'python_type'])
+ 'python_type'])
eq_(list(Specific.__table__.c.keys()), ['id'])
eq_(Generic.__table__.kwargs, {'mysql_engine': 'InnoDB'})
eq_(Specific.__table__.kwargs, {'mysql_engine': 'InnoDB'})
@@ -749,7 +765,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
eq_(BaseType.__table__.name, 'basetype')
eq_(list(BaseType.__table__.c.keys()), ['timestamp', 'type', 'id',
- 'value'])
+ 'value'])
eq_(BaseType.__table__.kwargs, {'mysql_engine': 'InnoDB'})
assert Single.__table__ is BaseType.__table__
eq_(Joined.__table__.name, 'joined')
@@ -851,7 +867,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
@declared_attr
def __tablename__(cls):
if decl.has_inherited_table(cls) and TableNameMixin \
- not in cls.__bases__:
+ not in cls.__bases__:
return None
return cls.__name__.lower()
@@ -900,9 +916,9 @@ class DeclarativeMixinTest(DeclarativeTestBase):
class Model(Base, ColumnMixin):
- __table__ = Table('foo', Base.metadata, Column('data',
- Integer), Column('id', Integer,
- primary_key=True))
+ __table__ = Table('foo', Base.metadata,
+ Column('data', Integer),
+ Column('id', Integer, primary_key=True))
model_col = Model.__table__.c.data
mixin_col = ColumnMixin.data
@@ -920,8 +936,8 @@ class DeclarativeMixinTest(DeclarativeTestBase):
class Model(Base, ColumnMixin):
__table__ = Table('foo', Base.metadata,
- Column('data',Integer),
- Column('id', Integer,primary_key=True))
+ Column('data', Integer),
+ Column('id', Integer, primary_key=True))
foo = relationship("Dest")
assert_raises_message(sa.exc.ArgumentError,
@@ -942,9 +958,9 @@ class DeclarativeMixinTest(DeclarativeTestBase):
class Model(Base, ColumnMixin):
__table__ = Table('foo', Base.metadata,
- Column('data',Integer),
- Column('tada', Integer),
- Column('id', Integer,primary_key=True))
+ Column('data', Integer),
+ Column('tada', Integer),
+ Column('id', Integer, primary_key=True))
foo = relationship("Dest")
assert_raises_message(sa.exc.ArgumentError,
@@ -959,9 +975,9 @@ class DeclarativeMixinTest(DeclarativeTestBase):
class Model(Base, ColumnMixin):
- __table__ = Table('foo', Base.metadata, Column('data',
- Integer), Column('id', Integer,
- primary_key=True))
+ __table__ = Table('foo', Base.metadata,
+ Column('data', Integer),
+ Column('id', Integer, primary_key=True))
model_col = Model.__table__.c.data
mixin_col = ColumnMixin.data
@@ -987,10 +1003,11 @@ class DeclarativeMixinTest(DeclarativeTestBase):
__tablename__ = 'model'
eq_(list(Model.__table__.c.keys()), ['col1', 'col3', 'col2', 'col4',
- 'id'])
+ 'id'])
def test_honor_class_mro_one(self):
class HasXMixin(object):
+
@declared_attr
def x(self):
return Column(Integer)
@@ -1007,6 +1024,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_honor_class_mro_two(self):
class HasXMixin(object):
+
@declared_attr
def x(self):
return Column(Integer)
@@ -1014,6 +1032,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
class Parent(HasXMixin, Base):
__tablename__ = 'parent'
id = Column(Integer, primary_key=True)
+
def x(self):
return "hi"
@@ -1025,6 +1044,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_arbitrary_attrs_one(self):
class HasMixin(object):
+
@declared_attr
def some_attr(cls):
return cls.__name__ + "SOME ATTR"
@@ -1043,8 +1063,9 @@ class DeclarativeMixinTest(DeclarativeTestBase):
__tablename__ = 'filter_a'
id = Column(Integer(), primary_key=True)
parent_id = Column(Integer(),
- ForeignKey('type_a.id'))
+ ForeignKey('type_a.id'))
filter = Column(String())
+
def __init__(self, filter_, **kw):
self.filter = filter_
@@ -1052,16 +1073,18 @@ class DeclarativeMixinTest(DeclarativeTestBase):
__tablename__ = 'filter_b'
id = Column(Integer(), primary_key=True)
parent_id = Column(Integer(),
- ForeignKey('type_b.id'))
+ ForeignKey('type_b.id'))
filter = Column(String())
+
def __init__(self, filter_, **kw):
self.filter = filter_
class FilterMixin(object):
+
@declared_attr
def _filters(cls):
return relationship(cls.filter_class,
- cascade='all,delete,delete-orphan')
+ cascade='all,delete,delete-orphan')
@declared_attr
def filters(cls):
@@ -1080,6 +1103,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
TypeA(filters=['foo'])
TypeB(filters=['foo'])
+
class DeclarativeMixinPropertyTest(DeclarativeTestBase):
def test_column_property(self):
@@ -1118,9 +1142,9 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase):
sess.add_all([m1, m2])
sess.flush()
eq_(sess.query(MyModel).filter(MyModel.prop_hoho == 'foo'
- ).one(), m1)
+ ).one(), m1)
eq_(sess.query(MyOtherModel).filter(MyOtherModel.prop_hoho
- == 'bar').one(), m2)
+ == 'bar').one(), m2)
def test_doc(self):
"""test documentation transfer.
@@ -1198,7 +1222,6 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase):
ModelTwo.__table__.c.version_id
)
-
def test_deferred(self):
class MyMixin(object):
@@ -1235,8 +1258,8 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase):
@declared_attr
def target(cls):
return relationship('Target',
- primaryjoin='Target.id==%s.target_id'
- % cls.__name__)
+ primaryjoin='Target.id==%s.target_id'
+ % cls.__name__)
else:
@declared_attr
@@ -1279,7 +1302,199 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase):
self._test_relationship(True)
+class DeclaredAttrTest(DeclarativeTestBase, testing.AssertsCompiledSQL):
+ __dialect__ = 'default'
+
+ def test_singleton_behavior_within_decl(self):
+ counter = mock.Mock()
+
+ class Mixin(object):
+ @declared_attr
+ def my_prop(cls):
+ counter(cls)
+ return Column('x', Integer)
+
+ class A(Base, Mixin):
+ __tablename__ = 'a'
+ id = Column(Integer, primary_key=True)
+
+ @declared_attr
+ def my_other_prop(cls):
+ return column_property(cls.my_prop + 5)
+
+ eq_(counter.mock_calls, [mock.call(A)])
+
+ class B(Base, Mixin):
+ __tablename__ = 'b'
+ id = Column(Integer, primary_key=True)
+
+ @declared_attr
+ def my_other_prop(cls):
+ return column_property(cls.my_prop + 5)
+
+ eq_(
+ counter.mock_calls,
+ [mock.call(A), mock.call(B)])
+
+ # this is why we need singleton-per-class behavior. We get
+ # an un-bound "x" column otherwise here, because my_prop() generates
+ # multiple columns.
+ a_col = A.my_other_prop.__clause_element__().element.left
+ b_col = B.my_other_prop.__clause_element__().element.left
+ is_(a_col.table, A.__table__)
+ is_(b_col.table, B.__table__)
+ is_(a_col, A.__table__.c.x)
+ is_(b_col, B.__table__.c.x)
+
+ s = Session()
+ self.assert_compile(
+ s.query(A),
+ "SELECT a.x AS a_x, a.x + :x_1 AS anon_1, a.id AS a_id FROM a"
+ )
+ self.assert_compile(
+ s.query(B),
+ "SELECT b.x AS b_x, b.x + :x_1 AS anon_1, b.id AS b_id FROM b"
+ )
+
+
+ def test_singleton_gc(self):
+ counter = mock.Mock()
+
+ class Mixin(object):
+ @declared_attr
+ def my_prop(cls):
+ counter(cls.__name__)
+ return Column('x', Integer)
+
+ class A(Base, Mixin):
+ __tablename__ = 'b'
+ id = Column(Integer, primary_key=True)
+
+ @declared_attr
+ def my_other_prop(cls):
+ return column_property(cls.my_prop + 5)
+
+ eq_(counter.mock_calls, [mock.call("A")])
+ del A
+ gc_collect()
+ assert "A" not in Base._decl_class_registry
+
+ def test_can_we_access_the_mixin_straight(self):
+ class Mixin(object):
+ @declared_attr
+ def my_prop(cls):
+ return Column('x', Integer)
+
+ assert_raises_message(
+ sa.exc.SAWarning,
+ "Unmanaged access of declarative attribute my_prop "
+ "from non-mapped class Mixin",
+ getattr, Mixin, "my_prop"
+ )
+
+ def test_property_noncascade(self):
+ counter = mock.Mock()
+
+ class Mixin(object):
+ @declared_attr
+ def my_prop(cls):
+ counter(cls)
+ return column_property(cls.x + 2)
+
+ class A(Base, Mixin):
+ __tablename__ = 'a'
+
+ id = Column(Integer, primary_key=True)
+ x = Column(Integer)
+
+ class B(A):
+ pass
+
+ eq_(counter.mock_calls, [mock.call(A)])
+
+ def test_property_cascade(self):
+ counter = mock.Mock()
+
+ class Mixin(object):
+ @declared_attr.cascading
+ def my_prop(cls):
+ counter(cls)
+ return column_property(cls.x + 2)
+
+ class A(Base, Mixin):
+ __tablename__ = 'a'
+
+ id = Column(Integer, primary_key=True)
+ x = Column(Integer)
+
+ class B(A):
+ pass
+
+ eq_(counter.mock_calls, [mock.call(A), mock.call(B)])
+
+ def test_column_pre_map(self):
+ counter = mock.Mock()
+
+ class Mixin(object):
+ @declared_attr
+ def my_col(cls):
+ counter(cls)
+ assert not orm_base._mapper_or_none(cls)
+ return Column('x', Integer)
+
+ class A(Base, Mixin):
+ __tablename__ = 'a'
+
+ id = Column(Integer, primary_key=True)
+
+ eq_(counter.mock_calls, [mock.call(A)])
+
+ def test_mixin_attr_refers_to_column_copies(self):
+ # this @declared_attr can refer to User.id
+ # freely because we now do the "copy column" operation
+ # before the declared_attr is invoked.
+
+ counter = mock.Mock()
+
+ class HasAddressCount(object):
+ id = Column(Integer, primary_key=True)
+
+ @declared_attr
+ def address_count(cls):
+ counter(cls.id)
+ return column_property(
+ select([func.count(Address.id)]).
+ where(Address.user_id == cls.id).
+ as_scalar()
+ )
+
+ class Address(Base):
+ __tablename__ = 'address'
+ id = Column(Integer, primary_key=True)
+ user_id = Column(ForeignKey('user.id'))
+
+ class User(Base, HasAddressCount):
+ __tablename__ = 'user'
+
+ eq_(
+ counter.mock_calls,
+ [mock.call(User.id)]
+ )
+
+ sess = Session()
+ self.assert_compile(
+ sess.query(User).having(User.address_count > 5),
+ 'SELECT (SELECT count(address.id) AS '
+ 'count_1 FROM address WHERE address.user_id = "user".id) '
+ 'AS anon_1, "user".id AS user_id FROM "user" '
+ 'HAVING (SELECT count(address.id) AS '
+ 'count_1 FROM address WHERE address.user_id = "user".id) '
+ '> :param_1'
+ )
+
+
class AbstractTest(DeclarativeTestBase):
+
def test_abstract_boolean(self):
class A(Base):
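(Illustrative note, not part of the patch.)  The DeclaredAttrTest cases above rely on
@declared_attr evaluating once per mapped class, so each class gets its own Column
rather than sharing a single unbound one.  A minimal sketch of that idea, using
made-up names XMixin/A/B:

    from sqlalchemy import Column, Integer
    from sqlalchemy.ext.declarative import declarative_base, declared_attr

    Base = declarative_base()

    class XMixin(object):
        @declared_attr
        def x(cls):
            # invoked separately for A and for B; each class gets its own Column
            return Column(Integer)

    class A(XMixin, Base):
        __tablename__ = 'a'
        id = Column(Integer, primary_key=True)

    class B(XMixin, Base):
        __tablename__ = 'b'
        id = Column(Integer, primary_key=True)

    assert A.__table__.c.x is not B.__table__.c.x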
diff --git a/test/ext/declarative/test_reflection.py b/test/ext/declarative/test_reflection.py
index f4bda6995..c7f7bc05d 100644
--- a/test/ext/declarative/test_reflection.py
+++ b/test/ext/declarative/test_reflection.py
@@ -1,7 +1,7 @@
from sqlalchemy.testing import eq_, assert_raises
from sqlalchemy.ext import declarative as decl
from sqlalchemy import testing
-from sqlalchemy import MetaData, Integer, String, ForeignKey
+from sqlalchemy import Integer, String, ForeignKey
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import relationship, create_session, \
clear_mappers, \
@@ -10,6 +10,7 @@ from sqlalchemy.testing import fixtures
from sqlalchemy.testing.util import gc_collect
from sqlalchemy.ext.declarative.base import _DeferredMapperConfig
+
class DeclarativeReflectionBase(fixtures.TablesTest):
__requires__ = 'reflectable_autoincrement',
@@ -21,13 +22,14 @@ class DeclarativeReflectionBase(fixtures.TablesTest):
super(DeclarativeReflectionBase, self).teardown()
clear_mappers()
+
class DeclarativeReflectionTest(DeclarativeReflectionBase):
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer,
+ primary_key=True, test_needs_autoincrement=True),
Column('name', String(50)), test_needs_fk=True)
Table(
'addresses',
@@ -37,7 +39,7 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
Column('email', String(50)),
Column('user_id', Integer, ForeignKey('users.id')),
test_needs_fk=True,
- )
+ )
Table(
'imhandles',
metadata,
@@ -47,8 +49,7 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
Column('network', String(50)),
Column('handle', String(50)),
test_needs_fk=True,
- )
-
+ )
def test_basic(self):
class User(Base, fixtures.ComparableEntity):
@@ -69,13 +70,14 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
test_needs_autoincrement=True)
u1 = User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1',
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(name='u1',
+ addresses=[Address(email='one'), Address(email='two')])])
a1 = sess.query(Address).filter(Address.email == 'two').one()
eq_(a1, Address(email='two'))
eq_(a1.user, User(name='u1'))
@@ -100,13 +102,14 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
test_needs_autoincrement=True)
u1 = User(nom='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(nom='u1',
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(nom='u1',
+ addresses=[Address(email='one'), Address(email='two')])])
a1 = sess.query(Address).filter(Address.email == 'two').one()
eq_(a1, Address(email='two'))
eq_(a1.user, User(nom='u1'))
@@ -131,61 +134,66 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
test_needs_autoincrement=True)
handles = relationship('IMHandle', backref='user')
- u1 = User(name='u1', handles=[IMHandle(network='blabber',
- handle='foo'), IMHandle(network='lol', handle='zomg'
- )])
+ u1 = User(name='u1', handles=[
+ IMHandle(network='blabber', handle='foo'),
+ IMHandle(network='lol', handle='zomg')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1',
- handles=[IMHandle(network='blabber', handle='foo'),
- IMHandle(network='lol', handle='zomg')])])
+ eq_(sess.query(User).all(), [
+ User(name='u1', handles=[IMHandle(network='blabber', handle='foo'),
+ IMHandle(network='lol', handle='zomg')])])
a1 = sess.query(IMHandle).filter(IMHandle.handle == 'zomg'
- ).one()
+ ).one()
eq_(a1, IMHandle(network='lol', handle='zomg'))
eq_(a1.user, User(name='u1'))
+
class DeferredReflectBase(DeclarativeReflectionBase):
+
def teardown(self):
super(DeferredReflectBase, self).teardown()
_DeferredMapperConfig._configs.clear()
Base = None
+
class DeferredReflectPKFKTest(DeferredReflectBase):
+
@classmethod
def define_tables(cls, metadata):
Table("a", metadata,
- Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
- )
+ Column('id', Integer,
+ primary_key=True, test_needs_autoincrement=True),
+ )
Table("b", metadata,
- Column('id', Integer,
- ForeignKey('a.id'),
- primary_key=True),
- Column('x', Integer, primary_key=True)
- )
+ Column('id', Integer,
+ ForeignKey('a.id'),
+ primary_key=True),
+ Column('x', Integer, primary_key=True)
+ )
def test_pk_fk(self):
class B(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'b'
a = relationship("A")
class A(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'a'
decl.DeferredReflection.prepare(testing.db)
+
class DeferredReflectionTest(DeferredReflectBase):
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer,
+ primary_key=True, test_needs_autoincrement=True),
Column('name', String(50)), test_needs_fk=True)
Table(
'addresses',
@@ -195,7 +203,7 @@ class DeferredReflectionTest(DeferredReflectBase):
Column('email', String(50)),
Column('user_id', Integer, ForeignKey('users.id')),
test_needs_fk=True,
- )
+ )
def _roundtrip(self):
@@ -203,25 +211,26 @@ class DeferredReflectionTest(DeferredReflectBase):
Address = Base._decl_class_registry['Address']
u1 = User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1',
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(name='u1',
+ addresses=[Address(email='one'), Address(email='two')])])
a1 = sess.query(Address).filter(Address.email == 'two').one()
eq_(a1, Address(email='two'))
eq_(a1.user, User(name='u1'))
def test_basic_deferred(self):
class User(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'users'
addresses = relationship("Address", backref="user")
class Address(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'addresses'
decl.DeferredReflection.prepare(testing.db)
@@ -249,12 +258,12 @@ class DeferredReflectionTest(DeferredReflectBase):
def test_redefine_fk_double(self):
class User(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'users'
addresses = relationship("Address", backref="user")
class Address(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'addresses'
user_id = Column(Integer, ForeignKey('users.id'))
@@ -262,10 +271,11 @@ class DeferredReflectionTest(DeferredReflectBase):
self._roundtrip()
def test_mapper_args_deferred(self):
- """test that __mapper_args__ is not called until *after* table reflection"""
+ """test that __mapper_args__ is not called until *after*
+ table reflection"""
class User(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'users'
@decl.declared_attr
@@ -296,10 +306,11 @@ class DeferredReflectionTest(DeferredReflectBase):
@testing.requires.predictable_gc
def test_cls_not_strong_ref(self):
class User(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'users'
+
class Address(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'addresses'
eq_(len(_DeferredMapperConfig._configs), 2)
del Address
@@ -308,26 +319,28 @@ class DeferredReflectionTest(DeferredReflectBase):
decl.DeferredReflection.prepare(testing.db)
assert not _DeferredMapperConfig._configs
+
class DeferredSecondaryReflectionTest(DeferredReflectBase):
+
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer,
+ primary_key=True, test_needs_autoincrement=True),
Column('name', String(50)), test_needs_fk=True)
Table('user_items', metadata,
- Column('user_id', ForeignKey('users.id'), primary_key=True),
- Column('item_id', ForeignKey('items.id'), primary_key=True),
- test_needs_fk=True
- )
+ Column('user_id', ForeignKey('users.id'), primary_key=True),
+ Column('item_id', ForeignKey('items.id'), primary_key=True),
+ test_needs_fk=True
+ )
Table('items', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- test_needs_fk=True
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ test_needs_fk=True
+ )
def _roundtrip(self):
@@ -340,8 +353,8 @@ class DeferredSecondaryReflectionTest(DeferredReflectBase):
sess.add(u1)
sess.commit()
- eq_(sess.query(User).all(), [User(name='u1',
- items=[Item(name='i1'), Item(name='i2')])])
+ eq_(sess.query(User).all(), [
+ User(name='u1', items=[Item(name='i1'), Item(name='i2')])])
def test_string_resolution(self):
class User(decl.DeferredReflection, fixtures.ComparableEntity, Base):
@@ -359,7 +372,8 @@ class DeferredSecondaryReflectionTest(DeferredReflectBase):
class User(decl.DeferredReflection, fixtures.ComparableEntity, Base):
__tablename__ = 'users'
- items = relationship("Item", secondary=Table("user_items", Base.metadata))
+ items = relationship("Item",
+ secondary=Table("user_items", Base.metadata))
class Item(decl.DeferredReflection, fixtures.ComparableEntity, Base):
__tablename__ = 'items'
@@ -367,7 +381,9 @@ class DeferredSecondaryReflectionTest(DeferredReflectBase):
decl.DeferredReflection.prepare(testing.db)
self._roundtrip()
+
class DeferredInhReflectBase(DeferredReflectBase):
+
def _roundtrip(self):
Foo = Base._decl_class_registry['Foo']
Bar = Base._decl_class_registry['Bar']
@@ -392,24 +408,25 @@ class DeferredInhReflectBase(DeferredReflectBase):
]
)
+
class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
@classmethod
def define_tables(cls, metadata):
Table("foo", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('type', String(32)),
- Column('data', String(30)),
- Column('bar_data', String(30))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(32)),
+ Column('data', String(30)),
+ Column('bar_data', String(30))
+ )
def test_basic(self):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'foo'
__mapper_args__ = {"polymorphic_on": "type",
- "polymorphic_identity": "foo"}
+ "polymorphic_identity": "foo"}
class Bar(Foo):
__mapper_args__ = {"polymorphic_identity": "bar"}
@@ -419,10 +436,10 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
def test_add_subclass_column(self):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'foo'
__mapper_args__ = {"polymorphic_on": "type",
- "polymorphic_identity": "foo"}
+ "polymorphic_identity": "foo"}
class Bar(Foo):
__mapper_args__ = {"polymorphic_identity": "bar"}
@@ -433,10 +450,10 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
def test_add_pk_column(self):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'foo'
__mapper_args__ = {"polymorphic_on": "type",
- "polymorphic_identity": "foo"}
+ "polymorphic_identity": "foo"}
id = Column(Integer, primary_key=True)
class Bar(Foo):
@@ -445,28 +462,30 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
decl.DeferredReflection.prepare(testing.db)
self._roundtrip()
+
class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
+
@classmethod
def define_tables(cls, metadata):
Table("foo", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('type', String(32)),
- Column('data', String(30)),
- test_needs_fk=True,
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(32)),
+ Column('data', String(30)),
+ test_needs_fk=True,
+ )
Table('bar', metadata,
- Column('id', Integer, ForeignKey('foo.id'), primary_key=True),
- Column('bar_data', String(30)),
- test_needs_fk=True,
- )
+ Column('id', Integer, ForeignKey('foo.id'), primary_key=True),
+ Column('bar_data', String(30)),
+ test_needs_fk=True,
+ )
def test_basic(self):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'foo'
__mapper_args__ = {"polymorphic_on": "type",
- "polymorphic_identity": "foo"}
+ "polymorphic_identity": "foo"}
class Bar(Foo):
__tablename__ = 'bar'
@@ -477,10 +496,10 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
def test_add_subclass_column(self):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'foo'
__mapper_args__ = {"polymorphic_on": "type",
- "polymorphic_identity": "foo"}
+ "polymorphic_identity": "foo"}
class Bar(Foo):
__tablename__ = 'bar'
@@ -492,10 +511,10 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
def test_add_pk_column(self):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'foo'
__mapper_args__ = {"polymorphic_on": "type",
- "polymorphic_identity": "foo"}
+ "polymorphic_identity": "foo"}
id = Column(Integer, primary_key=True)
class Bar(Foo):
@@ -507,10 +526,10 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
def test_add_fk_pk_column(self):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'foo'
__mapper_args__ = {"polymorphic_on": "type",
- "polymorphic_identity": "foo"}
+ "polymorphic_identity": "foo"}
class Bar(Foo):
__tablename__ = 'bar'
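(Illustrative note, not part of the patch.)  The DeferredReflect* tests above defer
column discovery until DeferredReflection.prepare() is called.  A small sketch of
that pattern against a throwaway in-memory SQLite database; the table and model
names are only for demonstration:

    from sqlalchemy import create_engine
    from sqlalchemy.ext.declarative import declarative_base, DeferredReflection

    engine = create_engine('sqlite://')
    engine.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, name VARCHAR(50))")

    Base = declarative_base()

    class Reflected(DeferredReflection, Base):
        __abstract__ = True

    class User(Reflected):
        __tablename__ = 'users'

    # columns are unknown until prepare() reflects them from the engine
    DeferredReflection.prepare(engine)
    assert 'name' in User.__table__.c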
diff --git a/test/ext/test_automap.py b/test/ext/test_automap.py
index f24164cb7..0a57b9caa 100644
--- a/test/ext/test_automap.py
+++ b/test/ext/test_automap.py
@@ -1,13 +1,14 @@
-from sqlalchemy.testing import fixtures, eq_
+from sqlalchemy.testing import fixtures
from ..orm._fixtures import FixtureTest
from sqlalchemy.ext.automap import automap_base
-from sqlalchemy.orm import relationship, interfaces, backref
+from sqlalchemy.orm import relationship, interfaces, configure_mappers
from sqlalchemy.ext.automap import generate_relationship
-from sqlalchemy.testing.mock import Mock, call
+from sqlalchemy.testing.mock import Mock
from sqlalchemy import String, Integer, ForeignKey
from sqlalchemy import testing
from sqlalchemy.testing.schema import Table, Column
+
class AutomapTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
@@ -27,6 +28,7 @@ class AutomapTest(fixtures.MappedTest):
def test_relationship_explicit_override_o2m(self):
Base = automap_base(metadata=self.metadata)
prop = relationship("addresses", collection_class=set)
+
class User(Base):
__tablename__ = 'users'
@@ -44,6 +46,7 @@ class AutomapTest(fixtures.MappedTest):
Base = automap_base(metadata=self.metadata)
prop = relationship("users")
+
class Address(Base):
__tablename__ = 'addresses'
@@ -57,7 +60,6 @@ class AutomapTest(fixtures.MappedTest):
u1 = User(name='u1', address_collection=[a1])
assert a1.users is u1
-
def test_relationship_self_referential(self):
Base = automap_base(metadata=self.metadata)
Base.prepare()
@@ -75,17 +77,19 @@ class AutomapTest(fixtures.MappedTest):
def classname_for_table(base, tablename, table):
return str("cls_" + tablename)
- def name_for_scalar_relationship(base, local_cls, referred_cls, constraint):
+ def name_for_scalar_relationship(
+ base, local_cls, referred_cls, constraint):
return "scalar_" + referred_cls.__name__
- def name_for_collection_relationship(base, local_cls, referred_cls, constraint):
+ def name_for_collection_relationship(
+ base, local_cls, referred_cls, constraint):
return "coll_" + referred_cls.__name__
Base.prepare(
- classname_for_table=classname_for_table,
- name_for_scalar_relationship=name_for_scalar_relationship,
- name_for_collection_relationship=name_for_collection_relationship
- )
+ classname_for_table=classname_for_table,
+ name_for_scalar_relationship=name_for_scalar_relationship,
+ name_for_collection_relationship=name_for_collection_relationship
+ )
User = Base.classes.cls_users
Address = Base.classes.cls_addresses
@@ -113,9 +117,10 @@ class AutomapTest(fixtures.MappedTest):
class Order(Base):
__tablename__ = 'orders'
- items_collection = relationship("items",
- secondary="order_items",
- collection_class=set)
+ items_collection = relationship(
+ "items",
+ secondary="order_items",
+ collection_class=set)
Base.prepare()
Item = Base.classes['items']
@@ -133,41 +138,115 @@ class AutomapTest(fixtures.MappedTest):
Base = automap_base(metadata=self.metadata)
mock = Mock()
- def _gen_relationship(base, direction, return_fn, attrname,
- local_cls, referred_cls, **kw):
+
+ def _gen_relationship(
+ base, direction, return_fn, attrname,
+ local_cls, referred_cls, **kw):
mock(base, direction, attrname)
- return generate_relationship(base, direction, return_fn,
- attrname, local_cls, referred_cls, **kw)
+ return generate_relationship(
+ base, direction, return_fn,
+ attrname, local_cls, referred_cls, **kw)
Base.prepare(generate_relationship=_gen_relationship)
assert set(tuple(c[1]) for c in mock.mock_calls).issuperset([
- (Base, interfaces.MANYTOONE, "nodes"),
- (Base, interfaces.MANYTOMANY, "keywords_collection"),
- (Base, interfaces.MANYTOMANY, "items_collection"),
- (Base, interfaces.MANYTOONE, "users"),
- (Base, interfaces.ONETOMANY, "addresses_collection"),
+ (Base, interfaces.MANYTOONE, "nodes"),
+ (Base, interfaces.MANYTOMANY, "keywords_collection"),
+ (Base, interfaces.MANYTOMANY, "items_collection"),
+ (Base, interfaces.MANYTOONE, "users"),
+ (Base, interfaces.ONETOMANY, "addresses_collection"),
])
+class CascadeTest(fixtures.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ "a", metadata,
+ Column('id', Integer, primary_key=True)
+ )
+ Table(
+ "b", metadata,
+ Column('id', Integer, primary_key=True),
+ Column('aid', ForeignKey('a.id'), nullable=True)
+ )
+ Table(
+ "c", metadata,
+ Column('id', Integer, primary_key=True),
+ Column('aid', ForeignKey('a.id'), nullable=False)
+ )
+ Table(
+ "d", metadata,
+ Column('id', Integer, primary_key=True),
+ Column(
+ 'aid', ForeignKey('a.id', ondelete="cascade"), nullable=False)
+ )
+ Table(
+ "e", metadata,
+ Column('id', Integer, primary_key=True),
+ Column(
+ 'aid', ForeignKey('a.id', ondelete="set null"),
+ nullable=True)
+ )
+
+ def test_o2m_relationship_cascade(self):
+ Base = automap_base(metadata=self.metadata)
+ Base.prepare()
+
+ configure_mappers()
+
+ b_rel = Base.classes.a.b_collection
+ assert not b_rel.property.cascade.delete
+ assert not b_rel.property.cascade.delete_orphan
+ assert not b_rel.property.passive_deletes
+
+ assert b_rel.property.cascade.save_update
+
+ c_rel = Base.classes.a.c_collection
+ assert c_rel.property.cascade.delete
+ assert c_rel.property.cascade.delete_orphan
+ assert not c_rel.property.passive_deletes
+
+ assert c_rel.property.cascade.save_update
+
+ d_rel = Base.classes.a.d_collection
+ assert d_rel.property.cascade.delete
+ assert d_rel.property.cascade.delete_orphan
+ assert d_rel.property.passive_deletes
+
+ assert d_rel.property.cascade.save_update
+
+ e_rel = Base.classes.a.e_collection
+ assert not e_rel.property.cascade.delete
+ assert not e_rel.property.cascade.delete_orphan
+ assert e_rel.property.passive_deletes
+
+ assert e_rel.property.cascade.save_update
+
+
class AutomapInhTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
- Table('single', metadata,
- Column('id', Integer, primary_key=True),
- Column('type', String(10)),
- test_needs_fk=True
- )
-
- Table('joined_base', metadata,
- Column('id', Integer, primary_key=True),
- Column('type', String(10)),
- test_needs_fk=True
- )
-
- Table('joined_inh', metadata,
- Column('id', Integer, ForeignKey('joined_base.id'), primary_key=True),
- test_needs_fk=True
- )
+ Table(
+ 'single', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('type', String(10)),
+ test_needs_fk=True
+ )
+
+ Table(
+ 'joined_base', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('type', String(10)),
+ test_needs_fk=True
+ )
+
+ Table(
+ 'joined_inh', metadata,
+ Column(
+ 'id', Integer,
+ ForeignKey('joined_base.id'), primary_key=True),
+ test_needs_fk=True
+ )
FixtureTest.define_tables(metadata)
@@ -179,7 +258,8 @@ class AutomapInhTest(fixtures.MappedTest):
type = Column(String)
- __mapper_args__ = {"polymorphic_identity": "u0",
+ __mapper_args__ = {
+ "polymorphic_identity": "u0",
"polymorphic_on": type}
class SubUser1(Single):
@@ -200,14 +280,14 @@ class AutomapInhTest(fixtures.MappedTest):
type = Column(String)
- __mapper_args__ = {"polymorphic_identity": "u0",
+ __mapper_args__ = {
+ "polymorphic_identity": "u0",
"polymorphic_on": type}
class SubJoined(Joined):
__tablename__ = 'joined_inh'
__mapper_args__ = {"polymorphic_identity": "u1"}
-
Base.prepare(engine=testing.db, reflect=True)
assert SubJoined.__mapper__.inherits is Joined.__mapper__
@@ -217,6 +297,9 @@ class AutomapInhTest(fixtures.MappedTest):
def test_conditional_relationship(self):
Base = automap_base()
+
def _gen_relationship(*arg, **kw):
return None
- Base.prepare(engine=testing.db, reflect=True, generate_relationship=_gen_relationship)
+ Base.prepare(
+ engine=testing.db, reflect=True,
+ generate_relationship=_gen_relationship)
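(Illustrative note, not part of the patch.)  The new CascadeTest asserts that automap
derives relationship cascade rules from the foreign key: a NOT NULL FK yields a
delete, delete-orphan cascade, and ON DELETE CASCADE / SET NULL additionally set
passive_deletes.  A rough sketch of the setup, with throwaway tables a/b:

    from sqlalchemy import create_engine
    from sqlalchemy.ext.automap import automap_base

    engine = create_engine('sqlite://')
    engine.execute("CREATE TABLE a (id INTEGER PRIMARY KEY)")
    engine.execute(
        "CREATE TABLE b (id INTEGER PRIMARY KEY, "
        "aid INTEGER NOT NULL REFERENCES a (id))")

    Base = automap_base()
    Base.prepare(engine, reflect=True)

    A = Base.classes.a
    # automap names the one-to-many collection "<classname>_collection";
    # per CascadeTest above, the NOT NULL FK also gives that relationship
    # a delete, delete-orphan cascade
    assert hasattr(A, 'b_collection')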
diff --git a/test/ext/test_orderinglist.py b/test/ext/test_orderinglist.py
index 3223c8048..0eba137e7 100644
--- a/test/ext/test_orderinglist.py
+++ b/test/ext/test_orderinglist.py
@@ -349,6 +349,28 @@ class OrderingListTest(fixtures.TestBase):
self.assert_(srt.bullets[1].text == 'new 2')
self.assert_(srt.bullets[2].text == '3')
+ def test_replace_two(self):
+ """test #3191"""
+
+ self._setup(ordering_list('position', reorder_on_append=True))
+
+ s1 = Slide('Slide #1')
+
+ b1, b2, b3, b4 = Bullet('1'), Bullet('2'), Bullet('3'), Bullet('4')
+ s1.bullets = [b1, b2, b3]
+
+ eq_(
+ [b.position for b in s1.bullets],
+ [0, 1, 2]
+ )
+
+ s1.bullets = [b4, b2, b1]
+ eq_(
+ [b.position for b in s1.bullets],
+ [0, 1, 2]
+ )
+
+
def test_funky_ordering(self):
class Pos(object):
def __init__(self):
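(Illustrative note, not part of the patch.)  test_replace_two above (ticket #3191)
covers wholesale replacement of an ordering_list collection.  A minimal sketch of
the basic mechanism, with hypothetical Slide/Bullet models:

    from sqlalchemy import Column, ForeignKey, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.ext.orderinglist import ordering_list
    from sqlalchemy.orm import relationship

    Base = declarative_base()

    class Slide(Base):
        __tablename__ = 'slide'
        id = Column(Integer, primary_key=True)
        bullets = relationship(
            "Bullet", order_by="Bullet.position",
            collection_class=ordering_list('position', reorder_on_append=True))

    class Bullet(Base):
        __tablename__ = 'bullet'
        id = Column(Integer, primary_key=True)
        slide_id = Column(ForeignKey('slide.id'))
        position = Column(Integer)
        text = Column(String(50))

    s = Slide()
    s.bullets = [Bullet(text='a'), Bullet(text='b'), Bullet(text='c')]
    # positions are assigned 0..n-1 as the collection is populated
    assert [b.position for b in s.bullets] == [0, 1, 2]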
diff --git a/test/orm/inheritance/test_single.py b/test/orm/inheritance/test_single.py
index be42cce52..dbbe4c435 100644
--- a/test/orm/inheritance/test_single.py
+++ b/test/orm/inheritance/test_single.py
@@ -386,7 +386,31 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
]
)
- def test_outer_join(self):
+ def test_of_type_aliased_fromjoinpoint(self):
+ Company, Employee, Engineer = self.classes.Company,\
+ self.classes.Employee,\
+ self.classes.Engineer
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+ 'employee':relationship(Employee)
+ })
+ mapper(Employee, employees, polymorphic_on=employees.c.type)
+ mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
+
+ sess = create_session()
+ self.assert_compile(
+ sess.query(Company).outerjoin(
+ Company.employee.of_type(Engineer),
+ aliased=True, from_joinpoint=True),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name FROM companies "
+ "LEFT OUTER JOIN employees AS employees_1 ON "
+ "companies.company_id = employees_1.company_id "
+ "AND employees_1.type IN (:type_1)"
+ )
+
+ def test_outer_join_prop(self):
Company, Employee, Engineer = self.classes.Company,\
self.classes.Employee,\
self.classes.Engineer
@@ -407,7 +431,7 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
"= employees.company_id AND employees.type IN (:type_1)"
)
- def test_outer_join_alias(self):
+ def test_outer_join_prop_alias(self):
Company, Employee, Engineer = self.classes.Company,\
self.classes.Employee,\
self.classes.Engineer
@@ -431,6 +455,184 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
)
+ def test_outer_join_literal_onclause(self):
+ Company, Employee, Engineer = self.classes.Company,\
+ self.classes.Employee,\
+ self.classes.Engineer
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+ 'engineers':relationship(Engineer)
+ })
+ mapper(Employee, employees, polymorphic_on=employees.c.type)
+ mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
+
+ sess = create_session()
+ self.assert_compile(
+ sess.query(Company, Engineer).outerjoin(
+ Engineer, Company.company_id == Engineer.company_id),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name, "
+ "employees.employee_id AS employees_employee_id, "
+ "employees.name AS employees_name, "
+ "employees.manager_data AS employees_manager_data, "
+ "employees.engineer_info AS employees_engineer_info, "
+ "employees.type AS employees_type, "
+ "employees.company_id AS employees_company_id FROM companies "
+ "LEFT OUTER JOIN employees ON "
+ "companies.company_id = employees.company_id "
+ "AND employees.type IN (:type_1)"
+ )
+
+ def test_outer_join_literal_onclause_alias(self):
+ Company, Employee, Engineer = self.classes.Company,\
+ self.classes.Employee,\
+ self.classes.Engineer
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+ 'engineers':relationship(Engineer)
+ })
+ mapper(Employee, employees, polymorphic_on=employees.c.type)
+ mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
+
+ eng_alias = aliased(Engineer)
+ sess = create_session()
+ self.assert_compile(
+ sess.query(Company, eng_alias).outerjoin(
+ eng_alias, Company.company_id == eng_alias.company_id),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name, "
+ "employees_1.employee_id AS employees_1_employee_id, "
+ "employees_1.name AS employees_1_name, "
+ "employees_1.manager_data AS employees_1_manager_data, "
+ "employees_1.engineer_info AS employees_1_engineer_info, "
+ "employees_1.type AS employees_1_type, "
+ "employees_1.company_id AS employees_1_company_id "
+ "FROM companies LEFT OUTER JOIN employees AS employees_1 ON "
+ "companies.company_id = employees_1.company_id "
+ "AND employees_1.type IN (:type_1)"
+ )
+
+ def test_outer_join_no_onclause(self):
+ Company, Employee, Engineer = self.classes.Company,\
+ self.classes.Employee,\
+ self.classes.Engineer
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+ 'engineers':relationship(Engineer)
+ })
+ mapper(Employee, employees, polymorphic_on=employees.c.type)
+ mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
+
+ sess = create_session()
+ self.assert_compile(
+ sess.query(Company, Engineer).outerjoin(
+ Engineer),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name, "
+ "employees.employee_id AS employees_employee_id, "
+ "employees.name AS employees_name, "
+ "employees.manager_data AS employees_manager_data, "
+ "employees.engineer_info AS employees_engineer_info, "
+ "employees.type AS employees_type, "
+ "employees.company_id AS employees_company_id "
+ "FROM companies LEFT OUTER JOIN employees ON "
+ "companies.company_id = employees.company_id "
+ "AND employees.type IN (:type_1)"
+ )
+
+ def test_outer_join_no_onclause_alias(self):
+ Company, Employee, Engineer = self.classes.Company,\
+ self.classes.Employee,\
+ self.classes.Engineer
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+ 'engineers':relationship(Engineer)
+ })
+ mapper(Employee, employees, polymorphic_on=employees.c.type)
+ mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
+
+ eng_alias = aliased(Engineer)
+ sess = create_session()
+ self.assert_compile(
+ sess.query(Company, eng_alias).outerjoin(
+ eng_alias),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name, "
+ "employees_1.employee_id AS employees_1_employee_id, "
+ "employees_1.name AS employees_1_name, "
+ "employees_1.manager_data AS employees_1_manager_data, "
+ "employees_1.engineer_info AS employees_1_engineer_info, "
+ "employees_1.type AS employees_1_type, "
+ "employees_1.company_id AS employees_1_company_id "
+ "FROM companies LEFT OUTER JOIN employees AS employees_1 ON "
+ "companies.company_id = employees_1.company_id "
+ "AND employees_1.type IN (:type_1)"
+ )
+
+ def test_no_aliasing_from_overlap(self):
+ # test [ticket:3233]
+
+ Company, Employee, Engineer, Manager = self.classes.Company,\
+ self.classes.Employee,\
+ self.classes.Engineer,\
+ self.classes.Manager
+
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+ 'employees': relationship(Employee, backref="company")
+ })
+ mapper(Employee, employees, polymorphic_on=employees.c.type)
+ mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
+ mapper(Manager, inherits=Employee, polymorphic_identity='manager')
+
+ s = create_session()
+
+ q1 = s.query(Engineer).\
+ join(Engineer.company).\
+ join(Manager, Company.employees)
+
+ q2 = s.query(Engineer).\
+ join(Engineer.company).\
+ join(Manager, Company.company_id == Manager.company_id)
+
+ q3 = s.query(Engineer).\
+ join(Engineer.company).\
+ join(Manager, Company.employees.of_type(Manager))
+
+ q4 = s.query(Engineer).\
+ join(Company, Company.company_id == Engineer.company_id).\
+ join(Manager, Company.employees.of_type(Manager))
+
+ q5 = s.query(Engineer).\
+ join(Company, Company.company_id == Engineer.company_id).\
+ join(Manager, Company.company_id == Manager.company_id)
+
+ # note that the query is incorrect SQL; we JOIN to
+ # employees twice. However, this is what's expected so we seek
+ # to be consistent; previously, aliasing would sneak in due to the
+ # nature of the "left" side.
+ for q in [q1, q2, q3, q4, q5]:
+ self.assert_compile(
+ q,
+ "SELECT employees.employee_id AS employees_employee_id, "
+ "employees.name AS employees_name, "
+ "employees.manager_data AS employees_manager_data, "
+ "employees.engineer_info AS employees_engineer_info, "
+ "employees.type AS employees_type, "
+ "employees.company_id AS employees_company_id "
+ "FROM employees JOIN companies "
+ "ON companies.company_id = employees.company_id "
+ "JOIN employees "
+ "ON companies.company_id = employees.company_id "
+ "AND employees.type IN (:type_1) "
+ "WHERE employees.type IN (:type_2)"
+ )
+
def test_relationship_to_subclass(self):
JuniorEngineer, Company, companies, Manager, \
Employee, employees, Engineer = (self.classes.JuniorEngineer,
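(Illustrative note, not part of the patch.)  The new test_outer_join_* cases above
assert that joining to a single-table-inheritance subclass places the discriminator
filter ("employees.type IN (...)") in the ON clause rather than the WHERE clause.
A compact sketch of such a mapping; model and column names are invented here, and
the exact SQL is whatever the tests above pin down:

    from sqlalchemy import Column, ForeignKey, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session

    Base = declarative_base()

    class Company(Base):
        __tablename__ = 'companies'
        company_id = Column(Integer, primary_key=True)

    class Employee(Base):
        __tablename__ = 'employees'
        employee_id = Column(Integer, primary_key=True)
        company_id = Column(ForeignKey('companies.company_id'))
        type = Column(String(20))
        __mapper_args__ = {'polymorphic_on': type}

    class Engineer(Employee):
        __mapper_args__ = {'polymorphic_identity': 'engineer'}

    q = Session().query(Company).outerjoin(
        Engineer, Company.company_id == Engineer.company_id)
    print(q)  # LEFT OUTER JOIN employees ON ... AND employees.type IN (:type_1)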
diff --git a/test/orm/test_assorted_eager.py b/test/orm/test_assorted_eager.py
index 2bee3cbd6..48faa172f 100644
--- a/test/orm/test_assorted_eager.py
+++ b/test/orm/test_assorted_eager.py
@@ -82,8 +82,8 @@ class EagerTest(fixtures.MappedTest):
mapper(Category, categories)
mapper(Option, options, properties=dict(
- owner=relationship(Owner),
- test=relationship(Thing)))
+ owner=relationship(Owner, viewonly=True),
+ test=relationship(Thing, viewonly=True)))
mapper(Thing, tests, properties=dict(
owner=relationship(Owner, backref='tests'),
diff --git a/test/orm/test_attributes.py b/test/orm/test_attributes.py
index 46d5f86e5..9c1f7a985 100644
--- a/test/orm/test_attributes.py
+++ b/test/orm/test_attributes.py
@@ -2522,6 +2522,53 @@ class ListenerTest(fixtures.ORMTest):
f1.barset.add(b1)
assert f1.barset.pop().data == 'some bar appended'
+ def test_named(self):
+ canary = Mock()
+
+ class Foo(object):
+ pass
+
+ class Bar(object):
+ pass
+
+ instrumentation.register_class(Foo)
+ instrumentation.register_class(Bar)
+ attributes.register_attribute(
+ Foo, 'data', uselist=False,
+ useobject=False)
+ attributes.register_attribute(
+ Foo, 'barlist', uselist=True,
+ useobject=True)
+
+ event.listen(Foo.data, 'set', canary.set, named=True)
+ event.listen(Foo.barlist, 'append', canary.append, named=True)
+ event.listen(Foo.barlist, 'remove', canary.remove, named=True)
+
+ f1 = Foo()
+ b1 = Bar()
+ f1.data = 5
+ f1.barlist.append(b1)
+ f1.barlist.remove(b1)
+ eq_(
+ canary.mock_calls,
+ [
+ call.set(
+ oldvalue=attributes.NO_VALUE,
+ initiator=attributes.Event(
+ Foo.data.impl, attributes.OP_REPLACE),
+ target=f1, value=5),
+ call.append(
+ initiator=attributes.Event(
+ Foo.barlist.impl, attributes.OP_APPEND),
+ target=f1,
+ value=b1),
+ call.remove(
+ initiator=attributes.Event(
+ Foo.barlist.impl, attributes.OP_REMOVE),
+ target=f1,
+ value=b1)]
+ )
+
def test_collection_link_events(self):
class Foo(object):
pass
@@ -2559,9 +2606,6 @@ class ListenerTest(fixtures.ORMTest):
)
-
-
-
def test_none_on_collection_event(self):
"""test that append/remove of None in collections emits events.
diff --git a/test/orm/test_bind.py b/test/orm/test_bind.py
index 0d869130b..33cd66ebc 100644
--- a/test/orm/test_bind.py
+++ b/test/orm/test_bind.py
@@ -1,14 +1,206 @@
-from sqlalchemy.testing import assert_raises, assert_raises_message
-from sqlalchemy import MetaData, Integer
+from sqlalchemy.testing import assert_raises_message
+from sqlalchemy import MetaData, Integer, ForeignKey
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.schema import Column
from sqlalchemy.orm import mapper, create_session
import sqlalchemy as sa
from sqlalchemy import testing
-from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import fixtures, eq_, engines, is_
+from sqlalchemy.orm import relationship, Session, backref, sessionmaker
+from test.orm import _fixtures
+from sqlalchemy.testing.mock import Mock
-class BindTest(fixtures.MappedTest):
+class BindIntegrationTest(_fixtures.FixtureTest):
+ run_inserts = None
+
+ def test_mapped_binds(self):
+ Address, addresses, users, User = (self.classes.Address,
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
+
+ # ensure tables are unbound
+ m2 = sa.MetaData()
+ users_unbound = users.tometadata(m2)
+ addresses_unbound = addresses.tometadata(m2)
+
+ mapper(Address, addresses_unbound)
+ mapper(User, users_unbound, properties={
+ 'addresses': relationship(Address,
+ backref=backref("user", cascade="all"),
+ cascade="all")})
+
+ sess = Session(binds={User: self.metadata.bind,
+ Address: self.metadata.bind})
+
+ u1 = User(id=1, name='ed')
+ sess.add(u1)
+ eq_(sess.query(User).filter(User.id == 1).all(),
+ [User(id=1, name='ed')])
+
+ # test expression binding
+
+ sess.execute(users_unbound.insert(), params=dict(id=2,
+ name='jack'))
+ eq_(sess.execute(users_unbound.select(users_unbound.c.id
+ == 2)).fetchall(), [(2, 'jack')])
+
+ eq_(sess.execute(users_unbound.select(User.id == 2)).fetchall(),
+ [(2, 'jack')])
+
+ sess.execute(users_unbound.delete())
+ eq_(sess.execute(users_unbound.select()).fetchall(), [])
+
+ sess.close()
+
+ def test_table_binds(self):
+ Address, addresses, users, User = (self.classes.Address,
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
+
+ # ensure tables are unbound
+ m2 = sa.MetaData()
+ users_unbound = users.tometadata(m2)
+ addresses_unbound = addresses.tometadata(m2)
+
+ mapper(Address, addresses_unbound)
+ mapper(User, users_unbound, properties={
+ 'addresses': relationship(Address,
+ backref=backref("user", cascade="all"),
+ cascade="all")})
+
+ Session = sessionmaker(binds={users_unbound: self.metadata.bind,
+ addresses_unbound: self.metadata.bind})
+ sess = Session()
+
+ u1 = User(id=1, name='ed')
+ sess.add(u1)
+ eq_(sess.query(User).filter(User.id == 1).all(),
+ [User(id=1, name='ed')])
+
+ sess.execute(users_unbound.insert(), params=dict(id=2, name='jack'))
+
+ eq_(sess.execute(users_unbound.select(users_unbound.c.id
+ == 2)).fetchall(), [(2, 'jack')])
+
+ eq_(sess.execute(users_unbound.select(User.id == 2)).fetchall(),
+ [(2, 'jack')])
+
+ sess.execute(users_unbound.delete())
+ eq_(sess.execute(users_unbound.select()).fetchall(), [])
+
+ sess.close()
+
+ def test_bind_from_metadata(self):
+ users, User = self.tables.users, self.classes.User
+
+ mapper(User, users)
+
+ session = create_session()
+ session.execute(users.insert(), dict(name='Johnny'))
+
+ assert len(session.query(User).filter_by(name='Johnny').all()) == 1
+
+ session.execute(users.delete())
+
+ assert len(session.query(User).filter_by(name='Johnny').all()) == 0
+ session.close()
+
+ def test_bind_arguments(self):
+ users, Address, addresses, User = (self.tables.users,
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
+
+ mapper(User, users)
+ mapper(Address, addresses)
+
+ e1 = engines.testing_engine()
+ e2 = engines.testing_engine()
+ e3 = engines.testing_engine()
+
+ sess = Session(e3)
+ sess.bind_mapper(User, e1)
+ sess.bind_mapper(Address, e2)
+
+ assert sess.connection().engine is e3
+ assert sess.connection(bind=e1).engine is e1
+ assert sess.connection(mapper=Address, bind=e1).engine is e1
+ assert sess.connection(mapper=Address).engine is e2
+ assert sess.connection(clause=addresses.select()).engine is e2
+ assert sess.connection(mapper=User,
+ clause=addresses.select()).engine is e1
+ assert sess.connection(mapper=User,
+ clause=addresses.select(),
+ bind=e2).engine is e2
+
+ sess.close()
+
+ @engines.close_open_connections
+ def test_bound_connection(self):
+ users, User = self.tables.users, self.classes.User
+
+ mapper(User, users)
+ c = testing.db.connect()
+ sess = create_session(bind=c)
+ sess.begin()
+ transaction = sess.transaction
+ u = User(name='u1')
+ sess.add(u)
+ sess.flush()
+ assert transaction._connection_for_bind(testing.db) \
+ is transaction._connection_for_bind(c) is c
+
+ assert_raises_message(sa.exc.InvalidRequestError,
+ 'Session already has a Connection '
+ 'associated',
+ transaction._connection_for_bind,
+ testing.db.connect())
+ transaction.rollback()
+ assert len(sess.query(User).all()) == 0
+ sess.close()
+
+ def test_bound_connection_transactional(self):
+ User, users = self.classes.User, self.tables.users
+
+ mapper(User, users)
+ c = testing.db.connect()
+
+ sess = create_session(bind=c, autocommit=False)
+ u = User(name='u1')
+ sess.add(u)
+ sess.flush()
+ sess.close()
+ assert not c.in_transaction()
+ assert c.scalar("select count(1) from users") == 0
+
+ sess = create_session(bind=c, autocommit=False)
+ u = User(name='u2')
+ sess.add(u)
+ sess.flush()
+ sess.commit()
+ assert not c.in_transaction()
+ assert c.scalar("select count(1) from users") == 1
+ c.execute("delete from users")
+ assert c.scalar("select count(1) from users") == 0
+
+ c = testing.db.connect()
+
+ trans = c.begin()
+ sess = create_session(bind=c, autocommit=True)
+ u = User(name='u3')
+ sess.add(u)
+ sess.flush()
+ assert c.in_transaction()
+ trans.commit()
+ assert not c.in_transaction()
+ assert c.scalar("select count(1) from users") == 1
+
+
+class SessionBindTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table('test_table', metadata,
@@ -60,3 +252,216 @@ class BindTest(fixtures.MappedTest):
sess.flush)
+class GetBindTest(fixtures.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'base_table', metadata,
+ Column('id', Integer, primary_key=True)
+ )
+ Table(
+ 'w_mixin_table', metadata,
+ Column('id', Integer, primary_key=True)
+ )
+ Table(
+ 'joined_sub_table', metadata,
+ Column('id', ForeignKey('base_table.id'), primary_key=True)
+ )
+ Table(
+ 'concrete_sub_table', metadata,
+ Column('id', Integer, primary_key=True)
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class MixinOne(cls.Basic):
+ pass
+
+ class BaseClass(cls.Basic):
+ pass
+
+ class ClassWMixin(MixinOne, cls.Basic):
+ pass
+
+ class JoinedSubClass(BaseClass):
+ pass
+
+ class ConcreteSubClass(BaseClass):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ mapper(cls.classes.ClassWMixin, cls.tables.w_mixin_table)
+ mapper(cls.classes.BaseClass, cls.tables.base_table)
+ mapper(
+ cls.classes.JoinedSubClass,
+ cls.tables.joined_sub_table, inherits=cls.classes.BaseClass)
+ mapper(
+ cls.classes.ConcreteSubClass,
+ cls.tables.concrete_sub_table, inherits=cls.classes.BaseClass,
+ concrete=True)
+
+ def _fixture(self, binds):
+ return Session(binds=binds)
+
+ def test_fallback_table_metadata(self):
+ session = self._fixture({})
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ testing.db
+ )
+
+ def test_bind_base_table_base_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.tables.base_table: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+
+ def test_bind_base_table_joined_sub_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.tables.base_table: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+ is_(
+ session.get_bind(self.classes.JoinedSubClass),
+ base_class_bind
+ )
+
+ def test_bind_joined_sub_table_joined_sub_class(self):
+ base_class_bind = Mock(name='base')
+ joined_class_bind = Mock(name='joined')
+ session = self._fixture({
+ self.tables.base_table: base_class_bind,
+ self.tables.joined_sub_table: joined_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+ # joined table inheritance has to query based on the base
+ # table, so this is what we expect
+ is_(
+ session.get_bind(self.classes.JoinedSubClass),
+ base_class_bind
+ )
+
+ def test_bind_base_table_concrete_sub_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.tables.base_table: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.ConcreteSubClass),
+ testing.db
+ )
+
+ def test_bind_sub_table_concrete_sub_class(self):
+ base_class_bind = Mock(name='base')
+ concrete_sub_bind = Mock(name='concrete')
+
+ session = self._fixture({
+ self.tables.base_table: base_class_bind,
+ self.tables.concrete_sub_table: concrete_sub_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+ is_(
+ session.get_bind(self.classes.ConcreteSubClass),
+ concrete_sub_bind
+ )
+
+ def test_bind_base_class_base_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.classes.BaseClass: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+
+ def test_bind_mixin_class_simple_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.classes.MixinOne: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.ClassWMixin),
+ base_class_bind
+ )
+
+ def test_bind_base_class_joined_sub_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.classes.BaseClass: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.JoinedSubClass),
+ base_class_bind
+ )
+
+ def test_bind_joined_sub_class_joined_sub_class(self):
+ base_class_bind = Mock(name='base')
+ joined_class_bind = Mock(name='joined')
+ session = self._fixture({
+ self.classes.BaseClass: base_class_bind,
+ self.classes.JoinedSubClass: joined_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+ is_(
+ session.get_bind(self.classes.JoinedSubClass),
+ joined_class_bind
+ )
+
+ def test_bind_base_class_concrete_sub_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.classes.BaseClass: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.ConcreteSubClass),
+ base_class_bind
+ )
+
+ def test_bind_sub_class_concrete_sub_class(self):
+ base_class_bind = Mock(name='base')
+ concrete_sub_bind = Mock(name='concrete')
+
+ session = self._fixture({
+ self.classes.BaseClass: base_class_bind,
+ self.classes.ConcreteSubClass: concrete_sub_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+ is_(
+ session.get_bind(self.classes.ConcreteSubClass),
+ concrete_sub_bind
+ )
+
+
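(Illustrative note, not part of the patch.)  GetBindTest above checks how
Session.get_bind() resolves classes, base classes and mixins against the binds
dictionary.  A minimal sketch of per-class engine routing, with throwaway models
A and B and two in-memory engines:

    from sqlalchemy import Column, Integer, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session

    Base = declarative_base()

    class A(Base):
        __tablename__ = 'a'
        id = Column(Integer, primary_key=True)

    class B(Base):
        __tablename__ = 'b'
        id = Column(Integer, primary_key=True)

    e1 = create_engine('sqlite://')
    e2 = create_engine('sqlite://')

    sess = Session(binds={A: e1, B: e2})
    # get_bind() consults the binds mapping before any metadata-level bind
    assert sess.get_bind(A) is e1
    assert sess.get_bind(B) is e2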
diff --git a/test/orm/test_cascade.py b/test/orm/test_cascade.py
index bd6a17286..e39911d0f 100644
--- a/test/orm/test_cascade.py
+++ b/test/orm/test_cascade.py
@@ -1,3 +1,4 @@
+import copy
from sqlalchemy.testing import assert_raises, assert_raises_message
from sqlalchemy import Integer, String, ForeignKey, Sequence, \
@@ -13,6 +14,7 @@ from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
+
class CascadeArgTest(fixtures.MappedTest):
run_inserts = None
run_create_tables = None
@@ -85,6 +87,12 @@ class CascadeArgTest(fixtures.MappedTest):
orm_util.CascadeOptions("all, delete-orphan"),
frozenset)
+ def test_cascade_deepcopy(self):
+ old = orm_util.CascadeOptions("all, delete-orphan")
+ new = copy.deepcopy(old)
+ eq_(old, new)
+
+
def test_cascade_assignable(self):
User, Address = self.classes.User, self.classes.Address
users, addresses = self.tables.users, self.tables.addresses
diff --git a/test/orm/test_collection.py b/test/orm/test_collection.py
index f94c742b3..82331b9af 100644
--- a/test/orm/test_collection.py
+++ b/test/orm/test_collection.py
@@ -2191,6 +2191,23 @@ class InstrumentationTest(fixtures.ORMTest):
f1.attr = l2
eq_(canary, [adapter_1, f1.attr._sa_adapter, None])
+ def test_referenced_by_owner(self):
+
+ class Foo(object):
+ pass
+
+ instrumentation.register_class(Foo)
+ attributes.register_attribute(
+ Foo, 'attr', uselist=True, useobject=True)
+
+ f1 = Foo()
+ f1.attr.append(3)
+
+ adapter = collections.collection_adapter(f1.attr)
+ assert adapter._referenced_by_owner
+
+ f1.attr = []
+ assert not adapter._referenced_by_owner
diff --git a/test/orm/test_eager_relations.py b/test/orm/test_eager_relations.py
index 214b592b5..4c6d9bbe1 100644
--- a/test/orm/test_eager_relations.py
+++ b/test/orm/test_eager_relations.py
@@ -1253,8 +1253,9 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
orders=relationship(Order, lazy=False, order_by=orders.c.id),
))
q = create_session().query(User)
- self.l = q.all()
- eq_(self.static.user_all_result, q.order_by(User.id).all())
+ def go():
+ eq_(self.static.user_all_result, q.order_by(User.id).all())
+ self.assert_sql_count(testing.db, go, 1)
def test_against_select(self):
"""test eager loading of a mapper which is against a select"""
diff --git a/test/orm/test_events.py b/test/orm/test_events.py
index e6efd6fb9..904293102 100644
--- a/test/orm/test_events.py
+++ b/test/orm/test_events.py
@@ -112,6 +112,7 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
mapper(User, users)
canary = self.listen_all(User)
+ named_canary = self.listen_all(User, named=True)
sess = create_session()
u = User(name='u1')
@@ -125,13 +126,15 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
sess.flush()
sess.delete(u)
sess.flush()
- eq_(canary,
- ['init', 'before_insert',
- 'after_insert', 'expire',
- 'refresh',
- 'load',
- 'before_update', 'after_update', 'before_delete',
- 'after_delete'])
+ expected = [
+ 'init', 'before_insert',
+ 'after_insert', 'expire',
+ 'refresh',
+ 'load',
+ 'before_update', 'after_update', 'before_delete',
+ 'after_delete']
+ eq_(canary, expected)
+ eq_(named_canary, expected)
def test_insert_before_configured(self):
users, User = self.tables.users, self.classes.User
@@ -1193,6 +1196,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
'before_commit', 'after_commit','after_transaction_end']
)
+
def test_rollback_hook(self):
User, users = self.classes.User, self.tables.users
sess, canary = self._listener_fixture()
diff --git a/test/orm/test_joins.py b/test/orm/test_joins.py
index 40bc01b5d..eba47dbec 100644
--- a/test/orm/test_joins.py
+++ b/test/orm/test_joins.py
@@ -361,6 +361,27 @@ class InheritedJoinTest(fixtures.MappedTest, AssertsCompiledSQL):
)
+class JoinOnSynonymTest(_fixtures.FixtureTest, AssertsCompiledSQL):
+ @classmethod
+ def setup_mappers(cls):
+ User = cls.classes.User
+ Address = cls.classes.Address
+ users, addresses = (cls.tables.users, cls.tables.addresses)
+ mapper(User, users, properties={
+ 'addresses': relationship(Address),
+ 'ad_syn': synonym("addresses")
+ })
+ mapper(Address, addresses)
+
+ def test_join_on_synonym(self):
+ User = self.classes.User
+ self.assert_compile(
+ Session().query(User).join(User.ad_syn),
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users JOIN addresses ON users.id = addresses.user_id"
+ )
+
+
class JoinTest(QueryTest, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -409,24 +430,6 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
sess.query(literal_column('x'), User).join, Address
)
- def test_join_on_synonym(self):
-
- class User(object):
- pass
- class Address(object):
- pass
- users, addresses = (self.tables.users, self.tables.addresses)
- mapper(User, users, properties={
- 'addresses':relationship(Address),
- 'ad_syn':synonym("addresses")
- })
- mapper(Address, addresses)
- self.assert_compile(
- Session().query(User).join(User.ad_syn),
- "SELECT users.id AS users_id, users.name AS users_name "
- "FROM users JOIN addresses ON users.id = addresses.user_id"
- )
-
def test_multi_tuple_form(self):
"""test the 'tuple' form of join, now superseded
by the two-element join() form.
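(Illustrative note, not part of the patch.)  The relocated JoinOnSynonymTest above
verifies that Query.join() accepts a synonym of a relationship.  A short sketch
with hypothetical User/Address models:

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session, relationship, synonym

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = Column(Integer, primary_key=True)
        addresses = relationship("Address")
        ad_syn = synonym("addresses")

    class Address(Base):
        __tablename__ = 'addresses'
        id = Column(Integer, primary_key=True)
        user_id = Column(ForeignKey('users.id'))

    # joining on the synonym resolves to the underlying relationship
    print(Session().query(User).join(User.ad_syn))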
diff --git a/test/orm/test_query.py b/test/orm/test_query.py
index 1c5fca144..354bbe5b1 100644
--- a/test/orm/test_query.py
+++ b/test/orm/test_query.py
@@ -1236,7 +1236,7 @@ class ColumnPropertyTest(_fixtures.FixtureTest, AssertsCompiledSQL):
__dialect__ = 'default'
run_setup_mappers = 'each'
- def _fixture(self, label=True):
+ def _fixture(self, label=True, polymorphic=False):
User, Address = self.classes("User", "Address")
users, addresses = self.tables("users", "addresses")
stmt = select([func.max(addresses.c.email_address)]).\
@@ -1247,7 +1247,7 @@ class ColumnPropertyTest(_fixtures.FixtureTest, AssertsCompiledSQL):
mapper(User, users, properties={
"ead": column_property(stmt)
- })
+ }, with_polymorphic="*" if polymorphic else None)
mapper(Address, addresses)
def test_order_by_column_prop_string(self):
@@ -1355,6 +1355,22 @@ class ColumnPropertyTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"users AS users_1 ORDER BY email_ad, anon_1"
)
+ def test_order_by_column_labeled_prop_attr_aliased_four(self):
+ User = self.classes.User
+ self._fixture(label=True, polymorphic=True)
+
+ ua = aliased(User)
+ s = Session()
+ q = s.query(ua, User.id).order_by(ua.ead)
+ self.assert_compile(
+ q,
+ "SELECT (SELECT max(addresses.email_address) AS max_1 FROM "
+ "addresses WHERE addresses.user_id = users_1.id) AS anon_1, "
+ "users_1.id AS users_1_id, users_1.name AS users_1_name, "
+ "users.id AS users_id FROM users AS users_1, users ORDER BY anon_1"
+ )
+
+
def test_order_by_column_unlabeled_prop_attr_aliased_one(self):
User = self.classes.User
self._fixture(label=False)
@@ -2467,6 +2483,8 @@ class YieldTest(_fixtures.FixtureTest):
class HintsTest(QueryTest, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
def test_hints(self):
User = self.classes.User
@@ -2502,6 +2520,28 @@ class HintsTest(QueryTest, AssertsCompiledSQL):
"ON users_1.id > users.id", dialect=dialect
)
+ def test_statement_hints(self):
+ User = self.classes.User
+
+ sess = create_session()
+ stmt = sess.query(User).\
+ with_statement_hint("test hint one").\
+ with_statement_hint("test hint two").\
+ with_statement_hint("test hint three", "postgresql")
+
+ self.assert_compile(
+ stmt,
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users test hint one test hint two",
+ )
+
+ self.assert_compile(
+ stmt,
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users test hint one test hint two test hint three",
+ dialect='postgresql'
+ )
+
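The new test_statement_hints exercises Query.with_statement_hint(), which appends raw hint text after the FROM list and can be limited to a single dialect. A hedged sketch of the same call pattern; the hint strings and the User class here are illustrative only, and the dialect-targeted compile follows the documented stmt.compile(dialect=...) idiom:

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.dialects import postgresql
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = Column(Integer, primary_key=True)
        name = Column(String(30))

    session = Session(create_engine("sqlite://"))

    q = (session.query(User)
         .with_statement_hint("hint for every dialect")
         .with_statement_hint("hint for postgresql only", "postgresql"))

    # The dialect-restricted hint appears only when compiled for postgresql.
    print(q.statement.compile())                              # first hint only
    print(q.statement.compile(dialect=postgresql.dialect()))  # both hints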
class TextTest(QueryTest, AssertsCompiledSQL):
__dialect__ = 'default'
diff --git a/test/orm/test_rel_fn.py b/test/orm/test_rel_fn.py
index f0aa538f4..150b59b75 100644
--- a/test/orm/test_rel_fn.py
+++ b/test/orm/test_rel_fn.py
@@ -242,6 +242,22 @@ class _JoinFixtures(object):
**kw
)
+ def _join_fixture_o2m_composite_selfref_func_remote_side(self, **kw):
+ return relationships.JoinCondition(
+ self.composite_selfref,
+ self.composite_selfref,
+ self.composite_selfref,
+ self.composite_selfref,
+ primaryjoin=and_(
+ self.composite_selfref.c.group_id ==
+ func.foo(self.composite_selfref.c.group_id),
+ self.composite_selfref.c.parent_id ==
+ self.composite_selfref.c.id
+ ),
+ remote_side=set([self.composite_selfref.c.parent_id]),
+ **kw
+ )
+
def _join_fixture_o2m_composite_selfref_func_annotated(self, **kw):
return relationships.JoinCondition(
self.composite_selfref,
@@ -729,6 +745,10 @@ class ColumnCollectionsTest(_JoinFixtures, fixtures.TestBase,
self._join_fixture_o2m_composite_selfref_func
)
+ def test_determine_local_remote_pairs_o2m_composite_selfref_func_rs(self):
+ # no warning
+ self._join_fixture_o2m_composite_selfref_func_remote_side()
+
def test_determine_local_remote_pairs_o2m_overlap_func_warning(self):
self._assert_non_simple_warning(
self._join_fixture_m2o_sub_to_joined_sub_func
diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py
index 6bcb02639..2a15ce666 100644
--- a/test/orm/test_relationships.py
+++ b/test/orm/test_relationships.py
@@ -5,20 +5,22 @@ from sqlalchemy import testing
from sqlalchemy import Integer, String, ForeignKey, MetaData, and_
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import mapper, relationship, relation, \
- backref, create_session, configure_mappers, \
- clear_mappers, sessionmaker, attributes,\
- Session, composite, column_property, foreign,\
- remote, synonym, joinedload, subqueryload
-from sqlalchemy.orm.interfaces import ONETOMANY, MANYTOONE, MANYTOMANY
+ backref, create_session, configure_mappers, \
+ clear_mappers, sessionmaker, attributes,\
+ Session, composite, column_property, foreign,\
+ remote, synonym, joinedload, subqueryload
+from sqlalchemy.orm.interfaces import ONETOMANY, MANYTOONE
from sqlalchemy.testing import eq_, startswith_, AssertsCompiledSQL, is_
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy import exc
from sqlalchemy import inspect
+
class _RelationshipErrors(object):
+
def _assert_raises_no_relevant_fks(self, fn, expr, relname,
- primary, *arg, **kw):
+ primary, *arg, **kw):
assert_raises_message(
sa.exc.ArgumentError,
"Could not locate any relevant foreign key columns "
@@ -33,7 +35,7 @@ class _RelationshipErrors(object):
)
def _assert_raises_no_equality(self, fn, expr, relname,
- primary, *arg, **kw):
+ primary, *arg, **kw):
assert_raises_message(
sa.exc.ArgumentError,
"Could not locate any simple equality expressions "
@@ -50,7 +52,7 @@ class _RelationshipErrors(object):
)
def _assert_raises_ambig_join(self, fn, relname, secondary_arg,
- *arg, **kw):
+ *arg, **kw):
if secondary_arg is not None:
assert_raises_message(
exc.ArgumentError,
@@ -78,7 +80,7 @@ class _RelationshipErrors(object):
fn, *arg, **kw)
def _assert_raises_no_join(self, fn, relname, secondary_arg,
- *arg, **kw):
+ *arg, **kw):
if secondary_arg is not None:
assert_raises_message(
exc.NoForeignKeysError,
@@ -86,7 +88,8 @@ class _RelationshipErrors(object):
"parent/child tables on relationship %s - "
"there are no foreign keys linking these tables "
"via secondary table '%s'. "
- "Ensure that referencing columns are associated with a ForeignKey "
+ "Ensure that referencing columns are associated with a "
+ "ForeignKey "
"or ForeignKeyConstraint, or specify 'primaryjoin' and "
"'secondaryjoin' expressions"
% (relname, secondary_arg),
@@ -97,7 +100,8 @@ class _RelationshipErrors(object):
"Could not determine join condition between "
"parent/child tables on relationship %s - "
"there are no foreign keys linking these tables. "
- "Ensure that referencing columns are associated with a ForeignKey "
+ "Ensure that referencing columns are associated with a "
+ "ForeignKey "
"or ForeignKeyConstraint, or specify a 'primaryjoin' "
"expression."
% (relname,),
@@ -125,12 +129,16 @@ class _RelationshipErrors(object):
"pairs based on join condition and remote_side arguments. "
r"Consider using the remote\(\) annotation to "
"accurately mark those elements of the join "
- "condition that are on the remote side of the relationship." % relname,
+ "condition that are on the remote side of the relationship." % (
+ relname
+ ),
fn, *arg, **kw
)
+
class DependencyTwoParentTest(fixtures.MappedTest):
+
"""Test flush() when a mapper is dependent on multiple relationships"""
run_setup_mappers = 'once'
@@ -140,74 +148,77 @@ class DependencyTwoParentTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("tbl_a", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("name", String(128)))
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("name", String(128)))
Table("tbl_b", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("name", String(128)))
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("name", String(128)))
Table("tbl_c", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"),
- nullable=False),
- Column("name", String(128)))
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"),
+ nullable=False),
+ Column("name", String(128)))
Table("tbl_d", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("tbl_c_id", Integer, ForeignKey("tbl_c.id"),
- nullable=False),
- Column("tbl_b_id", Integer, ForeignKey("tbl_b.id")),
- Column("name", String(128)))
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("tbl_c_id", Integer, ForeignKey("tbl_c.id"),
+ nullable=False),
+ Column("tbl_b_id", Integer, ForeignKey("tbl_b.id")),
+ Column("name", String(128)))
@classmethod
def setup_classes(cls):
class A(cls.Basic):
pass
+
class B(cls.Basic):
pass
+
class C(cls.Basic):
pass
+
class D(cls.Basic):
pass
@classmethod
def setup_mappers(cls):
A, C, B, D, tbl_b, tbl_c, tbl_a, tbl_d = (cls.classes.A,
- cls.classes.C,
- cls.classes.B,
- cls.classes.D,
- cls.tables.tbl_b,
- cls.tables.tbl_c,
- cls.tables.tbl_a,
- cls.tables.tbl_d)
+ cls.classes.C,
+ cls.classes.B,
+ cls.classes.D,
+ cls.tables.tbl_b,
+ cls.tables.tbl_c,
+ cls.tables.tbl_a,
+ cls.tables.tbl_d)
mapper(A, tbl_a, properties=dict(
c_rows=relationship(C, cascade="all, delete-orphan",
- backref="a_row")))
+ backref="a_row")))
mapper(B, tbl_b)
mapper(C, tbl_c, properties=dict(
d_rows=relationship(D, cascade="all, delete-orphan",
- backref="c_row")))
+ backref="c_row")))
mapper(D, tbl_d, properties=dict(
b_row=relationship(B)))
@classmethod
def insert_data(cls):
A, C, B, D = (cls.classes.A,
- cls.classes.C,
- cls.classes.B,
- cls.classes.D)
+ cls.classes.C,
+ cls.classes.B,
+ cls.classes.D)
session = create_session()
a = A(name='a1')
b = B(name='b1')
c = C(name='c1', a_row=a)
- d1 = D(name='d1', b_row=b, c_row=c)
- d2 = D(name='d2', b_row=b, c_row=c)
- d3 = D(name='d3', b_row=b, c_row=c)
+ d1 = D(name='d1', b_row=b, c_row=c) # noqa
+ d2 = D(name='d2', b_row=b, c_row=c) # noqa
+ d3 = D(name='d3', b_row=b, c_row=c) # noqa
session.add(a)
session.add(b)
session.flush()
@@ -230,7 +241,9 @@ class DependencyTwoParentTest(fixtures.MappedTest):
session.delete(c)
session.flush()
+
class M2ODontOverwriteFKTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table(
@@ -248,13 +261,13 @@ class M2ODontOverwriteFKTest(fixtures.MappedTest):
class A(fixtures.BasicEntity):
pass
+
class B(fixtures.BasicEntity):
pass
-
mapper(A, a, properties={
- 'b': relationship(B, uselist=uselist)
- })
+ 'b': relationship(B, uselist=uselist)
+ })
mapper(B, b)
return A, B
@@ -271,7 +284,6 @@ class M2ODontOverwriteFKTest(fixtures.MappedTest):
sess.commit()
# test that was broken by #3060
- from sqlalchemy.orm import joinedload
a1 = sess.query(A).options(joinedload("b")).first()
a1.bid = b1.id
sess.flush()
@@ -340,8 +352,8 @@ class M2ODontOverwriteFKTest(fixtures.MappedTest):
assert a1.bid is not None
-
class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
+
"""Tests the ultimate join condition, a single column
that points to itself, e.g. within a SQL function or similar.
The test is against a materialized path setup.
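As the docstring says, this fixture models a materialized-path tree: a single path column joined to itself, with remote() and foreign() stacked on the same column because there is no ForeignKey to infer the sides from. A small runnable sketch of that join condition, assuming SQLite; table and data values are illustrative:

    from sqlalchemy import Column, MetaData, String, Table, create_engine
    from sqlalchemy.orm import Session, foreign, mapper, relationship, remote

    metadata = MetaData()
    entity = Table('entity', metadata,
                   Column('path', String(100), primary_key=True))

    class Entity(object):
        def __init__(self, path):
            self.path = path

    # A descendant is any row whose path extends this row's path by at least
    # one '/segment'; remote()/foreign() mark that side explicitly, and
    # viewonly=True because a LIKE join cannot be kept in sync on flush.
    mapper(Entity, entity, properties={
        'descendants': relationship(
            Entity,
            primaryjoin=remote(foreign(entity.c.path)).like(
                entity.c.path.concat('/%')),
            viewonly=True,
            order_by=entity.c.path)
    })

    engine = create_engine("sqlite://")
    metadata.create_all(engine)
    sess = Session(engine)
    sess.add_all([Entity('/foo'), Entity('/foo/bar1'),
                  Entity('/foo/bar2'), Entity('/foo/bar2/bat1')])
    sess.commit()

    root = sess.query(Entity).filter_by(path='/foo').one()
    print([e.path for e in root.descendants])
    # ['/foo/bar1', '/foo/bar2', '/foo/bar2/bat1']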
@@ -365,28 +377,28 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
@classmethod
def define_tables(cls, metadata):
Table('entity', metadata,
- Column('path', String(100), primary_key=True)
- )
+ Column('path', String(100), primary_key=True)
+ )
@classmethod
def setup_classes(cls):
class Entity(cls.Basic):
+
def __init__(self, path):
self.path = path
-
def _descendants_fixture(self, data=True):
Entity = self.classes.Entity
entity = self.tables.entity
m = mapper(Entity, entity, properties={
- "descendants": relationship(Entity,
- primaryjoin=
- remote(foreign(entity.c.path)).like(
- entity.c.path.concat('/%')),
- viewonly=True,
- order_by=entity.c.path)
- })
+ "descendants": relationship(
+ Entity,
+ primaryjoin=remote(foreign(entity.c.path)).like(
+ entity.c.path.concat('/%')),
+ viewonly=True,
+ order_by=entity.c.path)
+ })
configure_mappers()
assert m.get_property("descendants").direction is ONETOMANY
if data:
@@ -397,13 +409,13 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
entity = self.tables.entity
m = mapper(Entity, entity, properties={
- "anscestors": relationship(Entity,
- primaryjoin=
- entity.c.path.like(
- remote(foreign(entity.c.path)).concat('/%')),
- viewonly=True,
- order_by=entity.c.path)
- })
+ "anscestors": relationship(
+ Entity,
+ primaryjoin=entity.c.path.like(
+ remote(foreign(entity.c.path)).concat('/%')),
+ viewonly=True,
+ order_by=entity.c.path)
+ })
configure_mappers()
assert m.get_property("anscestors").direction is ONETOMANY
if data:
@@ -447,7 +459,7 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
sess = self._descendants_fixture()
Entity = self.classes.Entity
e1 = sess.query(Entity).filter_by(path="/foo").\
- options(joinedload(Entity.descendants)).first()
+ options(joinedload(Entity.descendants)).first()
eq_(
[e.path for e in e1.descendants],
@@ -459,7 +471,7 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
sess = self._descendants_fixture()
Entity = self.classes.Entity
e1 = sess.query(Entity).filter_by(path="/foo").\
- options(subqueryload(Entity.descendants)).first()
+ options(subqueryload(Entity.descendants)).first()
eq_(
[e.path for e in e1.descendants],
@@ -471,7 +483,7 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
sess = self._anscestors_fixture()
Entity = self.classes.Entity
e1 = sess.query(Entity).filter_by(path="/foo/bar2/bat1").\
- options(joinedload(Entity.anscestors)).first()
+ options(joinedload(Entity.anscestors)).first()
eq_(
[e.path for e in e1.anscestors],
["/foo", "/foo/bar2"]
@@ -488,8 +500,8 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
)
-
class CompositeSelfRefFKTest(fixtures.MappedTest):
+
"""Tests a composite FK where, in
the relationship(), one col points
to itself in the same table.
@@ -515,7 +527,7 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
def define_tables(cls, metadata):
Table('company_t', metadata,
Column('company_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('name', String(30)))
Table('employee_t', metadata,
@@ -533,10 +545,12 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
@classmethod
def setup_classes(cls):
class Company(cls.Basic):
+
def __init__(self, name):
self.name = name
class Employee(cls.Basic):
+
def __init__(self, name, company, emp_id, reports_to=None):
self.name = name
self.company = company
@@ -545,116 +559,202 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
def test_explicit(self):
Employee, Company, employee_t, company_t = (self.classes.Employee,
- self.classes.Company,
- self.tables.employee_t,
- self.tables.company_t)
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
mapper(Company, company_t)
- mapper(Employee, employee_t, properties= {
- 'company':relationship(Company,
- primaryjoin=employee_t.c.company_id==
- company_t.c.company_id,
- backref='employees'),
- 'reports_to':relationship(Employee, primaryjoin=
- sa.and_(
- employee_t.c.emp_id==employee_t.c.reports_to_id,
- employee_t.c.company_id==employee_t.c.company_id
- ),
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company,
+ primaryjoin=employee_t.c.company_id ==
+ company_t.c.company_id,
+ backref='employees'),
+ 'reports_to': relationship(Employee, primaryjoin=sa.and_(
+ employee_t.c.emp_id == employee_t.c.reports_to_id,
+ employee_t.c.company_id == employee_t.c.company_id
+ ),
remote_side=[employee_t.c.emp_id, employee_t.c.company_id],
- foreign_keys=[employee_t.c.reports_to_id, employee_t.c.company_id],
+ foreign_keys=[
+ employee_t.c.reports_to_id, employee_t.c.company_id],
backref=backref('employees',
- foreign_keys=[employee_t.c.reports_to_id,
- employee_t.c.company_id]))
+ foreign_keys=[employee_t.c.reports_to_id,
+ employee_t.c.company_id]))
})
self._test()
def test_implicit(self):
Employee, Company, employee_t, company_t = (self.classes.Employee,
- self.classes.Company,
- self.tables.employee_t,
- self.tables.company_t)
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
mapper(Company, company_t)
- mapper(Employee, employee_t, properties= {
- 'company':relationship(Company, backref='employees'),
- 'reports_to':relationship(Employee,
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company, backref='employees'),
+ 'reports_to': relationship(
+ Employee,
remote_side=[employee_t.c.emp_id, employee_t.c.company_id],
foreign_keys=[employee_t.c.reports_to_id,
- employee_t.c.company_id],
- backref=backref('employees', foreign_keys=
- [employee_t.c.reports_to_id, employee_t.c.company_id])
- )
+ employee_t.c.company_id],
+ backref=backref(
+ 'employees',
+ foreign_keys=[
+ employee_t.c.reports_to_id, employee_t.c.company_id])
+ )
})
self._test()
def test_very_implicit(self):
Employee, Company, employee_t, company_t = (self.classes.Employee,
- self.classes.Company,
- self.tables.employee_t,
- self.tables.company_t)
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
mapper(Company, company_t)
- mapper(Employee, employee_t, properties= {
- 'company':relationship(Company, backref='employees'),
- 'reports_to':relationship(Employee,
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company, backref='employees'),
+ 'reports_to': relationship(
+ Employee,
remote_side=[employee_t.c.emp_id, employee_t.c.company_id],
backref='employees'
- )
+ )
})
self._test()
def test_very_explicit(self):
Employee, Company, employee_t, company_t = (self.classes.Employee,
- self.classes.Company,
- self.tables.employee_t,
- self.tables.company_t)
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
mapper(Company, company_t)
- mapper(Employee, employee_t, properties= {
- 'company':relationship(Company, backref='employees'),
- 'reports_to':relationship(Employee,
- _local_remote_pairs = [
- (employee_t.c.reports_to_id, employee_t.c.emp_id),
- (employee_t.c.company_id, employee_t.c.company_id)
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company, backref='employees'),
+ 'reports_to': relationship(
+ Employee,
+ _local_remote_pairs=[
+ (employee_t.c.reports_to_id, employee_t.c.emp_id),
+ (employee_t.c.company_id, employee_t.c.company_id)
],
- foreign_keys=[employee_t.c.reports_to_id,
- employee_t.c.company_id],
- backref=backref('employees', foreign_keys=
- [employee_t.c.reports_to_id, employee_t.c.company_id])
- )
+ foreign_keys=[
+ employee_t.c.reports_to_id,
+ employee_t.c.company_id],
+ backref=backref(
+ 'employees',
+ foreign_keys=[
+ employee_t.c.reports_to_id, employee_t.c.company_id])
+ )
})
self._test()
def test_annotated(self):
Employee, Company, employee_t, company_t = (self.classes.Employee,
- self.classes.Company,
- self.tables.employee_t,
- self.tables.company_t)
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
mapper(Company, company_t)
- mapper(Employee, employee_t, properties= {
- 'company':relationship(Company, backref='employees'),
- 'reports_to':relationship(Employee,
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company, backref='employees'),
+ 'reports_to': relationship(
+ Employee,
primaryjoin=sa.and_(
- remote(employee_t.c.emp_id)==employee_t.c.reports_to_id,
- remote(employee_t.c.company_id)==employee_t.c.company_id
+ remote(employee_t.c.emp_id) == employee_t.c.reports_to_id,
+ remote(employee_t.c.company_id) == employee_t.c.company_id
),
backref=backref('employees')
- )
+ )
})
self._test()
+ def test_overlapping_warning(self):
+ Employee, Company, employee_t, company_t = (self.classes.Employee,
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
+
+ mapper(Company, company_t)
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company, backref='employees'),
+ 'reports_to': relationship(
+ Employee,
+ primaryjoin=sa.and_(
+ remote(employee_t.c.emp_id) == employee_t.c.reports_to_id,
+ remote(employee_t.c.company_id) == employee_t.c.company_id
+ ),
+ backref=backref('employees')
+ )
+ })
+
+ assert_raises_message(
+ exc.SAWarning,
+ r"relationship .* will copy column .* to column "
+ "employee_t.company_id, which conflicts with relationship\(s\)",
+ configure_mappers
+ )
+
+ def test_annotated_no_overwriting(self):
+ Employee, Company, employee_t, company_t = (self.classes.Employee,
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
+
+ mapper(Company, company_t)
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company, backref='employees'),
+ 'reports_to': relationship(
+ Employee,
+ primaryjoin=sa.and_(
+ remote(employee_t.c.emp_id) ==
+ foreign(employee_t.c.reports_to_id),
+ remote(employee_t.c.company_id) == employee_t.c.company_id
+ ),
+ backref=backref('employees')
+ )
+ })
+
+ self._test_no_warning()
+
+ def _test_no_overwrite(self, sess, expect_failure):
+ # test [ticket:3230]
+
+ Employee, Company = self.classes.Employee, self.classes.Company
+
+ c1 = sess.query(Company).filter_by(name='c1').one()
+ e3 = sess.query(Employee).filter_by(name='emp3').one()
+ e3.reports_to = None
+
+ if expect_failure:
+ # if foreign() isn't applied specifically to
+ # employee_t.c.reports_to_id only, then
+ # employee_t.c.company_id goes foreign as well and then
+ # this happens
+ assert_raises_message(
+ AssertionError,
+ "Dependency rule tried to blank-out primary key column "
+ "'employee_t.company_id'",
+ sess.flush
+ )
+ else:
+ sess.flush()
+ eq_(e3.company, c1)
+
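The helper above captures the behavior behind the ticket:3230 comment: in this composite self-referential join, marking only reports_to_id with foreign() keeps company_id out of the sync rules, so clearing reports_to never tries to blank out part of the composite primary key. A configure-time sketch of the safe variant, using illustrative tables rather than the fixture's employee_t/company_t:

    from sqlalchemy import Column, Integer, MetaData, String, Table, and_
    from sqlalchemy.orm import backref, configure_mappers, foreign, mapper, \
        relationship, remote

    metadata = MetaData()
    employee = Table(
        'employee', metadata,
        Column('company_id', Integer, primary_key=True),
        Column('emp_id', Integer, primary_key=True),
        Column('name', String(30)),
        Column('reports_to_id', Integer))

    class Employee(object):
        pass

    mapper(Employee, employee, properties={
        'reports_to': relationship(
            Employee,
            primaryjoin=and_(
                # only reports_to_id is marked foreign(); company_id stays a
                # plain equated column, so a flush never overwrites it when
                # reports_to is set or cleared
                remote(employee.c.emp_id) == foreign(employee.c.reports_to_id),
                remote(employee.c.company_id) == employee.c.company_id),
            backref=backref('employees'))
    })

    # expected to configure without a "will copy column" warning
    configure_mappers()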
+ @testing.emits_warning("relationship .* will copy column ")
def _test(self):
+ self._test_no_warning(overwrites=True)
+
+ def _test_no_warning(self, overwrites=False):
self._test_relationships()
sess = Session()
self._setup_data(sess)
self._test_lazy_relations(sess)
self._test_join_aliasing(sess)
+ self._test_no_overwrite(sess, expect_failure=overwrites)
def _test_relationships(self):
configure_mappers()
@@ -665,7 +765,7 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
set([
(employee_t.c.company_id, employee_t.c.company_id),
(employee_t.c.emp_id, employee_t.c.reports_to_id),
- ])
+ ])
)
eq_(
Employee.employees.property.remote_side,
@@ -676,7 +776,7 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
set([
(employee_t.c.company_id, employee_t.c.company_id),
(employee_t.c.reports_to_id, employee_t.c.emp_id),
- ])
+ ])
)
def _setup_data(self, sess):
@@ -686,12 +786,12 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
c2 = Company('c2')
e1 = Employee('emp1', c1, 1)
- e2 = Employee('emp2', c1, 2, e1)
+ e2 = Employee('emp2', c1, 2, e1) # noqa
e3 = Employee('emp3', c1, 3, e1)
- e4 = Employee('emp4', c1, 4, e3)
+ e4 = Employee('emp4', c1, 4, e3) # noqa
e5 = Employee('emp5', c2, 1)
- e6 = Employee('emp6', c2, 2, e5)
- e7 = Employee('emp7', c2, 3, e5)
+ e6 = Employee('emp6', c2, 2, e5) # noqa
+ e7 = Employee('emp7', c2, 3, e5) # noqa
sess.add_all((c1, c2))
sess.commit()
@@ -711,64 +811,66 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
assert test_e5.name == 'emp5', test_e5.name
assert [x.name for x in test_e1.employees] == ['emp2', 'emp3']
assert sess.query(Employee).\
- get([c1.company_id, 3]).reports_to.name == 'emp1'
+ get([c1.company_id, 3]).reports_to.name == 'emp1'
assert sess.query(Employee).\
- get([c2.company_id, 3]).reports_to.name == 'emp5'
+ get([c2.company_id, 3]).reports_to.name == 'emp5'
def _test_join_aliasing(self, sess):
Employee, Company = self.classes.Employee, self.classes.Company
eq_(
- [n for n, in sess.query(Employee.name).\
- join(Employee.reports_to, aliased=True).\
- filter_by(name='emp5').\
- reset_joinpoint().\
- order_by(Employee.name)],
+ [n for n, in sess.query(Employee.name).
+ join(Employee.reports_to, aliased=True).
+ filter_by(name='emp5').
+ reset_joinpoint().
+ order_by(Employee.name)],
['emp6', 'emp7']
)
-
class CompositeJoinPartialFK(fixtures.MappedTest, AssertsCompiledSQL):
__dialect__ = 'default'
+
@classmethod
def define_tables(cls, metadata):
Table("parent", metadata,
- Column('x', Integer, primary_key=True),
- Column('y', Integer, primary_key=True),
- Column('z', Integer),
- )
+ Column('x', Integer, primary_key=True),
+ Column('y', Integer, primary_key=True),
+ Column('z', Integer),
+ )
Table("child", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('x', Integer),
- Column('y', Integer),
- Column('z', Integer),
- # note 'z' is not here
- sa.ForeignKeyConstraint(
- ["x", "y"],
- ["parent.x", "parent.y"]
- )
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('x', Integer),
+ Column('y', Integer),
+ Column('z', Integer),
+ # note 'z' is not here
+ sa.ForeignKeyConstraint(
+ ["x", "y"],
+ ["parent.x", "parent.y"]
+ )
+ )
+
@classmethod
def setup_mappers(cls):
parent, child = cls.tables.parent, cls.tables.child
+
class Parent(cls.Comparable):
pass
class Child(cls.Comparable):
pass
mapper(Parent, parent, properties={
- 'children':relationship(Child, primaryjoin=and_(
- parent.c.x==child.c.x,
- parent.c.y==child.c.y,
- parent.c.z==child.c.z,
+ 'children': relationship(Child, primaryjoin=and_(
+ parent.c.x == child.c.x,
+ parent.c.y == child.c.y,
+ parent.c.z == child.c.z,
))
})
mapper(Child, child)
def test_joins_fully(self):
Parent, Child = self.classes.Parent, self.classes.Child
- s = Session()
+
self.assert_compile(
Parent.children.property.strategy._lazywhere,
":param_1 = child.x AND :param_2 = child.y AND :param_3 = child.z"
@@ -776,19 +878,20 @@ class CompositeJoinPartialFK(fixtures.MappedTest, AssertsCompiledSQL):
class SynonymsAsFKsTest(fixtures.MappedTest):
+
"""Syncrules on foreign keys that are also primary"""
@classmethod
def define_tables(cls, metadata):
Table("tableA", metadata,
- Column("id",Integer,primary_key=True,
- test_needs_autoincrement=True),
- Column("foo",Integer,),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("foo", Integer,),
test_needs_fk=True)
- Table("tableB",metadata,
- Column("id",Integer,primary_key=True,
- test_needs_autoincrement=True),
+ Table("tableB", metadata,
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column("_a_id", Integer, key='a_id', primary_key=True),
test_needs_fk=True)
@@ -798,6 +901,7 @@ class SynonymsAsFKsTest(fixtures.MappedTest):
pass
class B(cls.Basic):
+
@property
def a_id(self):
return self._a_id
@@ -832,18 +936,19 @@ class SynonymsAsFKsTest(fixtures.MappedTest):
class FKsAsPksTest(fixtures.MappedTest):
+
"""Syncrules on foreign keys that are also primary"""
@classmethod
def define_tables(cls, metadata):
Table("tableA", metadata,
- Column("id",Integer,primary_key=True,
- test_needs_autoincrement=True),
- Column("foo",Integer,),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("foo", Integer,),
test_needs_fk=True)
- Table("tableB",metadata,
- Column("id",Integer,ForeignKey("tableA.id"),primary_key=True),
+ Table("tableB", metadata,
+ Column("id", Integer, ForeignKey("tableA.id"), primary_key=True),
test_needs_fk=True)
@classmethod
@@ -863,9 +968,8 @@ class FKsAsPksTest(fixtures.MappedTest):
self.classes.B,
self.tables.tableA)
-
mapper(A, tableA, properties={
- 'b':relationship(B, cascade="all,delete-orphan", uselist=False)})
+ 'b': relationship(B, cascade="all,delete-orphan", uselist=False)})
mapper(B, tableB)
configure_mappers()
@@ -890,7 +994,7 @@ class FKsAsPksTest(fixtures.MappedTest):
self.tables.tableA)
mapper(A, tableA, properties={
- 'bs':relationship(B, cascade="save-update")})
+ 'bs': relationship(B, cascade="save-update")})
mapper(B, tableB)
a1 = A()
@@ -915,7 +1019,7 @@ class FKsAsPksTest(fixtures.MappedTest):
self.tables.tableA)
mapper(B, tableB, properties={
- 'a':relationship(A, cascade="save-update")})
+ 'a': relationship(A, cascade="save-update")})
mapper(A, tableA)
b1 = B()
@@ -938,7 +1042,8 @@ class FKsAsPksTest(fixtures.MappedTest):
A, tableA = self.classes.A, self.tables.tableA
# postgresql cant handle a nullable PK column...?
- tableC = Table('tablec', tableA.metadata,
+ tableC = Table(
+ 'tablec', tableA.metadata,
Column('id', Integer, primary_key=True),
Column('a_id', Integer, ForeignKey('tableA.id'),
primary_key=True, autoincrement=False, nullable=True))
@@ -947,7 +1052,7 @@ class FKsAsPksTest(fixtures.MappedTest):
class C(fixtures.BasicEntity):
pass
mapper(C, tableC, properties={
- 'a':relationship(A, cascade="save-update")
+ 'a': relationship(A, cascade="save-update")
})
mapper(A, tableA)
@@ -968,12 +1073,11 @@ class FKsAsPksTest(fixtures.MappedTest):
self.classes.B,
self.tables.tableA)
-
for cascade in ("save-update, delete",
#"save-update, delete-orphan",
"save-update, delete, delete-orphan"):
mapper(B, tableB, properties={
- 'a':relationship(A, cascade=cascade, single_parent=True)
+ 'a': relationship(A, cascade=cascade, single_parent=True)
})
mapper(A, tableA)
@@ -999,12 +1103,11 @@ class FKsAsPksTest(fixtures.MappedTest):
self.classes.B,
self.tables.tableA)
-
for cascade in ("save-update, delete",
#"save-update, delete-orphan",
"save-update, delete, delete-orphan"):
mapper(A, tableA, properties={
- 'bs':relationship(B, cascade=cascade)
+ 'bs': relationship(B, cascade=cascade)
})
mapper(B, tableB)
@@ -1029,7 +1132,7 @@ class FKsAsPksTest(fixtures.MappedTest):
self.tables.tableA)
mapper(A, tableA, properties={
- 'bs':relationship(B, cascade="none")})
+ 'bs': relationship(B, cascade="none")})
mapper(B, tableB)
a1 = A()
@@ -1054,7 +1157,7 @@ class FKsAsPksTest(fixtures.MappedTest):
self.tables.tableA)
mapper(B, tableB, properties={
- 'a':relationship(A, cascade="none")})
+ 'a': relationship(A, cascade="none")})
mapper(A, tableA)
b1 = B()
@@ -1070,39 +1173,42 @@ class FKsAsPksTest(fixtures.MappedTest):
assert a1 not in sess
assert b1 not in sess
+
class UniqueColReferenceSwitchTest(fixtures.MappedTest):
+
"""test a relationship based on a primary
join against a unique non-pk column"""
@classmethod
def define_tables(cls, metadata):
Table("table_a", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("ident", String(10), nullable=False,
- unique=True),
- )
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("ident", String(10), nullable=False,
+ unique=True),
+ )
Table("table_b", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("a_ident", String(10),
- ForeignKey('table_a.ident'),
- nullable=False),
- )
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("a_ident", String(10),
+ ForeignKey('table_a.ident'),
+ nullable=False),
+ )
@classmethod
def setup_classes(cls):
class A(cls.Comparable):
pass
+
class B(cls.Comparable):
pass
def test_switch_parent(self):
A, B, table_b, table_a = (self.classes.A,
- self.classes.B,
- self.tables.table_b,
- self.tables.table_a)
+ self.classes.B,
+ self.tables.table_b,
+ self.tables.table_a)
mapper(A, table_a)
mapper(B, table_b, properties={"a": relationship(A, backref="bs")})
@@ -1122,7 +1228,9 @@ class UniqueColReferenceSwitchTest(fixtures.MappedTest):
session.delete(a1)
session.flush()
+
class RelationshipToSelectableTest(fixtures.MappedTest):
+
"""Test a map to a select that relates to a map to the table."""
@classmethod
@@ -1142,33 +1250,40 @@ class RelationshipToSelectableTest(fixtures.MappedTest):
class Container(fixtures.BasicEntity):
pass
+
class LineItem(fixtures.BasicEntity):
pass
container_select = sa.select(
[items.c.policyNum, items.c.policyEffDate, items.c.type],
distinct=True,
- ).alias('container_select')
+ ).alias('container_select')
mapper(LineItem, items)
- mapper(Container,
- container_select,
- order_by=sa.asc(container_select.c.type),
- properties=dict(
- lineItems=relationship(LineItem,
- lazy='select',
- cascade='all, delete-orphan',
- order_by=sa.asc(items.c.id),
- primaryjoin=sa.and_(
- container_select.c.policyNum==items.c.policyNum,
- container_select.c.policyEffDate==
- items.c.policyEffDate,
- container_select.c.type==items.c.type),
- foreign_keys=[
- items.c.policyNum,
- items.c.policyEffDate,
- items.c.type])))
+ mapper(
+ Container,
+ container_select,
+ order_by=sa.asc(container_select.c.type),
+ properties=dict(
+ lineItems=relationship(
+ LineItem,
+ lazy='select',
+ cascade='all, delete-orphan',
+ order_by=sa.asc(items.c.id),
+ primaryjoin=sa.and_(
+ container_select.c.policyNum == items.c.policyNum,
+ container_select.c.policyEffDate ==
+ items.c.policyEffDate,
+ container_select.c.type == items.c.type),
+ foreign_keys=[
+ items.c.policyNum,
+ items.c.policyEffDate,
+ items.c.type
+ ]
+ )
+ )
+ )
session = create_session()
con = Container()
@@ -1189,7 +1304,9 @@ class RelationshipToSelectableTest(fixtures.MappedTest):
for old, new in zip(con.lineItems, newcon.lineItems):
eq_(old.id, new.id)
+
class FKEquatedToConstantTest(fixtures.MappedTest):
+
"""test a relationship with a non-column entity in the primary join,
is not viewonly, and also has the non-column's clause mentioned in the
foreign keys list.
@@ -1199,31 +1316,32 @@ class FKEquatedToConstantTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('tags', metadata, Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("data", String(50)),
- )
+ test_needs_autoincrement=True),
+ Column("data", String(50)),
+ )
Table('tag_foo', metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('tagid', Integer),
- Column("data", String(50)),
- )
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('tagid', Integer),
+ Column("data", String(50)),
+ )
def test_basic(self):
tag_foo, tags = self.tables.tag_foo, self.tables.tags
class Tag(fixtures.ComparableEntity):
pass
+
class TagInstance(fixtures.ComparableEntity):
pass
mapper(Tag, tags, properties={
- 'foo':relationship(TagInstance,
- primaryjoin=sa.and_(tag_foo.c.data=='iplc_case',
- tag_foo.c.tagid==tags.c.id),
- foreign_keys=[tag_foo.c.tagid, tag_foo.c.data],
- ),
+ 'foo': relationship(
+ TagInstance,
+ primaryjoin=sa.and_(tag_foo.c.data == 'iplc_case',
+ tag_foo.c.tagid == tags.c.id),
+ foreign_keys=[tag_foo.c.tagid, tag_foo.c.data]),
})
mapper(TagInstance, tag_foo)
@@ -1248,41 +1366,43 @@ class FKEquatedToConstantTest(fixtures.MappedTest):
[TagInstance(data='iplc_case'), TagInstance(data='not_iplc_case')]
)
+
class BackrefPropagatesForwardsArgs(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50))
+ )
Table('addresses', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('user_id', Integer),
- Column('email', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_id', Integer),
+ Column('email', String(50))
+ )
@classmethod
def setup_classes(cls):
class User(cls.Comparable):
pass
+
class Address(cls.Comparable):
pass
def test_backref(self):
User, Address, users, addresses = (self.classes.User,
- self.classes.Address,
- self.tables.users,
- self.tables.addresses)
-
+ self.classes.Address,
+ self.tables.users,
+ self.tables.addresses)
mapper(User, users, properties={
- 'addresses':relationship(Address,
- primaryjoin=addresses.c.user_id==users.c.id,
- foreign_keys=addresses.c.user_id,
- backref='user')
+ 'addresses': relationship(
+ Address,
+ primaryjoin=addresses.c.user_id == users.c.id,
+ foreign_keys=addresses.c.user_id,
+ backref='user')
})
mapper(Address, addresses)
@@ -1292,9 +1412,11 @@ class BackrefPropagatesForwardsArgs(fixtures.MappedTest):
sess.commit()
eq_(sess.query(Address).all(), [
Address(email='a1', user=User(name='u1'))
- ])
+ ])
+
class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
+
"""test ambiguous joins due to FKs on both sides treated as
self-referential.
@@ -1307,25 +1429,28 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
- subscriber_table = Table('subscriber', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- )
-
- address_table = Table('address',
- metadata,
- Column('subscriber_id', Integer,
- ForeignKey('subscriber.id'), primary_key=True),
- Column('type', String(1), primary_key=True),
- )
+ Table(
+ 'subscriber', metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True))
+
+ Table(
+ 'address', metadata,
+ Column(
+ 'subscriber_id', Integer,
+ ForeignKey('subscriber.id'), primary_key=True),
+ Column('type', String(1), primary_key=True),
+ )
@classmethod
def setup_mappers(cls):
subscriber, address = cls.tables.subscriber, cls.tables.address
- subscriber_and_address = subscriber.join(address,
- and_(address.c.subscriber_id==subscriber.c.id,
- address.c.type.in_(['A', 'B', 'C'])))
+ subscriber_and_address = subscriber.join(
+ address,
+ and_(address.c.subscriber_id == subscriber.c.id,
+ address.c.type.in_(['A', 'B', 'C'])))
class Address(cls.Comparable):
pass
@@ -1336,10 +1461,10 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
mapper(Address, address)
mapper(Subscriber, subscriber_and_address, properties={
- 'id':[subscriber.c.id, address.c.subscriber_id],
- 'addresses' : relationship(Address,
- backref=backref("customer"))
- })
+ 'id': [subscriber.c.id, address.c.subscriber_id],
+ 'addresses': relationship(Address,
+ backref=backref("customer"))
+ })
def test_mapping(self):
Subscriber, Address = self.classes.Subscriber, self.classes.Address
@@ -1349,11 +1474,11 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
assert Address.customer.property.direction is MANYTOONE
s1 = Subscriber(type='A',
- addresses = [
- Address(type='D'),
- Address(type='E'),
- ]
- )
+ addresses=[
+ Address(type='D'),
+ Address(type='E'),
+ ]
+ )
a1 = Address(type='B', customer=Subscriber(type='C'))
assert s1.addresses[0].customer is s1
@@ -1375,22 +1500,23 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
class ManualBackrefTest(_fixtures.FixtureTest):
+
"""Test explicit relationships that are backrefs to each other."""
run_inserts = None
def test_o2m(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, back_populates='user')
+ 'addresses': relationship(Address, back_populates='user')
})
mapper(Address, addresses, properties={
- 'user':relationship(User, back_populates='addresses')
+ 'user': relationship(User, back_populates='addresses')
})
sess = create_session()
@@ -1409,52 +1535,56 @@ class ManualBackrefTest(_fixtures.FixtureTest):
def test_invalid_key(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, back_populates='userr')
+ 'addresses': relationship(Address, back_populates='userr')
})
mapper(Address, addresses, properties={
- 'user':relationship(User, back_populates='addresses')
+ 'user': relationship(User, back_populates='addresses')
})
assert_raises(sa.exc.InvalidRequestError, configure_mappers)
def test_invalid_target(self):
- addresses, Dingaling, User, dingalings, Address, users = (self.tables.addresses,
- self.classes.Dingaling,
- self.classes.User,
- self.tables.dingalings,
- self.classes.Address,
- self.tables.users)
+ addresses, Dingaling, User, dingalings, Address, users = (
+ self.tables.addresses,
+ self.classes.Dingaling,
+ self.classes.User,
+ self.tables.dingalings,
+ self.classes.Address,
+ self.tables.users)
mapper(User, users, properties={
- 'addresses':relationship(Address, back_populates='dingaling'),
+ 'addresses': relationship(Address, back_populates='dingaling'),
})
mapper(Dingaling, dingalings)
mapper(Address, addresses, properties={
- 'dingaling':relationship(Dingaling)
+ 'dingaling': relationship(Dingaling)
})
assert_raises_message(sa.exc.ArgumentError,
- r"reverse_property 'dingaling' on relationship "
- "User.addresses references "
- "relationship Address.dingaling, which does not "
- "reference mapper Mapper\|User\|users",
- configure_mappers)
+ r"reverse_property 'dingaling' on relationship "
+ "User.addresses references "
+ "relationship Address.dingaling, which does not "
+ "reference mapper Mapper\|User\|users",
+ configure_mappers)
+
class JoinConditionErrorTest(fixtures.TestBase):
def test_clauseelement_pj(self):
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
+
class C1(Base):
__tablename__ = 'c1'
id = Column('id', Integer, primary_key=True)
+
class C2(Base):
__tablename__ = 'c2'
id = Column('id', Integer, primary_key=True)
@@ -1466,39 +1596,42 @@ class JoinConditionErrorTest(fixtures.TestBase):
def test_clauseelement_pj_false(self):
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
+
class C1(Base):
__tablename__ = 'c1'
id = Column('id', Integer, primary_key=True)
+
class C2(Base):
__tablename__ = 'c2'
id = Column('id', Integer, primary_key=True)
c1id = Column('c1id', Integer, ForeignKey('c1.id'))
- c2 = relationship(C1, primaryjoin="x"=="y")
+ c2 = relationship(C1, primaryjoin="x" == "y")
assert_raises(sa.exc.ArgumentError, configure_mappers)
def test_only_column_elements(self):
m = MetaData()
t1 = Table('t1', m,
- Column('id', Integer, primary_key=True),
- Column('foo_id', Integer, ForeignKey('t2.id')),
- )
+ Column('id', Integer, primary_key=True),
+ Column('foo_id', Integer, ForeignKey('t2.id')),
+ )
t2 = Table('t2', m,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True),
+ )
+
class C1(object):
pass
+
class C2(object):
pass
- mapper(C1, t1, properties={'c2':relationship(C2,
- primaryjoin=t1.join(t2))})
+ mapper(C1, t1, properties={
+ 'c2': relationship(C2, primaryjoin=t1.join(t2))})
mapper(C2, t2)
assert_raises(sa.exc.ArgumentError, configure_mappers)
def test_invalid_string_args(self):
from sqlalchemy.ext.declarative import declarative_base
- from sqlalchemy import util
for argname, arg in [
('remote_side', ['c1.id']),
@@ -1508,8 +1641,9 @@ class JoinConditionErrorTest(fixtures.TestBase):
('order_by', ['id']),
]:
clear_mappers()
- kw = {argname:arg}
+ kw = {argname: arg}
Base = declarative_base()
+
class C1(Base):
__tablename__ = 'c1'
id = Column('id', Integer, primary_key=True)
@@ -1527,51 +1661,52 @@ class JoinConditionErrorTest(fixtures.TestBase):
(argname, arg[0], type(arg[0])),
configure_mappers)
-
def test_fk_error_not_raised_unrelated(self):
m = MetaData()
t1 = Table('t1', m,
- Column('id', Integer, primary_key=True),
- Column('foo_id', Integer, ForeignKey('t2.nonexistent_id')),
- )
- t2 = Table('t2', m,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True),
+ Column('foo_id', Integer, ForeignKey('t2.nonexistent_id')),
+ )
+ t2 = Table('t2', m, # noqa
+ Column('id', Integer, primary_key=True),
+ )
t3 = Table('t3', m,
- Column('id', Integer, primary_key=True),
- Column('t1id', Integer, ForeignKey('t1.id'))
- )
+ Column('id', Integer, primary_key=True),
+ Column('t1id', Integer, ForeignKey('t1.id'))
+ )
class C1(object):
pass
+
class C2(object):
pass
- mapper(C1, t1, properties={'c2':relationship(C2)})
+ mapper(C1, t1, properties={'c2': relationship(C2)})
mapper(C2, t3)
- assert C1.c2.property.primaryjoin.compare(t1.c.id==t3.c.t1id)
+ assert C1.c2.property.primaryjoin.compare(t1.c.id == t3.c.t1id)
def test_join_error_raised(self):
m = MetaData()
t1 = Table('t1', m,
- Column('id', Integer, primary_key=True),
- )
- t2 = Table('t2', m,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True),
+ )
+ t2 = Table('t2', m, # noqa
+ Column('id', Integer, primary_key=True),
+ )
t3 = Table('t3', m,
- Column('id', Integer, primary_key=True),
- Column('t1id', Integer)
- )
+ Column('id', Integer, primary_key=True),
+ Column('t1id', Integer)
+ )
class C1(object):
pass
+
class C2(object):
pass
- mapper(C1, t1, properties={'c2':relationship(C2)})
+ mapper(C1, t1, properties={'c2': relationship(C2)})
mapper(C2, t3)
assert_raises(sa.exc.ArgumentError, configure_mappers)
@@ -1579,7 +1714,9 @@ class JoinConditionErrorTest(fixtures.TestBase):
def teardown(self):
clear_mappers()
+
class TypeMatchTest(fixtures.MappedTest):
+
"""test errors raised when trying to add items
whose type is not handled by a relationship"""
@@ -1587,33 +1724,38 @@ class TypeMatchTest(fixtures.MappedTest):
def define_tables(cls, metadata):
Table("a", metadata,
Column('aid', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('adata', String(30)))
Table("b", metadata,
- Column('bid', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("a_id", Integer, ForeignKey("a.aid")),
- Column('bdata', String(30)))
+ Column('bid', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("a_id", Integer, ForeignKey("a.aid")),
+ Column('bdata', String(30)))
Table("c", metadata,
Column('cid', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column("b_id", Integer, ForeignKey("b.bid")),
Column('cdata', String(30)))
Table("d", metadata,
Column('did', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column("a_id", Integer, ForeignKey("a.aid")),
Column('ddata', String(30)))
def test_o2m_oncascade(self):
a, c, b = (self.tables.a,
- self.tables.c,
- self.tables.b)
+ self.tables.c,
+ self.tables.b)
- class A(fixtures.BasicEntity): pass
- class B(fixtures.BasicEntity): pass
- class C(fixtures.BasicEntity): pass
- mapper(A, a, properties={'bs':relationship(B)})
+ class A(fixtures.BasicEntity):
+ pass
+
+ class B(fixtures.BasicEntity):
+ pass
+
+ class C(fixtures.BasicEntity):
+ pass
+ mapper(A, a, properties={'bs': relationship(B)})
mapper(B, b)
mapper(C, c)
@@ -1633,13 +1775,18 @@ class TypeMatchTest(fixtures.MappedTest):
def test_o2m_onflush(self):
a, c, b = (self.tables.a,
- self.tables.c,
- self.tables.b)
+ self.tables.c,
+ self.tables.b)
- class A(fixtures.BasicEntity): pass
- class B(fixtures.BasicEntity): pass
- class C(fixtures.BasicEntity): pass
- mapper(A, a, properties={'bs':relationship(B, cascade="none")})
+ class A(fixtures.BasicEntity):
+ pass
+
+ class B(fixtures.BasicEntity):
+ pass
+
+ class C(fixtures.BasicEntity):
+ pass
+ mapper(A, a, properties={'bs': relationship(B, cascade="none")})
mapper(B, b)
mapper(C, c)
@@ -1653,18 +1800,23 @@ class TypeMatchTest(fixtures.MappedTest):
sess.add(b1)
sess.add(c1)
assert_raises_message(sa.orm.exc.FlushError,
- "Attempting to flush an item",
- sess.flush)
+ "Attempting to flush an item",
+ sess.flush)
def test_o2m_nopoly_onflush(self):
a, c, b = (self.tables.a,
- self.tables.c,
- self.tables.b)
+ self.tables.c,
+ self.tables.b)
+
+ class A(fixtures.BasicEntity):
+ pass
- class A(fixtures.BasicEntity): pass
- class B(fixtures.BasicEntity): pass
- class C(B): pass
- mapper(A, a, properties={'bs':relationship(B, cascade="none")})
+ class B(fixtures.BasicEntity):
+ pass
+
+ class C(B):
+ pass
+ mapper(A, a, properties={'bs': relationship(B, cascade="none")})
mapper(B, b)
mapper(C, c, inherits=B)
@@ -1678,20 +1830,25 @@ class TypeMatchTest(fixtures.MappedTest):
sess.add(b1)
sess.add(c1)
assert_raises_message(sa.orm.exc.FlushError,
- "Attempting to flush an item",
- sess.flush)
+ "Attempting to flush an item",
+ sess.flush)
def test_m2o_nopoly_onflush(self):
a, b, d = (self.tables.a,
- self.tables.b,
- self.tables.d)
+ self.tables.b,
+ self.tables.d)
+
+ class A(fixtures.BasicEntity):
+ pass
- class A(fixtures.BasicEntity): pass
- class B(A): pass
- class D(fixtures.BasicEntity): pass
+ class B(A):
+ pass
+
+ class D(fixtures.BasicEntity):
+ pass
mapper(A, a)
mapper(B, b, inherits=A)
- mapper(D, d, properties={"a":relationship(A, cascade="none")})
+ mapper(D, d, properties={"a": relationship(A, cascade="none")})
b1 = B()
d1 = D()
d1.a = b1
@@ -1699,27 +1856,33 @@ class TypeMatchTest(fixtures.MappedTest):
sess.add(b1)
sess.add(d1)
assert_raises_message(sa.orm.exc.FlushError,
- "Attempting to flush an item",
- sess.flush)
+ "Attempting to flush an item",
+ sess.flush)
def test_m2o_oncascade(self):
a, b, d = (self.tables.a,
- self.tables.b,
- self.tables.d)
+ self.tables.b,
+ self.tables.d)
- class A(fixtures.BasicEntity): pass
- class B(fixtures.BasicEntity): pass
- class D(fixtures.BasicEntity): pass
+ class A(fixtures.BasicEntity):
+ pass
+
+ class B(fixtures.BasicEntity):
+ pass
+
+ class D(fixtures.BasicEntity):
+ pass
mapper(A, a)
mapper(B, b)
- mapper(D, d, properties={"a":relationship(A)})
+ mapper(D, d, properties={"a": relationship(A)})
b1 = B()
d1 = D()
d1.a = b1
sess = create_session()
assert_raises_message(AssertionError,
- "doesn't handle objects of type",
- sess.add, d1)
+ "doesn't handle objects of type",
+ sess.add, d1)
+
class TypedAssociationTable(fixtures.MappedTest):
@@ -1727,8 +1890,10 @@ class TypedAssociationTable(fixtures.MappedTest):
def define_tables(cls, metadata):
class MySpecialType(sa.types.TypeDecorator):
impl = String
+
def process_bind_param(self, value, dialect):
return "lala" + value
+
def process_result_value(self, value, dialect):
return value[4:]
@@ -1746,15 +1911,17 @@ class TypedAssociationTable(fixtures.MappedTest):
"""Many-to-many tables with special types for candidate keys."""
t2, t3, t1 = (self.tables.t2,
- self.tables.t3,
- self.tables.t1)
+ self.tables.t3,
+ self.tables.t1)
+ class T1(fixtures.BasicEntity):
+ pass
- class T1(fixtures.BasicEntity): pass
- class T2(fixtures.BasicEntity): pass
+ class T2(fixtures.BasicEntity):
+ pass
mapper(T2, t2)
mapper(T1, t1, properties={
- 't2s':relationship(T2, secondary=t3, backref='t1s')})
+ 't2s': relationship(T2, secondary=t3, backref='t1s')})
a = T1()
a.col1 = "aid"
@@ -1775,7 +1942,9 @@ class TypedAssociationTable(fixtures.MappedTest):
assert t3.count().scalar() == 1
+
class CustomOperatorTest(fixtures.MappedTest, AssertsCompiledSQL):
+
"""test op() in conjunction with join conditions"""
run_create_tables = run_deletes = None
@@ -1785,47 +1954,50 @@ class CustomOperatorTest(fixtures.MappedTest, AssertsCompiledSQL):
@classmethod
def define_tables(cls, metadata):
Table('a', metadata,
- Column('id', Integer, primary_key=True),
- Column('foo', String(50))
- )
+ Column('id', Integer, primary_key=True),
+ Column('foo', String(50))
+ )
Table('b', metadata,
- Column('id', Integer, primary_key=True),
- Column('foo', String(50))
- )
+ Column('id', Integer, primary_key=True),
+ Column('foo', String(50))
+ )
def test_join_on_custom_op(self):
class A(fixtures.BasicEntity):
pass
+
class B(fixtures.BasicEntity):
pass
mapper(A, self.tables.a, properties={
- 'bs': relationship(B,
- primaryjoin=self.tables.a.c.foo.op(
- '&*', is_comparison=True
- )(foreign(self.tables.b.c.foo)),
- viewonly=True
- )
- })
+ 'bs': relationship(B,
+ primaryjoin=self.tables.a.c.foo.op(
+ '&*', is_comparison=True
+ )(foreign(self.tables.b.c.foo)),
+ viewonly=True
+ )
+ })
mapper(B, self.tables.b)
self.assert_compile(
Session().query(A).join(A.bs),
- "SELECT a.id AS a_id, a.foo AS a_foo FROM a JOIN b ON a.foo &* b.foo"
+ "SELECT a.id AS a_id, a.foo AS a_foo "
+ "FROM a JOIN b ON a.foo &* b.foo"
)
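test_join_on_custom_op asserts the SQL rendered for a primaryjoin built with op(..., is_comparison=True). A sketch of the same pattern; the '&*' operator is made up, so this only compiles the join and is not meant to execute against a real backend:

    from sqlalchemy import Column, Integer, MetaData, String, Table
    from sqlalchemy.orm import Session, foreign, mapper, relationship

    metadata = MetaData()
    a = Table('a', metadata, Column('id', Integer, primary_key=True),
              Column('foo', String(50)))
    b = Table('b', metadata, Column('id', Integer, primary_key=True),
              Column('foo', String(50)))

    class A(object):
        pass

    class B(object):
        pass

    # is_comparison=True lets the custom operator stand in for a comparison
    # inside the join condition; foreign() names the foreign side explicitly.
    mapper(A, a, properties={
        'bs': relationship(
            B,
            primaryjoin=a.c.foo.op('&*', is_comparison=True)(foreign(b.c.foo)),
            viewonly=True)
    })
    mapper(B, b)

    # Renders roughly: SELECT ... FROM a JOIN b ON a.foo &* b.foo
    print(Session().query(A).join(A.bs))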
class ViewOnlyHistoryTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)))
Table("t2", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)),
- Column('t1id', Integer, ForeignKey('t1.id')))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ Column('t1id', Integer, ForeignKey('t1.id')))
def _assert_fk(self, a1, b1, is_set):
s = Session(testing.db)
@@ -1842,12 +2014,13 @@ class ViewOnlyHistoryTest(fixtures.MappedTest):
def test_o2m_viewonly_oneside(self):
class A(fixtures.ComparableEntity):
pass
+
class B(fixtures.ComparableEntity):
pass
mapper(A, self.tables.t1, properties={
"bs": relationship(B, viewonly=True,
- backref=backref("a", viewonly=False))
+ backref=backref("a", viewonly=False))
})
mapper(B, self.tables.t2)
@@ -1867,12 +2040,13 @@ class ViewOnlyHistoryTest(fixtures.MappedTest):
def test_m2o_viewonly_oneside(self):
class A(fixtures.ComparableEntity):
pass
+
class B(fixtures.ComparableEntity):
pass
mapper(A, self.tables.t1, properties={
"bs": relationship(B, viewonly=False,
- backref=backref("a", viewonly=True))
+ backref=backref("a", viewonly=True))
})
mapper(B, self.tables.t2)
@@ -1892,6 +2066,7 @@ class ViewOnlyHistoryTest(fixtures.MappedTest):
def test_o2m_viewonly_only(self):
class A(fixtures.ComparableEntity):
pass
+
class B(fixtures.ComparableEntity):
pass
@@ -1910,13 +2085,14 @@ class ViewOnlyHistoryTest(fixtures.MappedTest):
def test_m2o_viewonly_only(self):
class A(fixtures.ComparableEntity):
pass
+
class B(fixtures.ComparableEntity):
pass
mapper(A, self.tables.t1)
mapper(B, self.tables.t2, properties={
'a': relationship(A, viewonly=True)
- })
+ })
a1 = A()
b1 = B()
@@ -1925,34 +2101,39 @@ class ViewOnlyHistoryTest(fixtures.MappedTest):
self._assert_fk(a1, b1, False)
+
class ViewOnlyM2MBackrefTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)))
Table("t2", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ )
Table("t1t2", metadata,
- Column('t1id', Integer, ForeignKey('t1.id'), primary_key=True),
- Column('t2id', Integer, ForeignKey('t2.id'), primary_key=True),
- )
+ Column('t1id', Integer, ForeignKey('t1.id'), primary_key=True),
+ Column('t2id', Integer, ForeignKey('t2.id'), primary_key=True),
+ )
def test_viewonly(self):
t1t2, t2, t1 = (self.tables.t1t2,
- self.tables.t2,
- self.tables.t1)
+ self.tables.t2,
+ self.tables.t1)
- class A(fixtures.ComparableEntity):pass
- class B(fixtures.ComparableEntity):pass
+ class A(fixtures.ComparableEntity):
+ pass
+
+ class B(fixtures.ComparableEntity):
+ pass
mapper(A, t1, properties={
- 'bs':relationship(B, secondary=t1t2,
- backref=backref('as_', viewonly=True))
+ 'bs': relationship(B, secondary=t1t2,
+ backref=backref('as_', viewonly=True))
})
mapper(B, t2)
@@ -1971,25 +2152,27 @@ class ViewOnlyM2MBackrefTest(fixtures.MappedTest):
sess.query(B).first(), B(as_=[A(id=a1.id)])
)
+
class ViewOnlyOverlappingNames(fixtures.MappedTest):
+
"""'viewonly' mappings with overlapping PK column names."""
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)))
Table("t2", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)),
- Column('t1id', Integer, ForeignKey('t1.id')))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ Column('t1id', Integer, ForeignKey('t1.id')))
Table("t3", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)),
- Column('t2id', Integer, ForeignKey('t2.id')))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ Column('t2id', Integer, ForeignKey('t2.id')))
def test_three_table_view(self):
"""A three table join with overlapping PK names.
@@ -2001,23 +2184,29 @@ class ViewOnlyOverlappingNames(fixtures.MappedTest):
"""
t2, t3, t1 = (self.tables.t2,
- self.tables.t3,
- self.tables.t1)
+ self.tables.t3,
+ self.tables.t1)
+
+ class C1(fixtures.BasicEntity):
+ pass
+
+ class C2(fixtures.BasicEntity):
+ pass
- class C1(fixtures.BasicEntity): pass
- class C2(fixtures.BasicEntity): pass
- class C3(fixtures.BasicEntity): pass
+ class C3(fixtures.BasicEntity):
+ pass
mapper(C1, t1, properties={
- 't2s':relationship(C2),
- 't2_view':relationship(C2,
- viewonly=True,
- primaryjoin=sa.and_(t1.c.id==t2.c.t1id,
- t3.c.t2id==t2.c.id,
- t3.c.data==t1.c.data))})
+ 't2s': relationship(C2),
+ 't2_view': relationship(
+ C2,
+ viewonly=True,
+ primaryjoin=sa.and_(t1.c.id == t2.c.t1id,
+ t3.c.t2id == t2.c.id,
+ t3.c.data == t1.c.data))})
mapper(C2, t2)
mapper(C3, t3, properties={
- 't2':relationship(C2)})
+ 't2': relationship(C2)})
c1 = C1()
c1.data = 'c1data'
@@ -2026,7 +2215,7 @@ class ViewOnlyOverlappingNames(fixtures.MappedTest):
c2b = C2()
c1.t2s.append(c2b)
c3 = C3()
- c3.data='c1data'
+ c3.data = 'c1data'
c3.t2 = c2b
sess = create_session()
sess.add(c1)
@@ -2038,25 +2227,27 @@ class ViewOnlyOverlappingNames(fixtures.MappedTest):
assert set([x.id for x in c1.t2s]) == set([c2a.id, c2b.id])
assert set([x.id for x in c1.t2_view]) == set([c2b.id])
+
class ViewOnlyUniqueNames(fixtures.MappedTest):
+
"""'viewonly' mappings with unique PK column names."""
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('t1id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)))
+ Column('t1id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)))
Table("t2", metadata,
- Column('t2id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)),
- Column('t1id_ref', Integer, ForeignKey('t1.t1id')))
+ Column('t2id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ Column('t1id_ref', Integer, ForeignKey('t1.t1id')))
Table("t3", metadata,
- Column('t3id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)),
- Column('t2id_ref', Integer, ForeignKey('t2.t2id')))
+ Column('t3id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ Column('t2id_ref', Integer, ForeignKey('t2.t2id')))
def test_three_table_view(self):
"""A three table join with overlapping PK names.
@@ -2067,23 +2258,29 @@ class ViewOnlyUniqueNames(fixtures.MappedTest):
"""
t2, t3, t1 = (self.tables.t2,
- self.tables.t3,
- self.tables.t1)
+ self.tables.t3,
+ self.tables.t1)
+
+ class C1(fixtures.BasicEntity):
+ pass
+
+ class C2(fixtures.BasicEntity):
+ pass
- class C1(fixtures.BasicEntity): pass
- class C2(fixtures.BasicEntity): pass
- class C3(fixtures.BasicEntity): pass
+ class C3(fixtures.BasicEntity):
+ pass
mapper(C1, t1, properties={
- 't2s':relationship(C2),
- 't2_view':relationship(C2,
- viewonly=True,
- primaryjoin=sa.and_(t1.c.t1id==t2.c.t1id_ref,
- t3.c.t2id_ref==t2.c.t2id,
- t3.c.data==t1.c.data))})
+ 't2s': relationship(C2),
+ 't2_view': relationship(
+ C2,
+ viewonly=True,
+ primaryjoin=sa.and_(t1.c.t1id == t2.c.t1id_ref,
+ t3.c.t2id_ref == t2.c.t2id,
+ t3.c.data == t1.c.data))})
mapper(C2, t2)
mapper(C3, t3, properties={
- 't2':relationship(C2)})
+ 't2': relationship(C2)})
c1 = C1()
c1.data = 'c1data'
@@ -2092,7 +2289,7 @@ class ViewOnlyUniqueNames(fixtures.MappedTest):
c2b = C2()
c1.t2s.append(c2b)
c3 = C3()
- c3.data='c1data'
+ c3.data = 'c1data'
c3.t2 = c2b
sess = create_session()
@@ -2104,30 +2301,35 @@ class ViewOnlyUniqueNames(fixtures.MappedTest):
assert set([x.t2id for x in c1.t2s]) == set([c2a.t2id, c2b.t2id])
assert set([x.t2id for x in c1.t2_view]) == set([c2b.t2id])
+
class ViewOnlyLocalRemoteM2M(fixtures.TestBase):
+
"""test that local-remote is correctly determined for m2m"""
def test_local_remote(self):
meta = MetaData()
t1 = Table('t1', meta,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True),
+ )
t2 = Table('t2', meta,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True),
+ )
t12 = Table('tab', meta,
- Column('t1_id', Integer, ForeignKey('t1.id',)),
- Column('t2_id', Integer, ForeignKey('t2.id',)),
- )
+ Column('t1_id', Integer, ForeignKey('t1.id',)),
+ Column('t2_id', Integer, ForeignKey('t2.id',)),
+ )
- class A(object): pass
- class B(object): pass
- mapper( B, t2, )
- m = mapper( A, t1, properties=dict(
- b_view = relationship( B, secondary=t12, viewonly=True),
- b_plain= relationship( B, secondary=t12),
- )
+ class A(object):
+ pass
+
+ class B(object):
+ pass
+ mapper(B, t2, )
+ m = mapper(A, t1, properties=dict(
+ b_view=relationship(B, secondary=t12, viewonly=True),
+ b_plain=relationship(B, secondary=t12),
+ )
)
configure_mappers()
assert m.get_property('b_view').local_remote_pairs == \
@@ -2135,31 +2337,32 @@ class ViewOnlyLocalRemoteM2M(fixtures.TestBase):
[(t1.c.id, t12.c.t1_id), (t2.c.id, t12.c.t2_id)]
-
class ViewOnlyNonEquijoin(fixtures.MappedTest):
+
"""'viewonly' mappings based on non-equijoins."""
@classmethod
def define_tables(cls, metadata):
Table('foos', metadata,
- Column('id', Integer, primary_key=True))
+ Column('id', Integer, primary_key=True))
Table('bars', metadata,
- Column('id', Integer, primary_key=True),
- Column('fid', Integer))
+ Column('id', Integer, primary_key=True),
+ Column('fid', Integer))
def test_viewonly_join(self):
bars, foos = self.tables.bars, self.tables.foos
class Foo(fixtures.ComparableEntity):
pass
+
class Bar(fixtures.ComparableEntity):
pass
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id > bars.c.fid,
- foreign_keys=[bars.c.fid],
- viewonly=True)})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id > bars.c.fid,
+ foreign_keys=[bars.c.fid],
+ viewonly=True)})
mapper(Bar, bars)
@@ -2180,17 +2383,22 @@ class ViewOnlyNonEquijoin(fixtures.MappedTest):
class ViewOnlyRepeatedRemoteColumn(fixtures.MappedTest):
+
"""'viewonly' mappings that contain the same 'remote' column twice"""
@classmethod
def define_tables(cls, metadata):
Table('foos', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('bid1', Integer,ForeignKey('bars.id')),
- Column('bid2', Integer,ForeignKey('bars.id')))
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('bid1', Integer, ForeignKey('bars.id')),
+ Column('bid2', Integer, ForeignKey('bars.id')))
Table('bars', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(50)))
def test_relationship_on_or(self):
@@ -2198,15 +2406,16 @@ class ViewOnlyRepeatedRemoteColumn(fixtures.MappedTest):
class Foo(fixtures.ComparableEntity):
pass
+
class Bar(fixtures.ComparableEntity):
pass
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=sa.or_(bars.c.id == foos.c.bid1,
- bars.c.id == foos.c.bid2),
- uselist=True,
- viewonly=True)})
+ 'bars': relationship(Bar,
+ primaryjoin=sa.or_(bars.c.id == foos.c.bid1,
+ bars.c.id == foos.c.bid2),
+ uselist=True,
+ viewonly=True)})
mapper(Bar, bars)
sess = create_session()
@@ -2228,18 +2437,20 @@ class ViewOnlyRepeatedRemoteColumn(fixtures.MappedTest):
eq_(sess.query(Foo).filter_by(id=f2.id).one(),
Foo(bars=[Bar(data='b3')]))
+
class ViewOnlyRepeatedLocalColumn(fixtures.MappedTest):
+
"""'viewonly' mappings that contain the same 'local' column twice"""
@classmethod
def define_tables(cls, metadata):
Table('foos', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('data', String(50)))
Table('bars', metadata, Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('fid1', Integer, ForeignKey('foos.id')),
Column('fid2', Integer, ForeignKey('foos.id')),
Column('data', String(50)))
@@ -2249,14 +2460,15 @@ class ViewOnlyRepeatedLocalColumn(fixtures.MappedTest):
class Foo(fixtures.ComparableEntity):
pass
+
class Bar(fixtures.ComparableEntity):
pass
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=sa.or_(bars.c.fid1 == foos.c.id,
- bars.c.fid2 == foos.c.id),
- viewonly=True)})
+ 'bars': relationship(Bar,
+ primaryjoin=sa.or_(bars.c.fid1 == foos.c.id,
+ bars.c.fid2 == foos.c.id),
+ viewonly=True)})
mapper(Bar, bars)
sess = create_session()
@@ -2279,57 +2491,61 @@ class ViewOnlyRepeatedLocalColumn(fixtures.MappedTest):
eq_(sess.query(Foo).filter_by(id=f2.id).one(),
Foo(bars=[Bar(data='b3'), Bar(data='b4')]))
+
class ViewOnlyComplexJoin(_RelationshipErrors, fixtures.MappedTest):
+
"""'viewonly' mappings with a complex join condition."""
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(50)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)))
Table('t2', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(50)),
- Column('t1id', Integer, ForeignKey('t1.id')))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)),
+ Column('t1id', Integer, ForeignKey('t1.id')))
Table('t3', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(50)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)))
Table('t2tot3', metadata,
- Column('t2id', Integer, ForeignKey('t2.id')),
- Column('t3id', Integer, ForeignKey('t3.id')))
+ Column('t2id', Integer, ForeignKey('t2.id')),
+ Column('t3id', Integer, ForeignKey('t3.id')))
@classmethod
def setup_classes(cls):
class T1(cls.Comparable):
pass
+
class T2(cls.Comparable):
pass
+
class T3(cls.Comparable):
pass
def test_basic(self):
T1, t2, T2, T3, t3, t2tot3, t1 = (self.classes.T1,
- self.tables.t2,
- self.classes.T2,
- self.classes.T3,
- self.tables.t3,
- self.tables.t2tot3,
- self.tables.t1)
+ self.tables.t2,
+ self.classes.T2,
+ self.classes.T3,
+ self.tables.t3,
+ self.tables.t2tot3,
+ self.tables.t1)
mapper(T1, t1, properties={
- 't3s':relationship(T3, primaryjoin=sa.and_(
- t1.c.id==t2.c.t1id,
- t2.c.id==t2tot3.c.t2id,
- t3.c.id==t2tot3.c.t3id),
- viewonly=True,
- foreign_keys=t3.c.id, remote_side=t2.c.t1id)
+ 't3s': relationship(T3, primaryjoin=sa.and_(
+ t1.c.id == t2.c.t1id,
+ t2.c.id == t2tot3.c.t2id,
+ t3.c.id == t2tot3.c.t3id),
+ viewonly=True,
+ foreign_keys=t3.c.id, remote_side=t2.c.t1id)
})
mapper(T2, t2, properties={
- 't1':relationship(T1),
- 't3s':relationship(T3, secondary=t2tot3)
+ 't1': relationship(T1),
+ 't3s': relationship(T3, secondary=t2tot3)
})
mapper(T3, t3)
@@ -2341,31 +2557,32 @@ class ViewOnlyComplexJoin(_RelationshipErrors, fixtures.MappedTest):
a = sess.query(T1).first()
eq_(a.t3s, [T3(data='t3')])
-
def test_remote_side_escalation(self):
T1, t2, T2, T3, t3, t2tot3, t1 = (self.classes.T1,
- self.tables.t2,
- self.classes.T2,
- self.classes.T3,
- self.tables.t3,
- self.tables.t2tot3,
- self.tables.t1)
+ self.tables.t2,
+ self.classes.T2,
+ self.classes.T3,
+ self.tables.t3,
+ self.tables.t2tot3,
+ self.tables.t1)
mapper(T1, t1, properties={
- 't3s':relationship(T3,
- primaryjoin=sa.and_(t1.c.id==t2.c.t1id,
- t2.c.id==t2tot3.c.t2id,
- t3.c.id==t2tot3.c.t3id
- ),
- viewonly=True,
- foreign_keys=t3.c.id)})
+ 't3s': relationship(T3,
+ primaryjoin=sa.and_(t1.c.id == t2.c.t1id,
+ t2.c.id == t2tot3.c.t2id,
+ t3.c.id == t2tot3.c.t3id
+ ),
+ viewonly=True,
+ foreign_keys=t3.c.id)})
mapper(T2, t2, properties={
- 't1':relationship(T1),
- 't3s':relationship(T3, secondary=t2tot3)})
+ 't1': relationship(T1),
+ 't3s': relationship(T3, secondary=t2tot3)})
mapper(T3, t3)
self._assert_raises_no_local_remote(configure_mappers, "T1.t3s")
+
class RemoteForeignBetweenColsTest(fixtures.DeclarativeMappedTest):
+
"""test a complex annotation using between().
Using declarative here as an integration test for the local()
@@ -2381,23 +2598,23 @@ class RemoteForeignBetweenColsTest(fixtures.DeclarativeMappedTest):
__tablename__ = "network"
id = Column(sa.Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
ip_net_addr = Column(Integer)
ip_broadcast_addr = Column(Integer)
- addresses = relationship("Address",
- primaryjoin="remote(foreign(Address.ip_addr)).between("
- "Network.ip_net_addr,"
- "Network.ip_broadcast_addr)",
- viewonly=True
- )
+ addresses = relationship(
+ "Address",
+ primaryjoin="remote(foreign(Address.ip_addr)).between("
+ "Network.ip_net_addr,"
+ "Network.ip_broadcast_addr)",
+ viewonly=True
+ )
class Address(fixtures.ComparableEntity, Base):
__tablename__ = "address"
ip_addr = Column(Integer, primary_key=True)
-
@classmethod
def insert_data(cls):
Network, Address = cls.classes.Network, cls.classes.Address
@@ -2417,11 +2634,11 @@ class RemoteForeignBetweenColsTest(fixtures.DeclarativeMappedTest):
session = Session(testing.db)
eq_(
- session.query(Address.ip_addr).\
- select_from(Network).\
- join(Network.addresses).\
- filter(Network.ip_net_addr == 15).\
- all(),
+ session.query(Address.ip_addr).
+ select_from(Network).
+ join(Network.addresses).
+ filter(Network.ip_net_addr == 15).
+ all(),
[(17, ), (18, )]
)
@@ -2439,59 +2656,61 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', String(50), primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(50)))
+ Column('id', String(50), primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)))
Table('t2', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(50)),
- Column('t1id', String(50)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)),
+ Column('t1id', String(50)))
@classmethod
def setup_classes(cls):
class T1(cls.Comparable):
pass
+
class T2(cls.Comparable):
pass
def test_onetomany_funcfk_oldstyle(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
# old _local_remote_pairs
mapper(T1, t1, properties={
- 't2s':relationship(T2,
- primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id),
- _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
- foreign_keys=[t2.c.t1id]
- )
- })
+ 't2s': relationship(
+ T2,
+ primaryjoin=t1.c.id == sa.func.lower(t2.c.t1id),
+ _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
+ foreign_keys=[t2.c.t1id]
+ )
+ })
mapper(T2, t2)
self._test_onetomany()
def test_onetomany_funcfk_annotated(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
# use annotation
mapper(T1, t1, properties={
- 't2s':relationship(T2,
- primaryjoin=t1.c.id==
- foreign(sa.func.lower(t2.c.t1id)),
- )})
+ 't2s': relationship(T2,
+ primaryjoin=t1.c.id ==
+ foreign(sa.func.lower(t2.c.t1id)),
+ )})
mapper(T2, t2)
self._test_onetomany()
def _test_onetomany(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
is_(T1.t2s.property.direction, ONETOMANY)
eq_(T1.t2s.property.local_remote_pairs, [(t1.c.id, t2.c.t1id)])
sess = create_session()
@@ -2511,17 +2730,17 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest):
def test_manytoone_funcfk(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
mapper(T1, t1)
mapper(T2, t2, properties={
- 't1':relationship(T1,
- primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id),
- _local_remote_pairs=[(t2.c.t1id, t1.c.id)],
- foreign_keys=[t2.c.t1id],
- uselist=True)})
+ 't1': relationship(T1,
+ primaryjoin=t1.c.id == sa.func.lower(t2.c.t1id),
+ _local_remote_pairs=[(t2.c.t1id, t1.c.id)],
+ foreign_keys=[t2.c.t1id],
+ uselist=True)})
sess = create_session()
a1 = T1(id='number1', data='a1')
@@ -2539,15 +2758,16 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest):
def test_onetomany_func_referent(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
mapper(T1, t1, properties={
- 't2s':relationship(T2,
- primaryjoin=sa.func.lower(t1.c.id)==t2.c.t1id,
- _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
- foreign_keys=[t2.c.t1id])})
+ 't2s': relationship(
+ T2,
+ primaryjoin=sa.func.lower(t1.c.id) == t2.c.t1id,
+ _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
+ foreign_keys=[t2.c.t1id])})
mapper(T2, t2)
sess = create_session()
@@ -2562,21 +2782,21 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest):
eq_(sess.query(T1).first(),
T1(id='NuMbeR1', data='a1', t2s=[
- T2(data='b1', t1id='number1'),
- T2(data='b2', t1id='number1')]))
+ T2(data='b1', t1id='number1'),
+ T2(data='b2', t1id='number1')]))
def test_manytoone_func_referent(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
mapper(T1, t1)
mapper(T2, t2, properties={
- 't1':relationship(T1,
- primaryjoin=sa.func.lower(t1.c.id)==t2.c.t1id,
- _local_remote_pairs=[(t2.c.t1id, t1.c.id)],
- foreign_keys=[t2.c.t1id], uselist=True)})
+ 't1': relationship(T1,
+ primaryjoin=sa.func.lower(t1.c.id) == t2.c.t1id,
+ _local_remote_pairs=[(t2.c.t1id, t1.c.id)],
+ foreign_keys=[t2.c.t1id], uselist=True)})
sess = create_session()
a1 = T1(id='NuMbeR1', data='a1')
@@ -2594,40 +2814,44 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest):
def test_escalation_1(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
mapper(T1, t1, properties={
- 't2s':relationship(T2,
- primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id),
- _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
- foreign_keys=[t2.c.t1id],
- remote_side=[t2.c.t1id])})
+ 't2s': relationship(
+ T2,
+ primaryjoin=t1.c.id == sa.func.lower(t2.c.t1id),
+ _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
+ foreign_keys=[t2.c.t1id],
+ remote_side=[t2.c.t1id])})
mapper(T2, t2)
assert_raises(sa.exc.ArgumentError, sa.orm.configure_mappers)
def test_escalation_2(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
mapper(T1, t1, properties={
- 't2s':relationship(T2,
- primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id),
- _local_remote_pairs=[(t1.c.id, t2.c.t1id)])})
+ 't2s': relationship(
+ T2,
+ primaryjoin=t1.c.id == sa.func.lower(t2.c.t1id),
+ _local_remote_pairs=[(t1.c.id, t2.c.t1id)])})
mapper(T2, t2)
assert_raises(sa.exc.ArgumentError, sa.orm.configure_mappers)
+
class InvalidRemoteSideTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(50)),
- Column('t_id', Integer, ForeignKey('t1.id'))
- )
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50)),
+ Column('t_id', Integer, ForeignKey('t1.id'))
+ )
@classmethod
def setup_classes(cls):
@@ -2638,10 +2862,11 @@ class InvalidRemoteSideTest(fixtures.MappedTest):
T1, t1 = self.classes.T1, self.tables.t1
mapper(T1, t1, properties={
- 't1s':relationship(T1, backref='parent')
+ 't1s': relationship(T1, backref='parent')
})
- assert_raises_message(sa.exc.ArgumentError,
+ assert_raises_message(
+ sa.exc.ArgumentError,
"T1.t1s and back-reference T1.parent are "
r"both of the same direction symbol\('ONETOMANY'\). Did you "
"mean to set remote_side on the many-to-one side ?",
@@ -2651,12 +2876,13 @@ class InvalidRemoteSideTest(fixtures.MappedTest):
T1, t1 = self.classes.T1, self.tables.t1
mapper(T1, t1, properties={
- 't1s':relationship(T1,
- backref=backref('parent', remote_side=t1.c.id),
- remote_side=t1.c.id)
+ 't1s': relationship(T1,
+ backref=backref('parent', remote_side=t1.c.id),
+ remote_side=t1.c.id)
})
- assert_raises_message(sa.exc.ArgumentError,
+ assert_raises_message(
+ sa.exc.ArgumentError,
"T1.t1s and back-reference T1.parent are "
r"both of the same direction symbol\('MANYTOONE'\). Did you "
"mean to set remote_side on the many-to-one side ?",
@@ -2666,12 +2892,13 @@ class InvalidRemoteSideTest(fixtures.MappedTest):
T1, t1 = self.classes.T1, self.tables.t1
mapper(T1, t1, properties={
- 't1s':relationship(T1, back_populates='parent'),
- 'parent':relationship(T1, back_populates='t1s'),
+ 't1s': relationship(T1, back_populates='parent'),
+ 'parent': relationship(T1, back_populates='t1s'),
})
# can't be sure of ordering here
- assert_raises_message(sa.exc.ArgumentError,
+ assert_raises_message(
+ sa.exc.ArgumentError,
r"both of the same direction symbol\('ONETOMANY'\). Did you "
"mean to set remote_side on the many-to-one side ?",
configure_mappers)
@@ -2680,44 +2907,48 @@ class InvalidRemoteSideTest(fixtures.MappedTest):
T1, t1 = self.classes.T1, self.tables.t1
mapper(T1, t1, properties={
- 't1s':relationship(T1, back_populates='parent',
+ 't1s': relationship(T1, back_populates='parent',
remote_side=t1.c.id),
- 'parent':relationship(T1, back_populates='t1s',
- remote_side=t1.c.id)
+ 'parent': relationship(T1, back_populates='t1s',
+ remote_side=t1.c.id)
})
# can't be sure of ordering here
- assert_raises_message(sa.exc.ArgumentError,
+ assert_raises_message(
+ sa.exc.ArgumentError,
r"both of the same direction symbol\('MANYTOONE'\). Did you "
"mean to set remote_side on the many-to-one side ?",
configure_mappers)
+
class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table("a", metadata,
- Column('id', Integer, primary_key=True)
- )
+ Column('id', Integer, primary_key=True)
+ )
Table("b", metadata,
- Column('id', Integer, primary_key=True),
- Column('aid_1', Integer, ForeignKey('a.id')),
- Column('aid_2', Integer, ForeignKey('a.id')),
- )
+ Column('id', Integer, primary_key=True),
+ Column('aid_1', Integer, ForeignKey('a.id')),
+ Column('aid_2', Integer, ForeignKey('a.id')),
+ )
Table("atob", metadata,
- Column('aid', Integer),
- Column('bid', Integer),
- )
+ Column('aid', Integer),
+ Column('bid', Integer),
+ )
Table("atob_ambiguous", metadata,
- Column('aid1', Integer, ForeignKey('a.id')),
- Column('bid1', Integer, ForeignKey('b.id')),
- Column('aid2', Integer, ForeignKey('a.id')),
- Column('bid2', Integer, ForeignKey('b.id')),
- )
+ Column('aid1', Integer, ForeignKey('a.id')),
+ Column('bid1', Integer, ForeignKey('b.id')),
+ Column('aid2', Integer, ForeignKey('a.id')),
+ Column('bid2', Integer, ForeignKey('b.id')),
+ )
@classmethod
def setup_classes(cls):
class A(cls.Basic):
pass
+
class B(cls.Basic):
pass
@@ -2725,7 +2956,7 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
A, B = self.classes.A, self.classes.B
a, b = self.tables.a, self.tables.b
mapper(A, a, properties={
- 'bs':relationship(B)
+ 'bs': relationship(B)
})
mapper(B, b)
self._assert_raises_ambig_join(
@@ -2738,12 +2969,12 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
A, B = self.classes.A, self.classes.B
a, b = self.tables.a, self.tables.b
mapper(A, a, properties={
- 'bs':relationship(B, foreign_keys=b.c.aid_1)
+ 'bs': relationship(B, foreign_keys=b.c.aid_1)
})
mapper(B, b)
sa.orm.configure_mappers()
assert A.bs.property.primaryjoin.compare(
- a.c.id==b.c.aid_1
+ a.c.id == b.c.aid_1
)
eq_(
A.bs.property._calculated_foreign_keys,
@@ -2754,12 +2985,12 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
A, B = self.classes.A, self.classes.B
a, b = self.tables.a, self.tables.b
mapper(A, a, properties={
- 'bs':relationship(B, primaryjoin=a.c.id==b.c.aid_1)
+ 'bs': relationship(B, primaryjoin=a.c.id == b.c.aid_1)
})
mapper(B, b)
sa.orm.configure_mappers()
assert A.bs.property.primaryjoin.compare(
- a.c.id==b.c.aid_1
+ a.c.id == b.c.aid_1
)
eq_(
A.bs.property._calculated_foreign_keys,
@@ -2770,12 +3001,12 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
A, B = self.classes.A, self.classes.B
a, b = self.tables.a, self.tables.b
mapper(A, a, properties={
- 'bs':relationship(B, primaryjoin=a.c.id==foreign(b.c.aid_1))
+ 'bs': relationship(B, primaryjoin=a.c.id == foreign(b.c.aid_1))
})
mapper(B, b)
sa.orm.configure_mappers()
assert A.bs.property.primaryjoin.compare(
- a.c.id==b.c.aid_1
+ a.c.id == b.c.aid_1
)
eq_(
A.bs.property._calculated_foreign_keys,
@@ -2786,7 +3017,7 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
A, B = self.classes.A, self.classes.B
a, b, a_to_b = self.tables.a, self.tables.b, self.tables.atob
mapper(A, a, properties={
- 'bs':relationship(B, secondary=a_to_b)
+ 'bs': relationship(B, secondary=a_to_b)
})
mapper(B, b)
self._assert_raises_no_join(
@@ -2798,7 +3029,7 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
A, B = self.classes.A, self.classes.B
a, b, a_to_b = self.tables.a, self.tables.b, self.tables.atob_ambiguous
mapper(A, a, properties={
- 'bs':relationship(B, secondary=a_to_b)
+ 'bs': relationship(B, secondary=a_to_b)
})
mapper(B, b)
@@ -2808,20 +3039,20 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
"atob_ambiguous"
)
-
def test_with_fks_m2m(self):
A, B = self.classes.A, self.classes.B
a, b, a_to_b = self.tables.a, self.tables.b, self.tables.atob_ambiguous
mapper(A, a, properties={
- 'bs':relationship(B, secondary=a_to_b,
- foreign_keys=[a_to_b.c.aid1, a_to_b.c.bid1])
+ 'bs': relationship(B, secondary=a_to_b,
+ foreign_keys=[a_to_b.c.aid1, a_to_b.c.bid1])
})
mapper(B, b)
sa.orm.configure_mappers()
class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
- testing.AssertsExecutionResults):
+ testing.AssertsExecutionResults):
+
"""test support for a relationship where the 'secondary' table is a
compound join().
@@ -2835,35 +3066,44 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
@classmethod
def define_tables(cls, metadata):
- Table('a', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30)),
- Column('b_id', ForeignKey('b.id'))
- )
+ Table(
+ 'a', metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30)),
+ Column('b_id', ForeignKey('b.id'))
+ )
Table('b', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30)),
- Column('d_id', ForeignKey('d.id'))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30)),
+ Column('d_id', ForeignKey('d.id'))
+ )
Table('c', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30)),
- Column('a_id', ForeignKey('a.id')),
- Column('d_id', ForeignKey('d.id'))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30)),
+ Column('a_id', ForeignKey('a.id')),
+ Column('d_id', ForeignKey('d.id'))
+ )
Table('d', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30)),
+ )
@classmethod
def setup_classes(cls):
class A(cls.Comparable):
pass
+
class B(cls.Comparable):
pass
+
class C(cls.Comparable):
pass
+
class D(cls.Comparable):
pass
@@ -2875,21 +3115,23 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
#j = join(b, d, b.c.d_id == d.c.id).join(c, c.c.d_id == d.c.id).alias()
mapper(A, a, properties={
"b": relationship(B),
- "d": relationship(D, secondary=j,
- primaryjoin=and_(a.c.b_id == b.c.id, a.c.id == c.c.a_id),
- secondaryjoin=d.c.id == b.c.d_id,
- #primaryjoin=and_(a.c.b_id == j.c.b_id, a.c.id == j.c.c_a_id),
- #secondaryjoin=d.c.id == j.c.b_d_id,
- uselist=False
- )
- })
+ "d": relationship(
+ D, secondary=j,
+ primaryjoin=and_(a.c.b_id == b.c.id, a.c.id == c.c.a_id),
+ secondaryjoin=d.c.id == b.c.d_id,
+ #primaryjoin=and_(a.c.b_id == j.c.b_id, a.c.id == j.c.c_a_id),
+ #secondaryjoin=d.c.id == j.c.b_d_id,
+ uselist=False,
+ viewonly=True
+ )
+ })
mapper(B, b, properties={
- "d": relationship(D)
- })
+ "d": relationship(D)
+ })
mapper(C, c, properties={
- "a": relationship(A),
- "d": relationship(D)
- })
+ "a": relationship(A),
+ "d": relationship(D)
+ })
mapper(D, d)
@classmethod
@@ -2931,8 +3173,8 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
sess.query(A).join(A.d),
"SELECT a.id AS a_id, a.name AS a_name, a.b_id AS a_b_id "
"FROM a JOIN (b AS b_1 JOIN d AS d_1 ON b_1.d_id = d_1.id "
- "JOIN c AS c_1 ON c_1.d_id = d_1.id) ON a.b_id = b_1.id "
- "AND a.id = c_1.a_id JOIN d ON d.id = b_1.d_id",
+ "JOIN c AS c_1 ON c_1.d_id = d_1.id) ON a.b_id = b_1.id "
+ "AND a.id = c_1.a_id JOIN d ON d.id = b_1.d_id",
dialect="postgresql"
)
@@ -2944,8 +3186,8 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
"SELECT a.id AS a_id, a.name AS a_name, a.b_id AS a_b_id, "
"d_1.id AS d_1_id, d_1.name AS d_1_name FROM a LEFT OUTER JOIN "
"(b AS b_1 JOIN d AS d_2 ON b_1.d_id = d_2.id JOIN c AS c_1 "
- "ON c_1.d_id = d_2.id JOIN d AS d_1 ON d_1.id = b_1.d_id) "
- "ON a.b_id = b_1.id AND a.id = c_1.a_id",
+ "ON c_1.d_id = d_2.id JOIN d AS d_1 ON d_1.id = b_1.d_id) "
+ "ON a.b_id = b_1.id AND a.id = c_1.a_id",
dialect="postgresql"
)
@@ -2964,14 +3206,15 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
# referring to just the columns wont actually render all those
# join conditions.
self.assert_sql_execution(
- testing.db,
- go,
- CompiledSQL(
- "SELECT d.id AS d_id, d.name AS d_name FROM b "
- "JOIN d ON b.d_id = d.id JOIN c ON c.d_id = d.id "
- "WHERE :param_1 = b.id AND :param_2 = c.a_id AND d.id = b.d_id",
- {'param_1': a1.id, 'param_2': a1.id}
- )
+ testing.db,
+ go,
+ CompiledSQL(
+ "SELECT d.id AS d_id, d.name AS d_name FROM b "
+ "JOIN d ON b.d_id = d.id JOIN c ON c.d_id = d.id "
+ "WHERE :param_1 = b.id AND :param_2 = c.a_id "
+ "AND d.id = b.d_id",
+ {'param_1': a1.id, 'param_2': a1.id}
+ )
)
mapping = {
@@ -2988,7 +3231,6 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
for a, d in sess.query(A, D).outerjoin(A.d):
eq_(self.mapping[a.name], d.name if d is not None else None)
-
def test_joinedload(self):
A, D = self.classes.A, self.classes.D
sess = Session()
@@ -3005,7 +3247,9 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
d = a.d
eq_(self.mapping[a.name], d.name if d is not None else None)
-class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest):
+
+class InvalidRelationshipEscalationTest(
+ _RelationshipErrors, fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
@@ -3017,20 +3261,20 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
Column('fid', Integer))
Table('foos_with_fks', metadata,
- Column('id', Integer, primary_key=True),
- Column('fid', Integer, ForeignKey('foos_with_fks.id')))
+ Column('id', Integer, primary_key=True),
+ Column('fid', Integer, ForeignKey('foos_with_fks.id')))
Table('bars_with_fks', metadata,
- Column('id', Integer, primary_key=True),
- Column('fid', Integer, ForeignKey('foos_with_fks.id')))
+ Column('id', Integer, primary_key=True),
+ Column('fid', Integer, ForeignKey('foos_with_fks.id')))
@classmethod
def setup_classes(cls):
class Foo(cls.Basic):
pass
+
class Bar(cls.Basic):
pass
-
def test_no_join(self):
bars, Foo, Bar, foos = (self.tables.bars,
self.classes.Foo,
@@ -3038,12 +3282,12 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar)})
+ 'bars': relationship(Bar)})
mapper(Bar, bars)
self._assert_raises_no_join(sa.orm.configure_mappers,
- "Foo.bars", None
- )
+ "Foo.bars", None
+ )
def test_no_join_self_ref(self):
bars, Foo, Bar, foos = (self.tables.bars,
@@ -3052,7 +3296,7 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'foos':relationship(Foo)})
+ 'foos': relationship(Foo)})
mapper(Bar, bars)
self._assert_raises_no_join(
@@ -3068,8 +3312,8 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id>bars.c.fid)})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id > bars.c.fid)})
mapper(Bar, bars)
self._assert_raises_no_relevant_fks(
@@ -3084,9 +3328,9 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id>bars.c.fid,
- foreign_keys=bars.c.fid)})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id > bars.c.fid,
+ foreign_keys=bars.c.fid)})
mapper(Bar, bars)
self._assert_raises_no_equality(
sa.orm.configure_mappers,
@@ -3094,25 +3338,27 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
)
def test_no_equated_wo_fks_works_on_relaxed(self):
- foos_with_fks, Foo, Bar, bars_with_fks, foos = (self.tables.foos_with_fks,
- self.classes.Foo,
- self.classes.Bar,
- self.tables.bars_with_fks,
- self.tables.foos)
+ foos_with_fks, Foo, Bar, bars_with_fks, foos = (
+ self.tables.foos_with_fks,
+ self.classes.Foo,
+ self.classes.Bar,
+ self.tables.bars_with_fks,
+ self.tables.foos)
# very unique - the join between parent/child
# has no fks, but there is an fk join between two other
# tables in the join condition, for those users that try creating
# these big-long-string-of-joining-many-tables primaryjoins.
- # in this case we don't get eq_pairs, but we hit the "works if viewonly"
- # rule. so here we add another clause regarding "try foreign keys".
+ # in this case we don't get eq_pairs, but we hit the
+ # "works if viewonly" rule. so here we add another clause regarding
+ # "try foreign keys".
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=and_(
- bars_with_fks.c.fid==foos_with_fks.c.id,
- foos_with_fks.c.id==foos.c.id,
- )
- )})
+ 'bars': relationship(Bar,
+ primaryjoin=and_(
+ bars_with_fks.c.fid == foos_with_fks.c.id,
+ foos_with_fks.c.id == foos.c.id,
+ )
+ )})
mapper(Bar, bars_with_fks)
self._assert_raises_no_equality(
@@ -3129,9 +3375,9 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id==bars.c.fid,
- foreign_keys=[foos.c.id, bars.c.fid])})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id == bars.c.fid,
+ foreign_keys=[foos.c.id, bars.c.fid])})
mapper(Bar, bars)
self._assert_raises_ambiguous_direction(
@@ -3146,12 +3392,12 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id==bars.c.fid,
- foreign_keys=[bars.c.fid],
- remote_side=[foos.c.id, bars.c.fid],
- viewonly=True
- )})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id == bars.c.fid,
+ foreign_keys=[bars.c.fid],
+ remote_side=[foos.c.id, bars.c.fid],
+ viewonly=True
+ )})
mapper(Bar, bars)
self._assert_raises_no_local_remote(
@@ -3159,7 +3405,6 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
"Foo.bars",
)
-
def test_ambiguous_remoteside_m2o(self):
bars, Foo, Bar, foos = (self.tables.bars,
self.classes.Foo,
@@ -3167,12 +3412,12 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id==bars.c.fid,
- foreign_keys=[foos.c.id],
- remote_side=[foos.c.id, bars.c.fid],
- viewonly=True
- )})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id == bars.c.fid,
+ foreign_keys=[foos.c.id],
+ remote_side=[foos.c.id, bars.c.fid],
+ viewonly=True
+ )})
mapper(Bar, bars)
self._assert_raises_no_local_remote(
@@ -3180,7 +3425,6 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
"Foo.bars",
)
-
def test_no_equated_self_ref_no_fks(self):
bars, Foo, Bar, foos = (self.tables.bars,
self.classes.Foo,
@@ -3188,14 +3432,14 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos.c.id>foos.c.fid)})
+ 'foos': relationship(Foo,
+ primaryjoin=foos.c.id > foos.c.fid)})
mapper(Bar, bars)
- self._assert_raises_no_relevant_fks(configure_mappers,
- "foos.id > foos.fid", "Foo.foos", "primary"
- )
-
+ self._assert_raises_no_relevant_fks(
+ configure_mappers,
+ "foos.id > foos.fid", "Foo.foos", "primary"
+ )
def test_no_equated_self_ref_no_equality(self):
bars, Foo, Bar, foos = (self.tables.bars,
@@ -3204,27 +3448,28 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos.c.id>foos.c.fid,
- foreign_keys=[foos.c.fid])})
+ 'foos': relationship(Foo,
+ primaryjoin=foos.c.id > foos.c.fid,
+ foreign_keys=[foos.c.fid])})
mapper(Bar, bars)
self._assert_raises_no_equality(configure_mappers,
- "foos.id > foos.fid", "Foo.foos", "primary"
- )
+ "foos.id > foos.fid", "Foo.foos", "primary"
+ )
def test_no_equated_viewonly(self):
- bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (self.tables.bars,
- self.classes.Bar,
- self.tables.bars_with_fks,
- self.tables.foos_with_fks,
- self.classes.Foo,
- self.tables.foos)
+ bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (
+ self.tables.bars,
+ self.classes.Bar,
+ self.tables.bars_with_fks,
+ self.tables.foos_with_fks,
+ self.classes.Foo,
+ self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id>bars.c.fid,
- viewonly=True)})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id > bars.c.fid,
+ viewonly=True)})
mapper(Bar, bars)
self._assert_raises_no_relevant_fks(
@@ -3234,24 +3479,26 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
sa.orm.clear_mappers()
mapper(Foo, foos_with_fks, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos_with_fks.c.id>bars_with_fks.c.fid,
- viewonly=True)})
+ 'bars': relationship(
+ Bar,
+ primaryjoin=foos_with_fks.c.id > bars_with_fks.c.fid,
+ viewonly=True)})
mapper(Bar, bars_with_fks)
sa.orm.configure_mappers()
def test_no_equated_self_ref_viewonly(self):
- bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (self.tables.bars,
- self.classes.Bar,
- self.tables.bars_with_fks,
- self.tables.foos_with_fks,
- self.classes.Foo,
- self.tables.foos)
+ bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (
+ self.tables.bars,
+ self.classes.Bar,
+ self.tables.bars_with_fks,
+ self.tables.foos_with_fks,
+ self.classes.Foo,
+ self.tables.foos)
mapper(Foo, foos, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos.c.id>foos.c.fid,
- viewonly=True)})
+ 'foos': relationship(Foo,
+ primaryjoin=foos.c.id > foos.c.fid,
+ viewonly=True)})
mapper(Bar, bars)
self._assert_raises_no_relevant_fks(
@@ -3261,9 +3508,10 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
sa.orm.clear_mappers()
mapper(Foo, foos_with_fks, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos_with_fks.c.id>foos_with_fks.c.fid,
- viewonly=True)})
+ 'foos': relationship(
+ Foo,
+ primaryjoin=foos_with_fks.c.id > foos_with_fks.c.fid,
+ viewonly=True)})
mapper(Bar, bars_with_fks)
sa.orm.configure_mappers()
@@ -3271,25 +3519,26 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
Foo, foos = self.classes.Foo, self.tables.foos
mapper(Foo, foos, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos.c.id>foos.c.fid,
- viewonly=True,
- foreign_keys=[foos.c.fid])})
+ 'foos': relationship(Foo,
+ primaryjoin=foos.c.id > foos.c.fid,
+ viewonly=True,
+ foreign_keys=[foos.c.fid])})
sa.orm.configure_mappers()
eq_(Foo.foos.property.local_remote_pairs, [(foos.c.id, foos.c.fid)])
def test_equated(self):
- bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (self.tables.bars,
- self.classes.Bar,
- self.tables.bars_with_fks,
- self.tables.foos_with_fks,
- self.classes.Foo,
- self.tables.foos)
+ bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (
+ self.tables.bars,
+ self.classes.Bar,
+ self.tables.bars_with_fks,
+ self.tables.foos_with_fks,
+ self.classes.Foo,
+ self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id==bars.c.fid)})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id == bars.c.fid)})
mapper(Bar, bars)
self._assert_raises_no_relevant_fks(
@@ -3299,8 +3548,9 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
sa.orm.clear_mappers()
mapper(Foo, foos_with_fks, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos_with_fks.c.id==bars_with_fks.c.fid)})
+ 'bars': relationship(
+ Bar,
+ primaryjoin=foos_with_fks.c.id == bars_with_fks.c.fid)})
mapper(Bar, bars_with_fks)
sa.orm.configure_mappers()
@@ -3308,24 +3558,23 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
Foo, foos = self.classes.Foo, self.tables.foos
mapper(Foo, foos, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos.c.id==foos.c.fid)})
+ 'foos': relationship(Foo,
+ primaryjoin=foos.c.id == foos.c.fid)})
self._assert_raises_no_relevant_fks(
configure_mappers,
"foos.id = foos.fid", "Foo.foos", "primary"
)
-
def test_equated_self_ref_wrong_fks(self):
bars, Foo, foos = (self.tables.bars,
- self.classes.Foo,
- self.tables.foos)
+ self.classes.Foo,
+ self.tables.foos)
mapper(Foo, foos, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos.c.id==foos.c.fid,
- foreign_keys=[bars.c.id])})
+ 'foos': relationship(Foo,
+ primaryjoin=foos.c.id == foos.c.fid,
+ foreign_keys=[bars.c.id])})
self._assert_raises_no_relevant_fks(
configure_mappers,
@@ -3333,7 +3582,8 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
)
-class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedTest):
+class InvalidRelationshipEscalationTestM2M(
+ _RelationshipErrors, fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
@@ -3345,9 +3595,9 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
Column('id', Integer, primary_key=True))
Table('foobars_with_fks', metadata,
- Column('fid', Integer, ForeignKey('foos.id')),
- Column('bid', Integer, ForeignKey('bars.id'))
- )
+ Column('fid', Integer, ForeignKey('foos.id')),
+ Column('bid', Integer, ForeignKey('bars.id'))
+ )
Table('foobars_with_many_columns', metadata,
Column('fid', Integer),
@@ -3362,15 +3612,16 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
def setup_classes(cls):
class Foo(cls.Basic):
pass
+
class Bar(cls.Basic):
pass
def test_no_join(self):
foobars, bars, Foo, Bar, foos = (self.tables.foobars,
- self.tables.bars,
- self.classes.Foo,
- self.classes.Bar,
- self.tables.foos)
+ self.tables.bars,
+ self.classes.Foo,
+ self.classes.Bar,
+ self.tables.foos)
mapper(Foo, foos, properties={
'bars': relationship(Bar, secondary=foobars)})
@@ -3384,15 +3635,15 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
def test_no_secondaryjoin(self):
foobars, bars, Foo, Bar, foos = (self.tables.foobars,
- self.tables.bars,
- self.classes.Foo,
- self.classes.Bar,
- self.tables.foos)
+ self.tables.bars,
+ self.classes.Foo,
+ self.classes.Bar,
+ self.tables.foos)
mapper(Foo, foos, properties={
'bars': relationship(Bar,
- secondary=foobars,
- primaryjoin=foos.c.id > foobars.c.fid)})
+ secondary=foobars,
+ primaryjoin=foos.c.id > foobars.c.fid)})
mapper(Bar, bars)
self._assert_raises_no_join(
@@ -3402,17 +3653,18 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
)
def test_no_fks(self):
- foobars_with_many_columns, bars, Bar, foobars, Foo, foos = (self.tables.foobars_with_many_columns,
- self.tables.bars,
- self.classes.Bar,
- self.tables.foobars,
- self.classes.Foo,
- self.tables.foos)
+ foobars_with_many_columns, bars, Bar, foobars, Foo, foos = (
+ self.tables.foobars_with_many_columns,
+ self.tables.bars,
+ self.classes.Bar,
+ self.tables.foobars,
+ self.classes.Foo,
+ self.tables.foos)
mapper(Foo, foos, properties={
'bars': relationship(Bar, secondary=foobars,
- primaryjoin=foos.c.id==foobars.c.fid,
- secondaryjoin=foobars.c.bid==bars.c.id)})
+ primaryjoin=foos.c.id == foobars.c.fid,
+ secondaryjoin=foobars.c.bid == bars.c.id)})
mapper(Bar, bars)
sa.orm.configure_mappers()
eq_(
@@ -3426,12 +3678,13 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
sa.orm.clear_mappers()
mapper(Foo, foos, properties={
- 'bars': relationship(Bar,
- secondary=foobars_with_many_columns,
- primaryjoin=foos.c.id ==
- foobars_with_many_columns.c.fid,
- secondaryjoin=foobars_with_many_columns.c.bid ==
- bars.c.id)})
+ 'bars': relationship(
+ Bar,
+ secondary=foobars_with_many_columns,
+ primaryjoin=foos.c.id ==
+ foobars_with_many_columns.c.fid,
+ secondaryjoin=foobars_with_many_columns.c.bid ==
+ bars.c.id)})
mapper(Bar, bars)
sa.orm.configure_mappers()
eq_(
@@ -3445,17 +3698,17 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
def test_local_col_setup(self):
foobars_with_fks, bars, Bar, Foo, foos = (
- self.tables.foobars_with_fks,
- self.tables.bars,
- self.classes.Bar,
- self.classes.Foo,
- self.tables.foos)
+ self.tables.foobars_with_fks,
+ self.tables.bars,
+ self.classes.Bar,
+ self.classes.Foo,
+ self.tables.foos)
# ensure m2m backref is set up with correct annotations
# [ticket:2578]
mapper(Foo, foos, properties={
'bars': relationship(Bar, secondary=foobars_with_fks, backref="foos")
- })
+ })
mapper(Bar, bars)
sa.orm.configure_mappers()
eq_(
@@ -3467,65 +3720,66 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
set([bars.c.id])
)
-
-
def test_bad_primaryjoin(self):
- foobars_with_fks, bars, Bar, foobars, Foo, foos = (self.tables.foobars_with_fks,
- self.tables.bars,
- self.classes.Bar,
- self.tables.foobars,
- self.classes.Foo,
- self.tables.foos)
+ foobars_with_fks, bars, Bar, foobars, Foo, foos = (
+ self.tables.foobars_with_fks,
+ self.tables.bars,
+ self.classes.Bar,
+ self.tables.foobars,
+ self.classes.Foo,
+ self.tables.foos)
mapper(Foo, foos, properties={
'bars': relationship(Bar,
- secondary=foobars,
- primaryjoin=foos.c.id > foobars.c.fid,
- secondaryjoin=foobars.c.bid<=bars.c.id)})
+ secondary=foobars,
+ primaryjoin=foos.c.id > foobars.c.fid,
+ secondaryjoin=foobars.c.bid <= bars.c.id)})
mapper(Bar, bars)
self._assert_raises_no_equality(
- configure_mappers,
- 'foos.id > foobars.fid',
- "Foo.bars",
- "primary")
+ configure_mappers,
+ 'foos.id > foobars.fid',
+ "Foo.bars",
+ "primary")
sa.orm.clear_mappers()
mapper(Foo, foos, properties={
- 'bars': relationship(Bar,
- secondary=foobars_with_fks,
- primaryjoin=foos.c.id > foobars_with_fks.c.fid,
- secondaryjoin=foobars_with_fks.c.bid<=bars.c.id)})
+ 'bars': relationship(
+ Bar,
+ secondary=foobars_with_fks,
+ primaryjoin=foos.c.id > foobars_with_fks.c.fid,
+ secondaryjoin=foobars_with_fks.c.bid <= bars.c.id)})
mapper(Bar, bars)
self._assert_raises_no_equality(
- configure_mappers,
- 'foos.id > foobars_with_fks.fid',
- "Foo.bars",
- "primary")
+ configure_mappers,
+ 'foos.id > foobars_with_fks.fid',
+ "Foo.bars",
+ "primary")
sa.orm.clear_mappers()
mapper(Foo, foos, properties={
- 'bars': relationship(Bar,
- secondary=foobars_with_fks,
- primaryjoin=foos.c.id > foobars_with_fks.c.fid,
- secondaryjoin=foobars_with_fks.c.bid<=bars.c.id,
- viewonly=True)})
+ 'bars': relationship(
+ Bar,
+ secondary=foobars_with_fks,
+ primaryjoin=foos.c.id > foobars_with_fks.c.fid,
+ secondaryjoin=foobars_with_fks.c.bid <= bars.c.id,
+ viewonly=True)})
mapper(Bar, bars)
sa.orm.configure_mappers()
def test_bad_secondaryjoin(self):
foobars, bars, Foo, Bar, foos = (self.tables.foobars,
- self.tables.bars,
- self.classes.Foo,
- self.classes.Bar,
- self.tables.foos)
+ self.tables.bars,
+ self.classes.Foo,
+ self.classes.Bar,
+ self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- secondary=foobars,
- primaryjoin=foos.c.id == foobars.c.fid,
- secondaryjoin=foobars.c.bid <= bars.c.id,
- foreign_keys=[foobars.c.fid])})
+ 'bars': relationship(Bar,
+ secondary=foobars,
+ primaryjoin=foos.c.id == foobars.c.fid,
+ secondaryjoin=foobars.c.bid <= bars.c.id,
+ foreign_keys=[foobars.c.fid])})
mapper(Bar, bars)
self._assert_raises_no_relevant_fks(
configure_mappers,
@@ -3536,17 +3790,17 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
def test_no_equated_secondaryjoin(self):
foobars, bars, Foo, Bar, foos = (self.tables.foobars,
- self.tables.bars,
- self.classes.Foo,
- self.classes.Bar,
- self.tables.foos)
+ self.tables.bars,
+ self.classes.Foo,
+ self.classes.Bar,
+ self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- secondary=foobars,
- primaryjoin=foos.c.id == foobars.c.fid,
- secondaryjoin=foobars.c.bid <= bars.c.id,
- foreign_keys=[foobars.c.fid, foobars.c.bid])})
+ 'bars': relationship(Bar,
+ secondary=foobars,
+ primaryjoin=foos.c.id == foobars.c.fid,
+ secondaryjoin=foobars.c.bid <= bars.c.id,
+ foreign_keys=[foobars.c.fid, foobars.c.bid])})
mapper(Bar, bars)
self._assert_raises_no_equality(
@@ -3556,6 +3810,7 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
"secondary"
)
+
class ActiveHistoryFlagTest(_fixtures.FixtureTest):
run_inserts = None
run_deletes = None
@@ -3572,27 +3827,27 @@ class ActiveHistoryFlagTest(_fixtures.FixtureTest):
setattr(obj, attrname, newvalue)
eq_(
attributes.get_history(obj, attrname),
- ([newvalue,], (), [oldvalue,])
+ ([newvalue, ], (), [oldvalue, ])
)
def test_column_property_flag(self):
User, users = self.classes.User, self.tables.users
mapper(User, users, properties={
- 'name':column_property(users.c.name,
- active_history=True)
+ 'name': column_property(users.c.name,
+ active_history=True)
})
u1 = User(name='jack')
self._test_attribute(u1, 'name', 'ed')
def test_relationship_property_flag(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(Address, addresses, properties={
- 'user':relationship(User, active_history=True)
+ 'user': relationship(User, active_history=True)
})
mapper(User, users)
u1 = User(name='jack')
@@ -3604,27 +3859,30 @@ class ActiveHistoryFlagTest(_fixtures.FixtureTest):
Order, orders = self.classes.Order, self.tables.orders
class MyComposite(object):
+
def __init__(self, description, isopen):
self.description = description
self.isopen = isopen
+
def __composite_values__(self):
return [self.description, self.isopen]
+
def __eq__(self, other):
return isinstance(other, MyComposite) and \
other.description == self.description
mapper(Order, orders, properties={
- 'composite':composite(
- MyComposite,
- orders.c.description,
- orders.c.isopen,
- active_history=True)
+ 'composite': composite(
+ MyComposite,
+ orders.c.description,
+ orders.c.isopen,
+ active_history=True)
})
o1 = Order(composite=MyComposite('foo', 1))
self._test_attribute(o1, "composite", MyComposite('bar', 1))
-
class RelationDeprecationTest(fixtures.MappedTest):
+
"""test usage of the old 'relation' function."""
run_inserts = 'once'
@@ -3655,34 +3913,32 @@ class RelationDeprecationTest(fixtures.MappedTest):
def fixtures(cls):
return dict(
users_table=(
- ('id', 'name'),
- (1, 'jack'),
- (2, 'ed'),
- (3, 'fred'),
- (4, 'chuck')),
+ ('id', 'name'),
+ (1, 'jack'),
+ (2, 'ed'),
+ (3, 'fred'),
+ (4, 'chuck')),
addresses_table=(
- ('id', 'user_id', 'email_address', 'purpose', 'bounces'),
- (1, 1, 'jack@jack.home', 'Personal', 0),
- (2, 1, 'jack@jack.bizz', 'Work', 1),
- (3, 2, 'ed@foo.bar', 'Personal', 0),
- (4, 3, 'fred@the.fred', 'Personal', 10)))
+ ('id', 'user_id', 'email_address', 'purpose', 'bounces'),
+ (1, 1, 'jack@jack.home', 'Personal', 0),
+ (2, 1, 'jack@jack.bizz', 'Work', 1),
+ (3, 2, 'ed@foo.bar', 'Personal', 0),
+ (4, 3, 'fred@the.fred', 'Personal', 10)))
def test_relation(self):
- addresses_table, User, users_table, Address = (self.tables.addresses_table,
- self.classes.User,
- self.tables.users_table,
- self.classes.Address)
+ addresses_table, User, users_table, Address = (
+ self.tables.addresses_table,
+ self.classes.User,
+ self.tables.users_table,
+ self.classes.Address)
mapper(User, users_table, properties=dict(
addresses=relation(Address, backref='user'),
- ))
+ ))
mapper(Address, addresses_table)
session = create_session()
- ed = session.query(User).filter(User.addresses.any(
+ session.query(User).filter(User.addresses.any(
Address.email_address == 'ed@foo.bar')).one()
-
-
-
diff --git a/test/orm/test_session.py b/test/orm/test_session.py
index 74a7a7442..96728612d 100644
--- a/test/orm/test_session.py
+++ b/test/orm/test_session.py
@@ -18,194 +18,6 @@ from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy import event, ForeignKey
-class BindTest(_fixtures.FixtureTest):
- run_inserts = None
-
- def test_mapped_binds(self):
- Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
-
- # ensure tables are unbound
- m2 = sa.MetaData()
- users_unbound = users.tometadata(m2)
- addresses_unbound = addresses.tometadata(m2)
-
- mapper(Address, addresses_unbound)
- mapper(User, users_unbound, properties={
- 'addresses': relationship(Address,
- backref=backref("user", cascade="all"),
- cascade="all")})
-
- sess = Session(binds={User: self.metadata.bind,
- Address: self.metadata.bind})
-
- u1 = User(id=1, name='ed')
- sess.add(u1)
- eq_(sess.query(User).filter(User.id == 1).all(),
- [User(id=1, name='ed')])
-
- # test expression binding
-
- sess.execute(users_unbound.insert(), params=dict(id=2,
- name='jack'))
- eq_(sess.execute(users_unbound.select(users_unbound.c.id
- == 2)).fetchall(), [(2, 'jack')])
-
- eq_(sess.execute(users_unbound.select(User.id == 2)).fetchall(),
- [(2, 'jack')])
-
- sess.execute(users_unbound.delete())
- eq_(sess.execute(users_unbound.select()).fetchall(), [])
-
- sess.close()
-
- def test_table_binds(self):
- Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
-
- # ensure tables are unbound
- m2 = sa.MetaData()
- users_unbound = users.tometadata(m2)
- addresses_unbound = addresses.tometadata(m2)
-
- mapper(Address, addresses_unbound)
- mapper(User, users_unbound, properties={
- 'addresses': relationship(Address,
- backref=backref("user", cascade="all"),
- cascade="all")})
-
- Session = sessionmaker(binds={users_unbound: self.metadata.bind,
- addresses_unbound: self.metadata.bind})
- sess = Session()
-
- u1 = User(id=1, name='ed')
- sess.add(u1)
- eq_(sess.query(User).filter(User.id == 1).all(),
- [User(id=1, name='ed')])
-
- sess.execute(users_unbound.insert(), params=dict(id=2, name='jack'))
-
- eq_(sess.execute(users_unbound.select(users_unbound.c.id
- == 2)).fetchall(), [(2, 'jack')])
-
- eq_(sess.execute(users_unbound.select(User.id == 2)).fetchall(),
- [(2, 'jack')])
-
- sess.execute(users_unbound.delete())
- eq_(sess.execute(users_unbound.select()).fetchall(), [])
-
- sess.close()
-
- def test_bind_from_metadata(self):
- users, User = self.tables.users, self.classes.User
-
- mapper(User, users)
-
- session = create_session()
- session.execute(users.insert(), dict(name='Johnny'))
-
- assert len(session.query(User).filter_by(name='Johnny').all()) == 1
-
- session.execute(users.delete())
-
- assert len(session.query(User).filter_by(name='Johnny').all()) == 0
- session.close()
-
- def test_bind_arguments(self):
- users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
-
- mapper(User, users)
- mapper(Address, addresses)
-
- e1 = engines.testing_engine()
- e2 = engines.testing_engine()
- e3 = engines.testing_engine()
-
- sess = Session(e3)
- sess.bind_mapper(User, e1)
- sess.bind_mapper(Address, e2)
-
- assert sess.connection().engine is e3
- assert sess.connection(bind=e1).engine is e1
- assert sess.connection(mapper=Address, bind=e1).engine is e1
- assert sess.connection(mapper=Address).engine is e2
- assert sess.connection(clause=addresses.select()).engine is e2
- assert sess.connection(mapper=User,
- clause=addresses.select()).engine is e1
- assert sess.connection(mapper=User,
- clause=addresses.select(),
- bind=e2).engine is e2
-
- sess.close()
-
- @engines.close_open_connections
- def test_bound_connection(self):
- users, User = self.tables.users, self.classes.User
-
- mapper(User, users)
- c = testing.db.connect()
- sess = create_session(bind=c)
- sess.begin()
- transaction = sess.transaction
- u = User(name='u1')
- sess.add(u)
- sess.flush()
- assert transaction._connection_for_bind(testing.db) \
- is transaction._connection_for_bind(c) is c
-
- assert_raises_message(sa.exc.InvalidRequestError,
- 'Session already has a Connection '
- 'associated',
- transaction._connection_for_bind,
- testing.db.connect())
- transaction.rollback()
- assert len(sess.query(User).all()) == 0
- sess.close()
-
- def test_bound_connection_transactional(self):
- User, users = self.classes.User, self.tables.users
-
- mapper(User, users)
- c = testing.db.connect()
-
- sess = create_session(bind=c, autocommit=False)
- u = User(name='u1')
- sess.add(u)
- sess.flush()
- sess.close()
- assert not c.in_transaction()
- assert c.scalar("select count(1) from users") == 0
-
- sess = create_session(bind=c, autocommit=False)
- u = User(name='u2')
- sess.add(u)
- sess.flush()
- sess.commit()
- assert not c.in_transaction()
- assert c.scalar("select count(1) from users") == 1
- c.execute("delete from users")
- assert c.scalar("select count(1) from users") == 0
-
- c = testing.db.connect()
-
- trans = c.begin()
- sess = create_session(bind=c, autocommit=True)
- u = User(name='u3')
- sess.add(u)
- sess.flush()
- assert c.in_transaction()
- trans.commit()
- assert not c.in_transaction()
- assert c.scalar("select count(1) from users") == 1
class ExecutionTest(_fixtures.FixtureTest):
run_inserts = None
@@ -392,6 +204,7 @@ class SessionUtilTest(_fixtures.FixtureTest):
sess.flush()
make_transient(u1)
sess.rollback()
+ assert attributes.instance_state(u1).transient
def test_make_transient_to_detached(self):
users, User = self.tables.users, self.classes.User
@@ -849,7 +662,7 @@ class SessionStateTest(_fixtures.FixtureTest):
go()
eq_(canary, [False])
- def test_deleted_expunged(self):
+ def test_deleted_auto_expunged(self):
users, User = self.tables.users, self.classes.User
mapper(User, users)
@@ -870,6 +683,53 @@ class SessionStateTest(_fixtures.FixtureTest):
assert object_session(u1) is None
+ def test_explicit_expunge_pending(self):
+ users, User = self.tables.users, self.classes.User
+
+ mapper(User, users)
+ sess = Session()
+ u1 = User(name='x')
+ sess.add(u1)
+
+ sess.flush()
+ sess.expunge(u1)
+
+ assert u1 not in sess
+ assert object_session(u1) is None
+
+ sess.rollback()
+
+ assert u1 not in sess
+ assert object_session(u1) is None
+
+ def test_explicit_expunge_deleted(self):
+ users, User = self.tables.users, self.classes.User
+
+ mapper(User, users)
+ sess = Session()
+ sess.add(User(name='x'))
+ sess.commit()
+
+ u1 = sess.query(User).first()
+ sess.delete(u1)
+
+ sess.flush()
+
+ assert was_deleted(u1)
+ assert u1 not in sess
+ assert object_session(u1) is sess
+
+ sess.expunge(u1)
+ assert was_deleted(u1)
+ assert u1 not in sess
+ assert object_session(u1) is None
+
+ sess.rollback()
+ assert was_deleted(u1)
+ assert u1 not in sess
+ assert object_session(u1) is None
+
+
class SessionStateWFixtureTest(_fixtures.FixtureTest):
__backend__ = True
@@ -1591,14 +1451,19 @@ class SessionInterface(fixtures.TestBase):
eq_(watchdog, instance_methods,
watchdog.symmetric_difference(instance_methods))
- def _test_class_guards(self, user_arg):
+ def _test_class_guards(self, user_arg, is_class=True):
watchdog = set()
def raises_(method, *args, **kw):
watchdog.add(method)
callable_ = getattr(create_session(), method)
- assert_raises(sa.orm.exc.UnmappedClassError,
- callable_, *args, **kw)
+ if is_class:
+ assert_raises(
+ sa.orm.exc.UnmappedClassError,
+ callable_, *args, **kw)
+ else:
+ assert_raises(
+ sa.exc.NoInspectionAvailable, callable_, *args, **kw)
raises_('connection', mapper=user_arg)
@@ -1621,7 +1486,7 @@ class SessionInterface(fixtures.TestBase):
def test_unmapped_primitives(self):
for prim in ('doh', 123, ('t', 'u', 'p', 'l', 'e')):
self._test_instance_guards(prim)
- self._test_class_guards(prim)
+ self._test_class_guards(prim, is_class=False)
def test_unmapped_class_for_instance(self):
class Unmapped(object):
@@ -1645,7 +1510,7 @@ class SessionInterface(fixtures.TestBase):
self._map_it(Mapped)
self._test_instance_guards(early)
- self._test_class_guards(early)
+ self._test_class_guards(early, is_class=False)
class TLTransactionTest(fixtures.MappedTest):
diff --git a/test/orm/test_update_delete.py b/test/orm/test_update_delete.py
index 35d527ca8..a3ad37e60 100644
--- a/test/orm/test_update_delete.py
+++ b/test/orm/test_update_delete.py
@@ -1,9 +1,9 @@
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
from sqlalchemy.testing import fixtures
-from sqlalchemy import Integer, String, ForeignKey, or_, and_, exc, \
- select, func, Boolean, case, text
+from sqlalchemy import Integer, String, ForeignKey, or_, exc, \
+ select, func, Boolean, case, text, column
from sqlalchemy.orm import mapper, relationship, backref, Session, \
- joinedload, aliased
+ joinedload, synonym
from sqlalchemy import testing
from sqlalchemy.testing.schema import Table, Column
@@ -18,7 +18,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(32)),
- Column('age', Integer))
+ Column('age_int', Integer))
@classmethod
def setup_classes(cls):
@@ -30,10 +30,10 @@ class UpdateDeleteTest(fixtures.MappedTest):
users = cls.tables.users
users.insert().execute([
- dict(id=1, name='john', age=25),
- dict(id=2, name='jack', age=47),
- dict(id=3, name='jill', age=29),
- dict(id=4, name='jane', age=37),
+ dict(id=1, name='john', age_int=25),
+ dict(id=2, name='jack', age_int=47),
+ dict(id=3, name='jill', age_int=29),
+ dict(id=4, name='jane', age_int=37),
])
@classmethod
@@ -41,7 +41,9 @@ class UpdateDeleteTest(fixtures.MappedTest):
User = cls.classes.User
users = cls.tables.users
- mapper(User, users)
+ mapper(User, users, properties={
+ 'age': users.c.age_int
+ })
def test_illegal_eval(self):
User = self.classes.User
@@ -70,14 +72,118 @@ class UpdateDeleteTest(fixtures.MappedTest):
):
assert_raises_message(
exc.InvalidRequestError,
- r"Can't call Query.update\(\) when %s\(\) has been called" % mname,
+ r"Can't call Query.update\(\) when "
+ "%s\(\) has been called" % mname,
q.update,
{'name': 'ed'})
assert_raises_message(
exc.InvalidRequestError,
- r"Can't call Query.delete\(\) when %s\(\) has been called" % mname,
+ r"Can't call Query.delete\(\) when "
+ "%s\(\) has been called" % mname,
q.delete)
+ def test_evaluate_clauseelement(self):
+ User = self.classes.User
+
+ class Thing(object):
+ def __clause_element__(self):
+ return User.name.__clause_element__()
+
+ s = Session()
+ jill = s.query(User).get(3)
+ s.query(User).update(
+ {Thing(): 'moonbeam'},
+ synchronize_session='evaluate')
+ eq_(jill.name, 'moonbeam')
+
+ def test_evaluate_invalid(self):
+ User = self.classes.User
+
+ class Thing(object):
+ def __clause_element__(self):
+ return 5
+
+ s = Session()
+
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Invalid expression type: 5",
+ s.query(User).update, {Thing(): 'moonbeam'},
+ synchronize_session='evaluate'
+ )
+
+ def test_evaluate_unmapped_col(self):
+ User = self.classes.User
+
+ s = Session()
+ jill = s.query(User).get(3)
+ s.query(User).update(
+ {column('name'): 'moonbeam'},
+ synchronize_session='evaluate')
+ eq_(jill.name, 'jill')
+ s.expire(jill)
+ eq_(jill.name, 'moonbeam')
+
+ def test_evaluate_synonym_string(self):
+ class Foo(object):
+ pass
+ mapper(Foo, self.tables.users, properties={
+ 'uname': synonym("name", )
+ })
+
+ s = Session()
+ jill = s.query(Foo).get(3)
+ s.query(Foo).update(
+ {'uname': 'moonbeam'},
+ synchronize_session='evaluate')
+ eq_(jill.uname, 'moonbeam')
+
+ def test_evaluate_synonym_attr(self):
+ class Foo(object):
+ pass
+ mapper(Foo, self.tables.users, properties={
+ 'uname': synonym("name", )
+ })
+
+ s = Session()
+ jill = s.query(Foo).get(3)
+ s.query(Foo).update(
+ {Foo.uname: 'moonbeam'},
+ synchronize_session='evaluate')
+ eq_(jill.uname, 'moonbeam')
+
+ def test_evaluate_double_synonym_attr(self):
+ class Foo(object):
+ pass
+ mapper(Foo, self.tables.users, properties={
+ 'uname': synonym("name"),
+ 'ufoo': synonym('uname')
+ })
+
+ s = Session()
+ jill = s.query(Foo).get(3)
+ s.query(Foo).update(
+ {Foo.ufoo: 'moonbeam'},
+ synchronize_session='evaluate')
+ eq_(jill.ufoo, 'moonbeam')
+
+ def test_evaluate_hybrid_attr(self):
+ from sqlalchemy.ext.hybrid import hybrid_property
+
+ class Foo(object):
+ @hybrid_property
+ def uname(self):
+ return self.name
+
+ mapper(Foo, self.tables.users)
+
+ s = Session()
+ jill = s.query(Foo).get(3)
+ s.query(Foo).update(
+ {Foo.uname: 'moonbeam'},
+ synchronize_session='evaluate')
+ eq_(jill.uname, 'moonbeam')
+
def test_delete(self):
User = self.classes.User
@@ -116,7 +222,8 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess = Session()
john, jack, jill, jane = sess.query(User).order_by(User.id).all()
- sess.query(User).filter(or_(User.name == 'john', User.name == 'jill')).\
+ sess.query(User).filter(
+ or_(User.name == 'john', User.name == 'jill')).\
delete(synchronize_session='evaluate')
assert john not in sess and jill not in sess
sess.rollback()
@@ -127,7 +234,8 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess = Session()
john, jack, jill, jane = sess.query(User).order_by(User.id).all()
- sess.query(User).filter(or_(User.name == 'john', User.name == 'jill')).\
+ sess.query(User).filter(
+ or_(User.name == 'john', User.name == 'jill')).\
delete(synchronize_session='fetch')
assert john not in sess and jill not in sess
sess.rollback()
@@ -139,7 +247,8 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess = Session()
john, jack, jill, jane = sess.query(User).order_by(User.id).all()
- sess.query(User).filter(or_(User.name == 'john', User.name == 'jill')).\
+ sess.query(User).filter(
+ or_(User.name == 'john', User.name == 'jill')).\
delete(synchronize_session=False)
assert john in sess and jill in sess
@@ -152,7 +261,8 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess = Session()
john, jack, jill, jane = sess.query(User).order_by(User.id).all()
- sess.query(User).filter(or_(User.name == 'john', User.name == 'jill')).\
+ sess.query(User).filter(
+ or_(User.name == 'john', User.name == 'jill')).\
delete(synchronize_session='fetch')
assert john not in sess and jill not in sess
@@ -202,7 +312,8 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess.query(User).filter(User.age > 27).\
update(
- {users.c.age: User.age - 10}, synchronize_session='evaluate')
+ {users.c.age_int: User.age - 10},
+ synchronize_session='evaluate')
eq_([john.age, jack.age, jill.age, jane.age], [25, 27, 19, 27])
eq_(sess.query(User.age).order_by(
User.id).all(), list(zip([25, 27, 19, 27])))
@@ -213,12 +324,25 @@ class UpdateDeleteTest(fixtures.MappedTest):
eq_(sess.query(User.age).order_by(
User.id).all(), list(zip([15, 27, 19, 27])))
+ def test_update_against_table_col(self):
+ User, users = self.classes.User, self.tables.users
+
+ sess = Session()
+ john, jack, jill, jane = sess.query(User).order_by(User.id).all()
+ eq_([john.age, jack.age, jill.age, jane.age], [25, 47, 29, 37])
+ sess.query(User).filter(User.age > 27).\
+ update(
+ {users.c.age_int: User.age - 10},
+ synchronize_session='evaluate')
+ eq_([john.age, jack.age, jill.age, jane.age], [25, 37, 19, 27])
+
def test_update_against_metadata(self):
User, users = self.classes.User, self.tables.users
sess = Session()
- sess.query(users).update({users.c.age: 29}, synchronize_session=False)
+ sess.query(users).update(
+ {users.c.age_int: 29}, synchronize_session=False)
eq_(sess.query(User.age).order_by(
User.id).all(), list(zip([29, 29, 29, 29])))
@@ -229,7 +353,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
john, jack, jill, jane = sess.query(User).order_by(User.id).all()
- sess.query(User).filter(text('age > :x')).params(x=29).\
+ sess.query(User).filter(text('age_int > :x')).params(x=29).\
update({'age': User.age - 10}, synchronize_session='fetch')
eq_([john.age, jack.age, jill.age, jane.age], [25, 37, 29, 27])
@@ -393,7 +517,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess.query(User).filter_by(name='j2').\
delete(
- synchronize_session='evaluate')
+ synchronize_session='evaluate')
assert john not in sess
def test_autoflush_before_fetch_delete(self):
@@ -405,7 +529,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess.query(User).filter_by(name='j2').\
delete(
- synchronize_session='fetch')
+ synchronize_session='fetch')
assert john not in sess
def test_evaluate_before_update(self):
@@ -447,7 +571,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess.query(User).filter_by(name='john').\
filter_by(age=25).\
delete(
- synchronize_session='evaluate')
+ synchronize_session='evaluate')
assert john not in sess
def test_fetch_before_delete(self):
@@ -460,7 +584,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess.query(User).filter_by(name='john').\
filter_by(age=25).\
delete(
- synchronize_session='fetch')
+ synchronize_session='fetch')
assert john not in sess
@@ -540,7 +664,8 @@ class UpdateDeleteIgnoresLoadersTest(fixtures.MappedTest):
sess = Session()
john, jack, jill, jane = sess.query(User).order_by(User.id).all()
- sess.query(User).options(joinedload(User.documents)).filter(User.age > 29).\
+ sess.query(User).options(
+ joinedload(User.documents)).filter(User.age > 29).\
update({'age': User.age - 10}, synchronize_session='fetch')
eq_([john.age, jack.age, jill.age, jane.age], [25, 37, 29, 27])
@@ -632,8 +757,7 @@ class UpdateDeleteFromTest(fixtures.MappedTest):
set([
(1, True), (2, None),
(3, None), (4, True),
- (5, True), (6, None),
- ])
+ (5, True), (6, None)])
)
def test_no_eval_against_multi_table_criteria(self):
@@ -666,8 +790,7 @@ class UpdateDeleteFromTest(fixtures.MappedTest):
set([
(1, True), (2, None),
(3, None), (4, True),
- (5, True), (6, None),
- ])
+ (5, True), (6, None)])
)
@testing.requires.update_where_target_in_subquery
@@ -690,8 +813,7 @@ class UpdateDeleteFromTest(fixtures.MappedTest):
set([
(1, True), (2, False),
(3, False), (4, True),
- (5, True), (6, False),
- ])
+ (5, True), (6, False)])
)
@testing.only_on('mysql', 'Multi table update')
@@ -706,8 +828,7 @@ class UpdateDeleteFromTest(fixtures.MappedTest):
filter(User.id == 2).update({
Document.samename: 'd_samename',
User.samename: 'u_samename'
- }, synchronize_session=False
- )
+ }, synchronize_session=False)
eq_(
s.query(User.id, Document.samename, User.samename).
filter(User.id == Document.user_id).
diff --git a/test/profiles.txt b/test/profiles.txt
index 12222b637..dc4d05264 100644
--- a/test/profiles.txt
+++ b/test/profiles.txt
@@ -1,34 +1,28 @@
# /Users/classic/dev/sqlalchemy/test/profiles.txt
# This file is written out on a per-environment basis.
-# For each test in aaa_profiling, the corresponding function and
+# For each test in aaa_profiling, the corresponding function and
# environment is located within this file. If it doesn't exist,
# the test is skipped.
-# If a callcount does exist, it is compared to what we received.
+# If a callcount does exist, it is compared to what we received.
# assertions are raised if the counts do not match.
-#
-# To add a new callcount test, apply the function_call_count
-# decorator and re-run the tests using the --write-profiles
+#
+# To add a new callcount test, apply the function_call_count
+# decorator and re-run the tests using the --write-profiles
# option - this file will be rewritten including the new count.
-#
+#
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_insert
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqlconnector_cextensions 74
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqlconnector_nocextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_cextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_nocextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_cextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_nocextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_cextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_nocextensions 74
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_mysql_mysqlconnector_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_mysql_mysqlconnector_nocextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_cextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_nocextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_cextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_mysqlconnector_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_mysqlconnector_nocextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_postgresql_psycopg2_cextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_postgresql_psycopg2_nocextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_cextensions 77
@@ -36,22 +30,16 @@ test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_noc
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqlconnector_cextensions 152
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqlconnector_nocextensions 152
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_cextensions 152
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_nocextensions 152
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_cextensions 152
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_nocextensions 152
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_cextensions 152
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_nocextensions 152
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_mysql_mysqlconnector_cextensions 165
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_mysql_mysqlconnector_nocextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_cextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_nocextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_cextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_nocextensions 165
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_mysqlconnector_cextensions 165
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_mysqlconnector_nocextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_cextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_nocextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_cextensions 165
@@ -59,22 +47,16 @@ test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_noc
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select_labels
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqlconnector_cextensions 186
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqlconnector_nocextensions 186
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_cextensions 186
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_nocextensions 186
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_cextensions 186
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_nocextensions 186
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_cextensions 186
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_nocextensions 186
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_mysql_mysqlconnector_cextensions 199
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_mysql_mysqlconnector_nocextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_cextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_nocextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_cextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_nocextensions 199
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_mysqlconnector_cextensions 199
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_mysqlconnector_nocextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_cextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_nocextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysqlite_cextensions 199
@@ -82,22 +64,16 @@ test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysql
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqlconnector_cextensions 79
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqlconnector_nocextensions 79
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_cextensions 79
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_nocextensions 79
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_cextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_nocextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_cextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_mysql_mysqlconnector_cextensions 80
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_mysql_mysqlconnector_nocextensions 80
test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_cextensions 78
test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_nocextensions 78
test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_cextensions 78
test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_mysqlconnector_cextensions 80
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_mysqlconnector_nocextensions 80
test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_postgresql_psycopg2_cextensions 78
test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_postgresql_psycopg2_nocextensions 78
test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_cextensions 78
@@ -105,22 +81,16 @@ test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_noc
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqlconnector_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqlconnector_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_cextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_cextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_nocextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_mysql_mysqlconnector_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_mysql_mysqlconnector_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_cextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_cextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_nocextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_mysqlconnector_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_mysqlconnector_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_cextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_pysqlite_cextensions 148
@@ -134,8 +104,6 @@ test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgre
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_nocextensions 4265
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_cextensions 4265
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_nocextensions 4260
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_mysql_mysqlconnector_cextensions 4266
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_mysql_mysqlconnector_nocextensions 4266
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_postgresql_psycopg2_nocextensions 4266
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_sqlite_pysqlite_cextensions 4266
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_sqlite_pysqlite_nocextensions 4266
@@ -150,8 +118,6 @@ test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove
test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_postgresql_psycopg2_nocextensions 6426
test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_cextensions 6426
test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_nocextensions 6426
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_mysql_mysqlconnector_cextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_mysql_mysqlconnector_nocextensions 6428
test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_postgresql_psycopg2_nocextensions 6428
test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_sqlite_pysqlite_cextensions 6428
test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_sqlite_pysqlite_nocextensions 6428
@@ -166,8 +132,8 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycop
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_nocextensions 40149
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_cextensions 19280
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_nocextensions 28297
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_mysql_mysqlconnector_cextensions 107603
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_mysql_mysqlconnector_nocextensions 116606
+
+
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_postgresql_psycopg2_nocextensions 29138
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_cextensions 32398
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_nocextensions 37327
@@ -182,8 +148,8 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_nocextensions 30054
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_cextensions 27144
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_nocextensions 30149
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_mysql_mysqlconnector_cextensions 53281
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_mysql_mysqlconnector_nocextensions 56284
+
+
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_postgresql_psycopg2_nocextensions 29068
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_cextensions 32197
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_nocextensions 31179
@@ -198,8 +164,8 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_postgresql_psycopg2_nocextensions 17988
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_cextensions 17988
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_nocextensions 17988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_mysql_mysqlconnector_cextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_mysql_mysqlconnector_nocextensions 18988
+
+
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_postgresql_psycopg2_nocextensions 18988
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_cextensions 18988
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_nocextensions 18988
@@ -214,8 +180,8 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_nocextensions 122553
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_cextensions 162315
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_nocextensions 165111
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_mysql_mysqlconnector_cextensions 200102
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_mysql_mysqlconnector_nocextensions 201852
+
+
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_postgresql_psycopg2_nocextensions 125352
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_cextensions 169566
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_nocextensions 171364
@@ -230,8 +196,8 @@ test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_nocextensions 19219
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_cextensions 22288
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_nocextensions 22530
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_mysql_mysqlconnector_cextensions 24956
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_mysql_mysqlconnector_nocextensions 24936
+
+
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_postgresql_psycopg2_nocextensions 19492
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_cextensions 23067
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_nocextensions 23271
@@ -246,8 +212,8 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_ce
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_nocextensions 1348
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_cextensions 1601
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_nocextensions 1626
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_mysql_mysqlconnector_cextensions 2215
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_mysql_mysqlconnector_nocextensions 2230
+
+
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_postgresql_psycopg2_nocextensions 1355
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_cextensions 1656
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_nocextensions 1671
@@ -262,8 +228,8 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_nocextensions 117,18
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_cextensions 117,18
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_nocextensions 117,18
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_mysql_mysqlconnector_cextensions 122,19
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_mysql_mysqlconnector_nocextensions 122,19
+
+
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_postgresql_psycopg2_nocextensions 122,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_cextensions 122,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_nocextensions 122,19
@@ -278,8 +244,8 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psy
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_nocextensions 91
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_cextensions 91
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_nocextensions 91
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_mysql_mysqlconnector_cextensions 78
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_mysql_mysqlconnector_nocextensions 78
+
+
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_postgresql_psycopg2_nocextensions 78
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_cextensions 78
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_nocextensions 78
@@ -294,8 +260,8 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_ps
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_psycopg2_nocextensions 31
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_cextensions 31
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_nocextensions 31
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_mysql_mysqlconnector_cextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_mysql_mysqlconnector_nocextensions 24
+
+
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_postgresql_psycopg2_nocextensions 24
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_cextensions 24
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_nocextensions 24
@@ -310,8 +276,8 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_po
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_postgresql_psycopg2_nocextensions 8
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_cextensions 8
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_nocextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_mysql_mysqlconnector_cextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_mysql_mysqlconnector_nocextensions 9
+
+
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_postgresql_psycopg2_nocextensions 9
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_cextensions 9
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_nocextensions 9
@@ -320,22 +286,22 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_po
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqlconnector_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqlconnector_nocextensions 45
+
+
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_nocextensions 45
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_nocextensions 45
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_nocextensions 45
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_mysql_mysqlconnector_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_mysql_mysqlconnector_nocextensions 43
+
+
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_nocextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_nocextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_mysqlconnector_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_mysqlconnector_nocextensions 43
+
+
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_nocextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_sqlite_pysqlite_cextensions 43
@@ -343,22 +309,22 @@ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqlconnector_cextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqlconnector_nocextensions 80
+
+
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_nocextensions 80
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_nocextensions 80
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_nocextensions 80
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_mysql_mysqlconnector_cextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_mysql_mysqlconnector_nocextensions 78
+
+
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_nocextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_nocextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_mysqlconnector_cextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_mysqlconnector_nocextensions 78
+
+
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_nocextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_sqlite_pysqlite_cextensions 78
@@ -366,22 +332,22 @@ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqlconnector_cextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqlconnector_nocextensions 15
+
+
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_cextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_nocextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_cextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_nocextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_cextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_nocextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_mysql_mysqlconnector_cextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_mysql_mysqlconnector_nocextensions 16
+
+
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_cextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_nocextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_cextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_nocextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_mysqlconnector_cextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_mysqlconnector_nocextensions 16
+
+
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_cextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_nocextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_sqlite_pysqlite_cextensions 16
@@ -389,22 +355,22 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_string
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqlconnector_cextensions 92959
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqlconnector_nocextensions 107979
+
+
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_cextensions 514
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_nocextensions 15534
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_cextensions 20501
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_nocextensions 35521
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_cextensions 457
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_nocextensions 15477
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_mysql_mysqlconnector_cextensions 109136
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_mysql_mysqlconnector_nocextensions 123136
+
+
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_cextensions 489
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_nocextensions 14489
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_cextensions 462
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_nocextensions 14462
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_mysqlconnector_cextensions 79876
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_mysqlconnector_nocextensions 93876
+
+
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_cextensions 489
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_nocextensions 14489
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_cextensions 462
@@ -412,22 +378,22 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_unicode
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqlconnector_cextensions 92959
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqlconnector_nocextensions 107979
+
+
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_cextensions 514
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_nocextensions 45534
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_cextensions 20501
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_nocextensions 35521
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_cextensions 457
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_nocextensions 15477
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_mysql_mysqlconnector_cextensions 109136
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_mysql_mysqlconnector_nocextensions 123136
+
+
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_cextensions 489
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_nocextensions 14489
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_cextensions 462
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_nocextensions 14462
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_mysqlconnector_cextensions 79876
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_mysqlconnector_nocextensions 93876
+
+
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_cextensions 489
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_nocextensions 14489
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_cextensions 462
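The profiles.txt header above describes the callcount workflow: decorate a test with function_call_count, then re-run with --write-profiles so the count for the current environment is recorded into this file. A minimal sketch of what such a test typically looks like, assuming the sqlalchemy.testing.profiling helpers and a hypothetical measured body; this sketch is not part of this commit:

    from sqlalchemy.testing import fixtures, profiling

    class ExampleCallCountTest(fixtures.TestBase):

        @profiling.function_call_count()
        def test_example(self):
            # measured body (hypothetical); the observed callcount is
            # compared against the matching line in test/profiles.txt,
            # or the test is skipped if no line exists for this environment
            list(range(100))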
diff --git a/test/requirements.py b/test/requirements.py
index 21dd2913e..daa20d05a 100644
--- a/test/requirements.py
+++ b/test/requirements.py
@@ -308,6 +308,17 @@ class DefaultRequirements(SuiteRequirements):
)
@property
+ def temp_table_names(self):
+ """target dialect supports listing of temporary table names"""
+
+ return only_on(['sqlite', 'oracle'])
+
+ @property
+ def temporary_views(self):
+ """target database supports temporary views"""
+ return only_on(['sqlite', 'postgresql'])
+
+ @property
def update_nowait(self):
"""Target database must support SELECT...FOR UPDATE NOWAIT"""
return skip_if(["firebird", "mssql", "mysql", "sqlite", "sybase"],
@@ -421,6 +432,12 @@ class DefaultRequirements(SuiteRequirements):
no_support('sybase', 'FIXME: guessing, needs confirmation'),
no_support('mssql+pymssql', 'no FreeTDS support'),
LambdaPredicate(
+ lambda config: against(config, "mysql+mysqlconnector") and
+ config.db.dialect._mysqlconnector_version_info > (2, 0) and
+ util.py2k,
+ "bug in mysqlconnector 2.0"
+ ),
+ LambdaPredicate(
lambda config: against(config, 'mssql+pyodbc') and
config.db.dialect.freetds and
config.db.dialect.freetds_driver_version < "0.91",
@@ -443,7 +460,7 @@ class DefaultRequirements(SuiteRequirements):
after an insert() construct executes.
"""
return fails_on_everything_except('mysql',
- 'sqlite+pysqlite',
+ 'sqlite+pysqlite', 'sqlite+pysqlcipher',
'sybase', 'mssql')
@property
@@ -460,7 +477,7 @@ class DefaultRequirements(SuiteRequirements):
"""
return skip_if('mssql+pymssql', 'crashes on pymssql') + \
fails_on_everything_except('mysql',
- 'sqlite+pysqlite')
+ 'sqlite+pysqlite', 'sqlite+pysqlcipher')
@property
def sane_multi_rowcount(self):
@@ -717,6 +734,14 @@ class DefaultRequirements(SuiteRequirements):
)
@property
+ def postgresql_test_dblink(self):
+ return skip_if(
+ lambda config: not config.file_config.has_option(
+ 'sqla_testing', 'postgres_test_db_link'),
+ "postgres_test_db_link option not specified in config"
+ )
+
+ @property
def percent_schema_names(self):
return skip_if(
[
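The temp_table_names and temporary_views requirements added above are consumed as decorators through testing.requires, the same pattern used for update_where_target_in_subquery earlier in this diff. A minimal sketch under that assumption, using the Inspector temp-table listing these exclusions appear to gate; not part of this commit:

    from sqlalchemy import inspect, testing
    from sqlalchemy.testing import fixtures

    class TempObjectsTest(fixtures.TestBase):

        @testing.requires.temp_table_names
        def test_list_temp_table_names(self):
            # runs only on backends the requirement allows
            # (sqlite and oracle, per the only_on() call above)
            insp = inspect(testing.db)
            assert isinstance(insp.get_temp_table_names(), list)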
diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py
index 4f8ced72c..bfafed599 100644
--- a/test/sql/test_compiler.py
+++ b/test/sql/test_compiler.py
@@ -238,6 +238,22 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
checkparams=params
)
+ def test_limit_offset_select_literal_binds(self):
+ stmt = select([1]).limit(5).offset(6)
+ self.assert_compile(
+ stmt,
+ "SELECT 1 LIMIT 5 OFFSET 6",
+ literal_binds=True
+ )
+
+ def test_limit_offset_compound_select_literal_binds(self):
+ stmt = select([1]).union(select([2])).limit(5).offset(6)
+ self.assert_compile(
+ stmt,
+ "SELECT 1 UNION SELECT 2 LIMIT 5 OFFSET 6",
+ literal_binds=True
+ )
+
def test_select_precol_compile_ordering(self):
s1 = select([column('x')]).select_from(text('a')).limit(5).as_scalar()
s2 = select([s1]).limit(10)
@@ -2169,6 +2185,27 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
"SELECT x + foo() OVER () AS anon_1"
)
+ # test a reference to a label that is in the referenced selectable;
+ # this resolves
+ expr = (table1.c.myid + 5).label('sum')
+ stmt = select([expr]).alias()
+ self.assert_compile(
+ select([stmt.c.sum, func.row_number().over(order_by=stmt.c.sum)]),
+ "SELECT anon_1.sum, row_number() OVER (ORDER BY anon_1.sum) "
+ "AS anon_2 FROM (SELECT mytable.myid + :myid_1 AS sum "
+ "FROM mytable) AS anon_1"
+ )
+
+ # test a reference to a label that's at the same level as the OVER
+ # in the columns clause; doesn't resolve
+ expr = (table1.c.myid + 5).label('sum')
+ self.assert_compile(
+ select([expr, func.row_number().over(order_by=expr)]),
+ "SELECT mytable.myid + :myid_1 AS sum, "
+ "row_number() OVER "
+ "(ORDER BY mytable.myid + :myid_1) AS anon_1 FROM mytable"
+ )
+
def test_date_between(self):
import datetime
table = Table('dt', metadata,
@@ -2399,6 +2436,23 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
dialect=dialect
)
+ def test_statement_hints(self):
+
+ stmt = select([table1.c.myid]).\
+ with_statement_hint("test hint one").\
+ with_statement_hint("test hint two", 'mysql')
+
+ self.assert_compile(
+ stmt,
+ "SELECT mytable.myid FROM mytable test hint one",
+ )
+
+ self.assert_compile(
+ stmt,
+ "SELECT mytable.myid FROM mytable test hint one test hint two",
+ dialect='mysql'
+ )
+
def test_literal_as_text_fromstring(self):
self.assert_compile(
and_(text("a"), text("b")),
diff --git a/test/sql/test_defaults.py b/test/sql/test_defaults.py
index abce600df..10e557b76 100644
--- a/test/sql/test_defaults.py
+++ b/test/sql/test_defaults.py
@@ -14,6 +14,7 @@ from sqlalchemy.dialects import sqlite
from sqlalchemy.testing import fixtures
from sqlalchemy.util import u, b
from sqlalchemy import util
+import itertools
t = f = f2 = ts = currenttime = metadata = default_generator = None
@@ -1278,3 +1279,67 @@ class UnicodeDefaultsTest(fixtures.TestBase):
"foobar", Unicode(32),
default=default
)
+
+
+class InsertFromSelectTest(fixtures.TestBase):
+ __backend__ = True
+
+ def _fixture(self):
+ data = Table(
+ 'data', self.metadata,
+ Column('x', Integer),
+ Column('y', Integer)
+ )
+ data.create()
+ testing.db.execute(data.insert(), {'x': 2, 'y': 5}, {'x': 7, 'y': 12})
+ return data
+
+ @testing.provide_metadata
+ def test_insert_from_select_override_defaults(self):
+ data = self._fixture()
+
+ table = Table('sometable', self.metadata,
+ Column('x', Integer),
+ Column('foo', Integer, default=12),
+ Column('y', Integer))
+
+ table.create()
+
+ sel = select([data.c.x, data.c.y])
+
+ ins = table.insert().\
+ from_select(["x", "y"], sel)
+ testing.db.execute(ins)
+
+ eq_(
+ testing.db.execute(table.select().order_by(table.c.x)).fetchall(),
+ [(2, 12, 5), (7, 12, 12)]
+ )
+
+ @testing.provide_metadata
+ def test_insert_from_select_fn_defaults(self):
+ data = self._fixture()
+
+ counter = itertools.count(1)
+
+ def foo(ctx):
+ return next(counter)
+
+ table = Table('sometable', self.metadata,
+ Column('x', Integer),
+ Column('foo', Integer, default=foo),
+ Column('y', Integer))
+
+ table.create()
+
+ sel = select([data.c.x, data.c.y])
+
+ ins = table.insert().\
+ from_select(["x", "y"], sel)
+ testing.db.execute(ins)
+
+ # counter is only called once!
+ eq_(
+ testing.db.execute(table.select().order_by(table.c.x)).fetchall(),
+ [(2, 1, 5), (7, 1, 12)]
+ )
diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py
index 9b7649e63..ec8d9b5c0 100644
--- a/test/sql/test_functions.py
+++ b/test/sql/test_functions.py
@@ -1,7 +1,8 @@
from sqlalchemy.testing import eq_
import datetime
from sqlalchemy import func, select, Integer, literal, DateTime, Table, \
- Column, Sequence, MetaData, extract, Date, String, bindparam
+ Column, Sequence, MetaData, extract, Date, String, bindparam, \
+ literal_column
from sqlalchemy.sql import table, column
from sqlalchemy import sql, util
from sqlalchemy.sql.compiler import BIND_TEMPLATES
@@ -15,6 +16,13 @@ from sqlalchemy.testing import fixtures, AssertsCompiledSQL, engines
from sqlalchemy.dialects import sqlite, postgresql, mysql, oracle
+table1 = table('mytable',
+ column('myid', Integer),
+ column('name', String),
+ column('description', String),
+ )
+
+
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -367,6 +375,108 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
expr = func.rows("foo").alias('bar')
assert len(expr.c)
+ def test_funcfilter_empty(self):
+ self.assert_compile(
+ func.count(1).filter(),
+ "count(:param_1)"
+ )
+
+ def test_funcfilter_criterion(self):
+ self.assert_compile(
+ func.count(1).filter(
+ table1.c.name != None
+ ),
+ "count(:param_1) FILTER (WHERE mytable.name IS NOT NULL)"
+ )
+
+ def test_funcfilter_compound_criterion(self):
+ self.assert_compile(
+ func.count(1).filter(
+ table1.c.name == None,
+ table1.c.myid > 0
+ ),
+ "count(:param_1) FILTER (WHERE mytable.name IS NULL AND "
+ "mytable.myid > :myid_1)"
+ )
+
+ def test_funcfilter_label(self):
+ self.assert_compile(
+ select([func.count(1).filter(
+ table1.c.description != None
+ ).label('foo')]),
+ "SELECT count(:param_1) FILTER (WHERE mytable.description "
+ "IS NOT NULL) AS foo FROM mytable"
+ )
+
+ def test_funcfilter_fromobj_fromfunc(self):
+ # test from_obj generation.
+ # from func:
+ self.assert_compile(
+ select([
+ func.max(table1.c.name).filter(
+ literal_column('description') != None
+ )
+ ]),
+ "SELECT max(mytable.name) FILTER (WHERE description "
+ "IS NOT NULL) AS anon_1 FROM mytable"
+ )
+
+ def test_funcfilter_fromobj_fromcriterion(self):
+ # from criterion:
+ self.assert_compile(
+ select([
+ func.count(1).filter(
+ table1.c.name == 'name'
+ )
+ ]),
+ "SELECT count(:param_1) FILTER (WHERE mytable.name = :name_1) "
+ "AS anon_1 FROM mytable"
+ )
+
+ def test_funcfilter_chaining(self):
+ # test chaining:
+ self.assert_compile(
+ select([
+ func.count(1).filter(
+ table1.c.name == 'name'
+ ).filter(
+ table1.c.description == 'description'
+ )
+ ]),
+ "SELECT count(:param_1) FILTER (WHERE "
+ "mytable.name = :name_1 AND mytable.description = :description_1) "
+ "AS anon_1 FROM mytable"
+ )
+
+ def test_funcfilter_windowing_orderby(self):
+ # test filtered windowing:
+ self.assert_compile(
+ select([
+ func.rank().filter(
+ table1.c.name > 'foo'
+ ).over(
+ order_by=table1.c.name
+ )
+ ]),
+ "SELECT rank() FILTER (WHERE mytable.name > :name_1) "
+ "OVER (ORDER BY mytable.name) AS anon_1 FROM mytable"
+ )
+
+ def test_funcfilter_windowing_orderby_partitionby(self):
+ self.assert_compile(
+ select([
+ func.rank().filter(
+ table1.c.name > 'foo'
+ ).over(
+ order_by=table1.c.name,
+ partition_by=['description']
+ )
+ ]),
+ "SELECT rank() FILTER (WHERE mytable.name > :name_1) "
+ "OVER (PARTITION BY mytable.description ORDER BY mytable.name) "
+ "AS anon_1 FROM mytable"
+ )
+
class ExecuteTest(fixtures.TestBase):
diff --git a/test/sql/test_generative.py b/test/sql/test_generative.py
index 013ba8082..6044cecb0 100644
--- a/test/sql/test_generative.py
+++ b/test/sql/test_generative.py
@@ -539,6 +539,11 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
expr2 = CloningVisitor().traverse(expr)
assert str(expr) == str(expr2)
+ def test_funcfilter(self):
+ expr = func.count(1).filter(t1.c.col1 > 1)
+ expr2 = CloningVisitor().traverse(expr)
+ assert str(expr) == str(expr2)
+
def test_adapt_union(self):
u = union(
t1.select().where(t1.c.col1 == 4),
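The new test_funcfilter case above checks that a filtered function survives a CloningVisitor traversal unchanged. A minimal sketch of that check, using an illustrative lightweight table in place of the test fixture:

    from sqlalchemy import func
    from sqlalchemy.sql import table, column
    from sqlalchemy.sql.visitors import CloningVisitor

    t1 = table('table1', column('col1'))
    expr = func.count(1).filter(t1.c.col1 > 1)

    # CloningVisitor walks the expression tree and returns an equivalent copy;
    # the new FunctionFilter element has to survive that traversal intact
    expr2 = CloningVisitor().traverse(expr)
    assert str(expr) == str(expr2)
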
diff --git a/test/sql/test_insert.py b/test/sql/test_insert.py
index 232c5758b..bd4eaa3e2 100644
--- a/test/sql/test_insert.py
+++ b/test/sql/test_insert.py
@@ -183,7 +183,7 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
checkparams={"name_1": "foo"}
)
- def test_insert_from_select_select_no_defaults(self):
+ def test_insert_from_select_no_defaults(self):
metadata = MetaData()
table = Table('sometable', metadata,
Column('id', Integer, primary_key=True),
@@ -191,7 +191,7 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
table1 = self.tables.mytable
sel = select([table1.c.myid]).where(table1.c.name == 'foo')
ins = table.insert().\
- from_select(["id"], sel)
+ from_select(["id"], sel, include_defaults=False)
self.assert_compile(
ins,
"INSERT INTO sometable (id) SELECT mytable.myid "
@@ -199,6 +199,84 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
checkparams={"name_1": "foo"}
)
+ def test_insert_from_select_with_sql_defaults(self):
+ metadata = MetaData()
+ table = Table('sometable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer, default=func.foobar()))
+ table1 = self.tables.mytable
+ sel = select([table1.c.myid]).where(table1.c.name == 'foo')
+ ins = table.insert().\
+ from_select(["id"], sel)
+ self.assert_compile(
+ ins,
+ "INSERT INTO sometable (id, foo) SELECT "
+ "mytable.myid, foobar() AS foobar_1 "
+ "FROM mytable WHERE mytable.name = :name_1",
+ checkparams={"name_1": "foo"}
+ )
+
+ def test_insert_from_select_with_python_defaults(self):
+ metadata = MetaData()
+ table = Table('sometable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer, default=12))
+ table1 = self.tables.mytable
+ sel = select([table1.c.myid]).where(table1.c.name == 'foo')
+ ins = table.insert().\
+ from_select(["id"], sel)
+ self.assert_compile(
+ ins,
+ "INSERT INTO sometable (id, foo) SELECT "
+ "mytable.myid, :foo AS anon_1 "
+ "FROM mytable WHERE mytable.name = :name_1",
+ # value filled in at execution time
+ checkparams={"name_1": "foo", "foo": None}
+ )
+
+ def test_insert_from_select_override_defaults(self):
+ metadata = MetaData()
+ table = Table('sometable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer, default=12))
+ table1 = self.tables.mytable
+ sel = select(
+ [table1.c.myid, table1.c.myid.label('q')]).where(
+ table1.c.name == 'foo')
+ ins = table.insert().\
+ from_select(["id", "foo"], sel)
+ self.assert_compile(
+ ins,
+ "INSERT INTO sometable (id, foo) SELECT "
+ "mytable.myid, mytable.myid AS q "
+ "FROM mytable WHERE mytable.name = :name_1",
+ checkparams={"name_1": "foo"}
+ )
+
+ def test_insert_from_select_fn_defaults(self):
+ metadata = MetaData()
+
+ def foo(ctx):
+ return 12
+
+ table = Table('sometable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer, default=foo))
+ table1 = self.tables.mytable
+ sel = select(
+ [table1.c.myid]).where(
+ table1.c.name == 'foo')
+ ins = table.insert().\
+ from_select(["id"], sel)
+ self.assert_compile(
+ ins,
+ "INSERT INTO sometable (id, foo) SELECT "
+ "mytable.myid, :foo AS anon_1 "
+ "FROM mytable WHERE mytable.name = :name_1",
+ # value filled in at execution time
+ checkparams={"name_1": "foo", "foo": None}
+ )
+
def test_insert_mix_select_values_exception(self):
table1 = self.tables.mytable
sel = select([table1.c.myid, table1.c.name]).where(
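The from_select tests added above cover how INSERT ... FROM SELECT now treats column defaults: SQL-level defaults are rendered inline in the generated SELECT, Python-level defaults become bound parameters filled in at execution time, named columns override their defaults, and include_defaults=False restores the old column-list-only behavior. A hedged sketch of that API (the 'src' and 'dest' tables are illustrative):

    from sqlalchemy import (MetaData, Table, Column, Integer, String,
                            DateTime, select, func)

    metadata = MetaData()
    src = Table('src', metadata,
                Column('id', Integer, primary_key=True),
                Column('name', String(50)))
    dest = Table('dest', metadata,
                 Column('id', Integer, primary_key=True),
                 Column('created', DateTime, default=func.now()),  # SQL-level default
                 Column('flag', Integer, default=12))               # Python-level default

    sel = select([src.c.id]).where(src.c.name == 'x')

    # unnamed columns with defaults are pulled into the INSERT and the
    # generated SELECT: func.now() is rendered inline, while the scalar
    # default becomes a bound parameter supplied at execution time
    ins = dest.insert().from_select(['id'], sel)

    # the previous behavior -- insert only the named columns -- is still
    # available via include_defaults=False
    ins_plain = dest.insert().from_select(['id'], sel, include_defaults=False)
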
diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py
index a209cdd7a..21eed3abd 100644
--- a/test/sql/test_metadata.py
+++ b/test/sql/test_metadata.py
@@ -16,7 +16,7 @@ from sqlalchemy import testing
from sqlalchemy.testing import ComparesTables, AssertsCompiledSQL
from sqlalchemy.testing import eq_, is_, mock
from contextlib import contextmanager
-
+from sqlalchemy import util
class MetaDataTest(fixtures.TestBase, ComparesTables):
@@ -679,6 +679,86 @@ class ToMetaDataTest(fixtures.TestBase, ComparesTables):
eq_(str(table_c.join(table2_c).onclause),
'myschema.mytable.myid = myschema.othertable.myid')
+ def test_change_name_retain_metadata(self):
+ meta = MetaData()
+
+ table = Table('mytable', meta,
+ Column('myid', Integer, primary_key=True),
+ Column('name', String(40), nullable=True),
+ Column('description', String(30),
+ CheckConstraint("description='hi'")),
+ UniqueConstraint('name'),
+ schema='myschema',
+ )
+
+ table2 = table.tometadata(table.metadata, name='newtable')
+ table3 = table.tometadata(table.metadata, schema='newschema',
+ name='newtable')
+
+ assert table.metadata is table2.metadata
+ assert table.metadata is table3.metadata
+ eq_((table.name, table2.name, table3.name),
+ ('mytable', 'newtable', 'newtable'))
+ eq_((table.key, table2.key, table3.key),
+ ('myschema.mytable', 'myschema.newtable', 'newschema.newtable'))
+
+ def test_change_name_change_metadata(self):
+ meta = MetaData()
+ meta2 = MetaData()
+
+ table = Table('mytable', meta,
+ Column('myid', Integer, primary_key=True),
+ Column('name', String(40), nullable=True),
+ Column('description', String(30),
+ CheckConstraint("description='hi'")),
+ UniqueConstraint('name'),
+ schema='myschema',
+ )
+
+ table2 = table.tometadata(meta2, name='newtable')
+
+ assert table.metadata is not table2.metadata
+ eq_((table.name, table2.name),
+ ('mytable', 'newtable'))
+ eq_((table.key, table2.key),
+ ('myschema.mytable', 'myschema.newtable'))
+
+ def test_change_name_selfref_fk_moves(self):
+ meta = MetaData()
+
+ referenced = Table('ref', meta,
+ Column('id', Integer, primary_key=True),
+ )
+ table = Table('mytable', meta,
+ Column('id', Integer, primary_key=True),
+ Column('parent_id', ForeignKey('mytable.id')),
+ Column('ref_id', ForeignKey('ref.id'))
+ )
+
+ table2 = table.tometadata(table.metadata, name='newtable')
+ assert table.metadata is table2.metadata
+ assert table2.c.ref_id.references(referenced.c.id)
+ assert table2.c.parent_id.references(table2.c.id)
+
+ def test_change_name_selfref_fk_moves_w_schema(self):
+ meta = MetaData()
+
+ referenced = Table('ref', meta,
+ Column('id', Integer, primary_key=True),
+ )
+ table = Table('mytable', meta,
+ Column('id', Integer, primary_key=True),
+ Column('parent_id', ForeignKey('mytable.id')),
+ Column('ref_id', ForeignKey('ref.id'))
+ )
+
+ table2 = table.tometadata(
+ table.metadata, name='newtable', schema='newschema')
+ ref2 = referenced.tometadata(table.metadata, schema='newschema')
+ assert table.metadata is table2.metadata
+ assert table2.c.ref_id.references(ref2.c.id)
+ assert table2.c.parent_id.references(table2.c.id)
+
def _assert_fk(self, t2, schema, expected, referred_schema_fn=None):
m2 = MetaData()
existing_schema = t2.schema
@@ -2126,7 +2206,7 @@ class ColumnDefinitionTest(AssertsCompiledSQL, fixtures.TestBase):
assert_raises_message(
exc.ArgumentError,
- "Column object already assigned to Table 't'",
+ "Column object 'x' already assigned to Table 't'",
Table, 'q', MetaData(), c)
def test_incomplete_key(self):
@@ -2707,7 +2787,7 @@ class DialectKWArgTest(fixtures.TestBase):
lambda arg: "goofy_%s" % arg):
with self._fixture():
idx = Index('a', 'b')
- idx.kwargs[u'participating_x'] = 7
+ idx.kwargs[util.u('participating_x')] = 7
eq_(
list(idx.dialect_kwargs),
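The tometadata tests added earlier in this file's diff exercise the new name= (and schema=) arguments, which copy a Table under a different name, optionally within the same MetaData, with self-referential foreign keys following the rename. A minimal sketch, with an illustrative table:

    from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey

    meta = MetaData()
    node = Table('node', meta,
                 Column('id', Integer, primary_key=True),
                 Column('parent_id', ForeignKey('node.id')))

    # copy the table into the same MetaData under a new name; the
    # self-referential foreign key is re-pointed at the copy
    node_copy = node.tometadata(meta, name='node_archive')
    assert node_copy.c.parent_id.references(node_copy.c.id)

    # name= can be combined with schema= and/or a different MetaData
    other = MetaData()
    node_hist = node.tometadata(other, name='node_archive', schema='history')
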
diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py
index 5c401845b..e8ad88511 100644
--- a/test/sql/test_operators.py
+++ b/test/sql/test_operators.py
@@ -1,4 +1,4 @@
-from sqlalchemy.testing import fixtures, eq_, is_
+from sqlalchemy.testing import fixtures, eq_, is_, is_not_
from sqlalchemy import testing
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.sql import column, desc, asc, literal, collate, null, true, false
@@ -778,6 +778,25 @@ class ConjunctionTest(fixtures.TestBase, testing.AssertsCompiledSQL):
"SELECT x WHERE NOT NULL"
)
+ def test_constant_non_singleton(self):
+ is_not_(null(), null())
+ is_not_(false(), false())
+ is_not_(true(), true())
+
+ def test_constant_render_distinct(self):
+ self.assert_compile(
+ select([null(), null()]),
+ "SELECT NULL AS anon_1, NULL AS anon_2"
+ )
+ self.assert_compile(
+ select([true(), true()]),
+ "SELECT true AS anon_1, true AS anon_2"
+ )
+ self.assert_compile(
+ select([false(), false()]),
+ "SELECT false AS anon_1, false AS anon_2"
+ )
+
class OperatorPrecedenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
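The tests added above reflect that null(), true() and false() now return a fresh element on each call rather than a shared singleton, so repeated occurrences in one statement compile as distinct anonymously labeled columns. A quick sketch:

    from sqlalchemy import select
    from sqlalchemy.sql import null, true, false

    # each call produces a new object rather than a shared singleton ...
    assert null() is not null()
    assert true() is not true()
    assert false() is not false()

    # ... so repeated occurrences are labeled independently
    print(select([null(), null()]))
    # roughly: SELECT NULL AS anon_1, NULL AS anon_2
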
diff --git a/test/sql/test_query.py b/test/sql/test_query.py
index 430c3fe7c..2f13486eb 100644
--- a/test/sql/test_query.py
+++ b/test/sql/test_query.py
@@ -81,11 +81,10 @@ class QueryTest(fixtures.TestBase):
assert_raises_message(
exc.StatementError,
- r"A value is required for bind parameter 'user_name', in "
+ r"\(sqlalchemy.exc.InvalidRequestError\) A value is required for "
+ "bind parameter 'user_name', in "
"parameter group 2 "
- "\(original cause: (sqlalchemy.exc.)?InvalidRequestError: A "
- "value is required for bind parameter 'user_name', in "
- "parameter group 2\) u?'INSERT INTO query_users",
+ r"\[SQL: u?'INSERT INTO query_users",
users.insert().execute,
{'user_id': 7, 'user_name': 'jack'},
{'user_id': 8, 'user_name': 'ed'},
@@ -295,9 +294,6 @@ class QueryTest(fixtures.TestBase):
l.append(row)
self.assert_(len(l) == 3)
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
@testing.requires.subqueries
def test_anonymous_rows(self):
users.insert().execute(
@@ -509,9 +505,6 @@ class QueryTest(fixtures.TestBase):
lambda: row[accessor]
)
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
@testing.requires.boolean_col_expressions
def test_or_and_as_columns(self):
true, false = literal(True), literal(False)
@@ -570,9 +563,6 @@ class QueryTest(fixtures.TestBase):
):
eq_(expr.execute().fetchall(), result)
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
@testing.requires.mod_operator_as_percent_sign
@testing.emits_warning('.*now automatically escapes.*')
def test_percents_in_text(self):
@@ -623,9 +613,6 @@ class QueryTest(fixtures.TestBase):
c = testing.db.connect()
assert c.execute(s, id=7).fetchall()[0]['user_id'] == 7
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
def test_repeated_bindparams(self):
"""Tests that a BindParam can be used more than once.
@@ -1319,9 +1306,6 @@ class QueryTest(fixtures.TestBase):
# Null values are not outside any set
assert len(r) == 0
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
@testing.emits_warning('.*empty sequence.*')
@testing.fails_on('firebird', "uses sql-92 rules")
@testing.fails_on('sybase', "uses sql-92 rules")
@@ -1348,9 +1332,6 @@ class QueryTest(fixtures.TestBase):
r = s.execute(search_key=None).fetchall()
assert len(r) == 0
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
@testing.emits_warning('.*empty sequence.*')
def test_literal_in(self):
"""similar to test_bind_in but use a bind with a value."""
@@ -2510,9 +2491,6 @@ class OperatorTest(fixtures.TestBase):
metadata.drop_all()
# TODO: seems like more tests warranted for this setup.
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
def test_modulo(self):
eq_(
select([flds.c.intcol % 3],
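The updated assertion at the top of this file's diff reflects the reworked StatementError string format: the wrapped exception class now leads the message and the offending statement is appended in an "[SQL: ...]" suffix. A rough, self-contained sketch of triggering that error, using an in-memory SQLite engine and a table modeled on the test fixture (both illustrative, not part of the diff):

    from sqlalchemy import (create_engine, MetaData, Table, Column,
                            Integer, String, exc)

    engine = create_engine('sqlite://')
    meta = MetaData()
    users = Table('query_users', meta,
                  Column('user_id', Integer, primary_key=True),
                  Column('user_name', String(20)))
    meta.create_all(engine)

    try:
        # the second parameter set is missing 'user_name'
        engine.execute(users.insert(),
                       {'user_id': 7, 'user_name': 'jack'},
                       {'user_id': 8})
    except exc.StatementError as err:
        # the message now reads along the lines of:
        #   (sqlalchemy.exc.InvalidRequestError) A value is required for bind
        #   parameter 'user_name', in parameter group 1 [SQL: 'INSERT INTO ...']
        print(str(err))
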
diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py
index a3b2b0e93..99d0cbe76 100644
--- a/test/sql/test_selectable.py
+++ b/test/sql/test_selectable.py
@@ -5,6 +5,7 @@ from sqlalchemy.testing import eq_, assert_raises, \
from sqlalchemy import *
from sqlalchemy.testing import fixtures, AssertsCompiledSQL, \
AssertsExecutionResults
+from sqlalchemy.sql import elements
from sqlalchemy import testing
from sqlalchemy.sql import util as sql_util, visitors, expression
from sqlalchemy import exc
@@ -1934,6 +1935,29 @@ class AnnotationsTest(fixtures.TestBase):
assert (c2 == 5).left._annotations == {"foo": "bar", "bat": "hoho"}
+class ReprTest(fixtures.TestBase):
+ def test_ensure_repr_elements(self):
+ for obj in [
+ elements.Cast(1, 2),
+ elements.TypeClause(String()),
+ elements.ColumnClause('x'),
+ elements.BindParameter('q'),
+ elements.Null(),
+ elements.True_(),
+ elements.False_(),
+ elements.ClauseList(),
+ elements.BooleanClauseList.and_(),
+ elements.Tuple(),
+ elements.Case([]),
+ elements.Extract('foo', column('x')),
+ elements.UnaryExpression(column('x')),
+ elements.Grouping(column('x')),
+ elements.Over(func.foo()),
+ elements.Label('q', column('x')),
+ ]:
+ repr(obj)
+
+
class WithLabelsTest(fixtures.TestBase):
def _assert_labels_warning(self, s):