summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--doc/build/changelog/unreleased_14/6023.rst6
-rw-r--r--doc/build/orm/tutorial.rst4
-rw-r--r--lib/sqlalchemy/dialects/postgresql/array.py2
-rw-r--r--lib/sqlalchemy/dialects/postgresql/pg8000.py8
-rw-r--r--lib/sqlalchemy/orm/strategies.py1
-rw-r--r--lib/sqlalchemy/sql/type_api.py3
-rw-r--r--lib/sqlalchemy/testing/warnings.py2
-rw-r--r--test/dialect/postgresql/test_types.py28
-rw-r--r--test/ext/test_associationproxy.py4
-rw-r--r--test/orm/test_backref_mutations.py7
-rw-r--r--test/orm/test_cascade.py29
-rw-r--r--test/orm/test_deprecations.py247
-rw-r--r--test/orm/test_expire.py5
-rw-r--r--test/orm/test_lazy_relations.py2
-rw-r--r--test/orm/test_load_on_fks.py339
-rw-r--r--test/orm/test_mapper.py4
-rw-r--r--test/orm/test_merge.py4
-rw-r--r--test/orm/test_naturalpks.py2
-rw-r--r--test/orm/test_session.py5
-rw-r--r--test/orm/test_versioning.py2
20 files changed, 510 insertions, 194 deletions
diff --git a/doc/build/changelog/unreleased_14/6023.rst b/doc/build/changelog/unreleased_14/6023.rst
new file mode 100644
index 000000000..88d9777ba
--- /dev/null
+++ b/doc/build/changelog/unreleased_14/6023.rst
@@ -0,0 +1,6 @@
+.. change::
+ :tags: postgresql, pg8000
+ :tickets: 6023
+
+ Improve array handling when using PostgreSQL with the
+ pg8000 dialect.
diff --git a/doc/build/orm/tutorial.rst b/doc/build/orm/tutorial.rst
index dbfc84b6c..fb5202342 100644
--- a/doc/build/orm/tutorial.rst
+++ b/doc/build/orm/tutorial.rst
@@ -347,6 +347,10 @@ connect it to the :class:`~sqlalchemy.orm.session.Session` using
>>> Session.configure(bind=engine) # once engine is available
+.. Setup code, not for display - ensure no cascade_backrefs warnings occur
+
+ >>> Session.configure(future=True)
+
.. sidebar:: Session Lifecycle Patterns
The question of when to make a :class:`.Session` depends a lot on what
diff --git a/lib/sqlalchemy/dialects/postgresql/array.py b/lib/sqlalchemy/dialects/postgresql/array.py
index 9659d31b9..0cb574dac 100644
--- a/lib/sqlalchemy/dialects/postgresql/array.py
+++ b/lib/sqlalchemy/dialects/postgresql/array.py
@@ -375,7 +375,7 @@ class ARRAY(sqltypes.ARRAY):
if value is None:
return value
# isinstance(value, util.string_types) is required to handle
- # the # case where a TypeDecorator for and Array of Enum is
+ # the case where a TypeDecorator for an Array of Enum is
# used like was required in sa < 1.3.17
return super_rp(
handle_raw_string(value)
diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py
index d42dd9560..a94f9dcdb 100644
--- a/lib/sqlalchemy/dialects/postgresql/pg8000.py
+++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py
@@ -93,6 +93,8 @@ import decimal
import re
from uuid import UUID as _python_UUID
+from .array import ARRAY as PGARRAY
+from .base import _ColonCast
from .base import _DECIMAL_TYPES
from .base import _FLOAT_TYPES
from .base import _INT_TYPES
@@ -256,6 +258,11 @@ class _PGBoolean(sqltypes.Boolean):
return dbapi.BOOLEAN
+class _PGARRAY(PGARRAY):
+ def bind_expression(self, bindvalue):
+ return _ColonCast(bindvalue, self)
+
+
_server_side_id = util.counter()
@@ -384,6 +391,7 @@ class PGDialect_pg8000(PGDialect):
sqltypes.SmallInteger: _PGSmallInteger,
sqltypes.BigInteger: _PGBigInteger,
sqltypes.Enum: _PGEnum,
+ sqltypes.ARRAY: _PGARRAY,
},
)
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index 4f361be2c..2a283caad 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -797,7 +797,6 @@ class LazyLoader(AbstractRelationshipLoader, util.MemoizedSlots):
)
def _load_for_state(self, state, passive, loadopt=None, extra_criteria=()):
-
if not state.key and (
(
not self.parent_property.load_on_pending
diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py
index acf88f0da..2a4688bcc 100644
--- a/lib/sqlalchemy/sql/type_api.py
+++ b/lib/sqlalchemy/sql/type_api.py
@@ -633,7 +633,8 @@ class TypeEngine(Traversible):
try:
return dialect._type_memos[self]["impl"]
except KeyError:
- return self._dialect_info(dialect)["impl"]
+ pass
+ return self._dialect_info(dialect)["impl"]
def _unwrapped_dialect_impl(self, dialect):
"""Return the 'unwrapped' dialect impl for this type.
diff --git a/lib/sqlalchemy/testing/warnings.py b/lib/sqlalchemy/testing/warnings.py
index 8753399f3..ae2c7916d 100644
--- a/lib/sqlalchemy/testing/warnings.py
+++ b/lib/sqlalchemy/testing/warnings.py
@@ -74,8 +74,6 @@ def setup_filters():
# ORM Session
#
r"The Session.autocommit parameter is deprecated ",
- r".*object is being merged into a Session along the backref "
- "cascade path",
r"The merge_result\(\) method is superseded by the "
r"merge_frozen_result\(\)",
r"The Session.begin.subtransactions flag is deprecated",
diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py
index dd0a1be0f..d1c0361e4 100644
--- a/test/dialect/postgresql/test_types.py
+++ b/test/dialect/postgresql/test_types.py
@@ -1443,7 +1443,6 @@ class ArrayRoundTripTest(object):
__only_on__ = "postgresql"
__backend__ = True
- __unsupported_on__ = ("postgresql+pg8000",)
ARRAY = postgresql.ARRAY
@@ -1962,14 +1961,8 @@ class ArrayRoundTripTest(object):
(sqltypes.Unicode, unicode_values),
(postgresql.JSONB, json_values),
(sqltypes.Boolean, lambda x: [False] + [True] * x),
- (
- sqltypes.LargeBinary,
- binary_values,
- ),
- (
- postgresql.BYTEA,
- binary_values,
- ),
+ (sqltypes.LargeBinary, binary_values),
+ (postgresql.BYTEA, binary_values),
(
postgresql.INET,
lambda x: [
@@ -2047,6 +2040,7 @@ class ArrayRoundTripTest(object):
(postgresql.ENUM(AnEnum), enum_values),
(sqltypes.Enum(AnEnum, native_enum=True), enum_values),
(sqltypes.Enum(AnEnum, native_enum=False), enum_values),
+ (postgresql.ENUM(AnEnum, native_enum=True), enum_values),
]
if not exclude_json:
@@ -2057,6 +2051,22 @@ class ArrayRoundTripTest(object):
]
)
+ _pg8000_skip_types = {
+ postgresql.HSTORE, # result not parsed; returned as string
+ }
+ for i in range(len(elements)):
+ elem = elements[i]
+ if (
+ elem[0] in _pg8000_skip_types
+ or type(elem[0]) in _pg8000_skip_types
+ ):
+ elem += (
+ testing.skip_if(
+ "postgresql+pg8000", "type not supported by pg8000"
+ ),
+ )
+ elements[i] = elem
+
return testing.combinations_list(
elements, argnames="type_,gen", id_="na"
)
diff --git a/test/ext/test_associationproxy.py b/test/ext/test_associationproxy.py
index 331213ffe..0b05fe038 100644
--- a/test/ext/test_associationproxy.py
+++ b/test/ext/test_associationproxy.py
@@ -146,7 +146,9 @@ class AutoFlushTest(fixtures.MappedTest):
collection_class, is_dict=is_dict
)
- session = Session(testing.db, autoflush=True, expire_on_commit=True)
+ session = Session(
+ testing.db, autoflush=True, expire_on_commit=True, future=True
+ )
p1 = Parent()
c1 = Child("c1")
diff --git a/test/orm/test_backref_mutations.py b/test/orm/test_backref_mutations.py
index fd5d908cf..0f10cff24 100644
--- a/test/orm/test_backref_mutations.py
+++ b/test/orm/test_backref_mutations.py
@@ -43,7 +43,7 @@ class O2MCollectionTest(_fixtures.FixtureTest):
def test_collection_move_hitslazy(self):
User, Address = self.classes.User, self.classes.Address
- sess = fixture_session()
+ sess = fixture_session(future=True)
a1 = Address(email_address="address1")
a2 = Address(email_address="address2")
a3 = Address(email_address="address3")
@@ -667,7 +667,7 @@ class O2OScalarOrphanTest(_fixtures.FixtureTest):
def test_m2o_event(self):
User, Address = self.classes.User, self.classes.Address
- sess = fixture_session()
+ sess = fixture_session(future=True)
a1 = Address(email_address="address1")
u1 = User(name="jack", address=a1)
@@ -678,6 +678,7 @@ class O2OScalarOrphanTest(_fixtures.FixtureTest):
u2 = User(name="ed")
# the _SingleParent extension sets the backref get to "active" !
# u1 gets loaded and deleted
+ sess.add(u2)
u2.address = a1
sess.commit()
assert sess.query(User).count() == 1
@@ -712,7 +713,7 @@ class M2MCollectionMoveTest(_fixtures.FixtureTest):
Item, Keyword = (self.classes.Item, self.classes.Keyword)
- session = fixture_session(autoflush=False)
+ session = fixture_session(autoflush=False, future=True)
i1 = Item(description="i1")
session.add(i1)
diff --git a/test/orm/test_cascade.py b/test/orm/test_cascade.py
index 8749a0147..cd7e7c111 100644
--- a/test/orm/test_cascade.py
+++ b/test/orm/test_cascade.py
@@ -4485,7 +4485,15 @@ class ViewonlyFlagWarningTest(fixtures.MappedTest):
)
-class CollectionCascadesDespiteBackrefTest(fixtures.TestBase):
+class CollectionCascadesNoBackrefTest(fixtures.TestBase):
+ """test the removal of cascade_backrefs behavior
+
+
+ see test/orm/test_deprecations.py::CollectionCascadesDespiteBackrefTest
+ for the deprecated version
+
+ """
+
@testing.fixture
def cascade_fixture(self, registry):
def go(collection_class):
@@ -4495,7 +4503,10 @@ class CollectionCascadesDespiteBackrefTest(fixtures.TestBase):
id = Column(Integer, primary_key=True)
bs = relationship(
- "B", backref="a", collection_class=collection_class
+ "B",
+ backref="a",
+ collection_class=collection_class,
+ cascade_backrefs=False,
)
@registry.mapped
@@ -4536,12 +4547,8 @@ class CollectionCascadesDespiteBackrefTest(fixtures.TestBase):
b1.a = a1
b3.a = a1
- if future:
- assert b1 not in s
- assert b3 not in s
- else:
- assert b1 in s
- assert b3 in s
+ assert b1 not in s
+ assert b3 not in s
if methname == "__setitem__":
meth = getattr(a1.bs, methname)
@@ -4563,8 +4570,4 @@ class CollectionCascadesDespiteBackrefTest(fixtures.TestBase):
assert b1 in s
assert b2 in s
- if future:
- assert b3 not in s # the event never triggers from reverse
- else:
- # old behavior
- assert b3 in s
+ assert b3 not in s # the event never triggers from reverse
diff --git a/test/orm/test_deprecations.py b/test/orm/test_deprecations.py
index 101dc693c..5abce4498 100644
--- a/test/orm/test_deprecations.py
+++ b/test/orm/test_deprecations.py
@@ -59,6 +59,7 @@ from sqlalchemy.orm import undefer
from sqlalchemy.orm import with_loader_criteria
from sqlalchemy.orm import with_parent
from sqlalchemy.orm import with_polymorphic
+from sqlalchemy.orm.collections import attribute_mapped_collection
from sqlalchemy.orm.collections import collection
from sqlalchemy.orm.util import polymorphic_union
from sqlalchemy.sql import elements
@@ -8559,3 +8560,249 @@ class ParentTest(QueryTest, AssertsCompiledSQL):
"FROM addresses WHERE :param_2 = addresses.user_id) AS anon_1",
checkparams={"param_1": 7, "param_2": 8},
)
+
+
+class CollectionCascadesDespiteBackrefTest(fixtures.TestBase):
+ """test old cascade_backrefs behavior
+
+ see test/orm/test_cascade.py::CollectionCascadesNoBackrefTest
+ for the future version
+
+ """
+
+ @testing.fixture
+ def cascade_fixture(self, registry):
+ def go(collection_class):
+ @registry.mapped
+ class A(object):
+ __tablename__ = "a"
+
+ id = Column(Integer, primary_key=True)
+ bs = relationship(
+ "B", backref="a", collection_class=collection_class
+ )
+
+ @registry.mapped
+ class B(object):
+ __tablename__ = "b_"
+ id = Column(Integer, primary_key=True)
+ a_id = Column(ForeignKey("a.id"))
+ key = Column(String)
+
+ return A, B
+
+ yield go
+
+ @testing.combinations(
+ (set, "add"),
+ (list, "append"),
+ (attribute_mapped_collection("key"), "__setitem__"),
+ (attribute_mapped_collection("key"), "setdefault"),
+ (attribute_mapped_collection("key"), "update_dict"),
+ (attribute_mapped_collection("key"), "update_kw"),
+ argnames="collection_class,methname",
+ )
+ @testing.combinations((True,), (False,), argnames="future")
+ def test_cascades_on_collection(
+ self, cascade_fixture, collection_class, methname, future
+ ):
+ A, B = cascade_fixture(collection_class)
+
+ s = Session(future=future)
+
+ a1 = A()
+ s.add(a1)
+
+ b1 = B(key="b1")
+ b2 = B(key="b2")
+ b3 = B(key="b3")
+
+ if future:
+ dep_ctx = util.nullcontext
+ else:
+
+ def dep_ctx():
+ return assertions.expect_deprecated_20(
+ '"B" object is being merged into a Session along the '
+ 'backref cascade path for relationship "A.bs"'
+ )
+
+ with dep_ctx():
+ b1.a = a1
+ with dep_ctx():
+ b3.a = a1
+
+ if future:
+ assert b1 not in s
+ assert b3 not in s
+ else:
+ assert b1 in s
+ assert b3 in s
+
+ if methname == "__setitem__":
+ meth = getattr(a1.bs, methname)
+ meth(b1.key, b1)
+ meth(b2.key, b2)
+ elif methname == "setdefault":
+ meth = getattr(a1.bs, methname)
+ meth(b1.key, b1)
+ meth(b2.key, b2)
+ elif methname == "update_dict" and isinstance(a1.bs, dict):
+ a1.bs.update({b1.key: b1, b2.key: b2})
+ elif methname == "update_kw" and isinstance(a1.bs, dict):
+ a1.bs.update(b1=b1, b2=b2)
+ else:
+ meth = getattr(a1.bs, methname)
+ meth(b1)
+ meth(b2)
+
+ assert b1 in s
+ assert b2 in s
+
+ # future version:
+ if future:
+ assert b3 not in s # the event never triggers from reverse
+ else:
+ # old behavior
+ assert b3 in s
+
+
+class LoadOnFKsTest(fixtures.DeclarativeMappedTest):
+ @classmethod
+ def setup_classes(cls):
+ Base = cls.DeclarativeBasic
+
+ class Parent(Base):
+ __tablename__ = "parent"
+ __table_args__ = {"mysql_engine": "InnoDB"}
+
+ id = Column(
+ Integer, primary_key=True, test_needs_autoincrement=True
+ )
+
+ class Child(Base):
+ __tablename__ = "child"
+ __table_args__ = {"mysql_engine": "InnoDB"}
+
+ id = Column(
+ Integer, primary_key=True, test_needs_autoincrement=True
+ )
+ parent_id = Column(Integer, ForeignKey("parent.id"))
+
+ parent = relationship(Parent, backref=backref("children"))
+
+ @testing.fixture
+ def parent_fixture(self, connection):
+ Parent, Child = self.classes("Parent", "Child")
+
+ sess = fixture_session(bind=connection, autoflush=False)
+ p1 = Parent()
+ p2 = Parent()
+ c1, c2 = Child(), Child()
+ c1.parent = p1
+ sess.add_all([p1, p2])
+ assert c1 in sess
+
+ yield sess, p1, p2, c1, c2
+
+ sess.close()
+
+ def test_enable_rel_loading_on_persistent_allows_backref_event(
+ self, parent_fixture
+ ):
+ sess, p1, p2, c1, c2 = parent_fixture
+ Parent, Child = self.classes("Parent", "Child")
+
+ c3 = Child()
+ sess.enable_relationship_loading(c3)
+ c3.parent_id = p1.id
+ with assertions.expect_deprecated_20(
+ '"Child" object is being merged into a Session along the '
+ 'backref cascade path for relationship "Parent.children"'
+ ):
+ c3.parent = p1
+
+ # backref fired off when c3.parent was set,
+ # because the "old" value was None
+ # change as of [ticket:3708]
+ assert c3 in p1.children
+
+ def test_enable_rel_loading_allows_backref_event(self, parent_fixture):
+ sess, p1, p2, c1, c2 = parent_fixture
+ Parent, Child = self.classes("Parent", "Child")
+
+ c3 = Child()
+ sess.enable_relationship_loading(c3)
+ c3.parent_id = p1.id
+
+ with assertions.expect_deprecated_20(
+ '"Child" object is being merged into a Session along the '
+ 'backref cascade path for relationship "Parent.children"'
+ ):
+ c3.parent = p1
+
+ # backref fired off when c3.parent was set,
+ # because the "old" value was None
+ # change as of [ticket:3708]
+ assert c3 in p1.children
+
+
+class LazyTest(_fixtures.FixtureTest):
+ run_inserts = "once"
+ run_deletes = None
+
+ def test_backrefs_dont_lazyload(self):
+ users, Address, addresses, User = (
+ self.tables.users,
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User,
+ )
+
+ self.mapper_registry.map_imperatively(
+ User,
+ users,
+ properties={"addresses": relationship(Address, backref="user")},
+ )
+ self.mapper_registry.map_imperatively(Address, addresses)
+ sess = fixture_session(autoflush=False)
+ ad = sess.query(Address).filter_by(id=1).one()
+ assert ad.user.id == 7
+
+ def go():
+ ad.user = None
+ assert ad.user is None
+
+ self.assert_sql_count(testing.db, go, 0)
+
+ u1 = sess.query(User).filter_by(id=7).one()
+
+ def go():
+ assert ad not in u1.addresses
+
+ self.assert_sql_count(testing.db, go, 1)
+
+ sess.expire(u1, ["addresses"])
+
+ def go():
+ assert ad in u1.addresses
+
+ self.assert_sql_count(testing.db, go, 1)
+
+ sess.expire(u1, ["addresses"])
+ ad2 = Address()
+
+ def go():
+ with assertions.expect_deprecated_20(
+ ".* object is being merged into a Session along the "
+ "backref cascade path for relationship "
+ ):
+ ad2.user = u1
+ assert ad2.user is u1
+
+ self.assert_sql_count(testing.db, go, 0)
+
+ def go():
+ assert ad2 in u1.addresses
+
+ self.assert_sql_count(testing.db, go, 1)
diff --git a/test/orm/test_expire.py b/test/orm/test_expire.py
index c2fec626b..5a12a7da4 100644
--- a/test/orm/test_expire.py
+++ b/test/orm/test_expire.py
@@ -1683,7 +1683,7 @@ class ExpiredPendingTest(_fixtures.FixtureTest):
)
self.mapper_registry.map_imperatively(Address, addresses)
- sess = fixture_session(autoflush=False)
+ sess = fixture_session(autoflush=False, future=True)
a1 = Address(email_address="a1")
sess.add(a1)
sess.flush()
@@ -1701,6 +1701,9 @@ class ExpiredPendingTest(_fixtures.FixtureTest):
a2 = Address(email_address="a2")
a2.user = u1
+ # needed now that cascade backrefs is disabled
+ sess.add(a2)
+
# expire u1.addresses again. this expires
# "pending" as well.
sess.expire(u1, ["addresses"])
diff --git a/test/orm/test_lazy_relations.py b/test/orm/test_lazy_relations.py
index 6b6789f30..3ebff5f43 100644
--- a/test/orm/test_lazy_relations.py
+++ b/test/orm/test_lazy_relations.py
@@ -954,7 +954,7 @@ class LazyTest(_fixtures.FixtureTest):
properties={"addresses": relationship(Address, backref="user")},
)
self.mapper_registry.map_imperatively(Address, addresses)
- sess = fixture_session(autoflush=False)
+ sess = fixture_session(autoflush=False, future=True)
ad = sess.query(Address).filter_by(id=1).one()
assert ad.user.id == 7
diff --git a/test/orm/test_load_on_fks.py b/test/orm/test_load_on_fks.py
index 02de9b2bb..fda8be423 100644
--- a/test/orm/test_load_on_fks.py
+++ b/test/orm/test_load_on_fks.py
@@ -5,7 +5,6 @@ from sqlalchemy import testing
from sqlalchemy.orm import backref
from sqlalchemy.orm import declarative_base
from sqlalchemy.orm import relationship
-from sqlalchemy.orm import Session
from sqlalchemy.orm.attributes import instance_state
from sqlalchemy.testing import AssertsExecutionResults
from sqlalchemy.testing import fixtures
@@ -65,12 +64,10 @@ class FlushOnPendingTest(AssertsExecutionResults, fixtures.TestBase):
self.assert_sql_count(testing.db, go, 0)
-class LoadOnFKsTest(AssertsExecutionResults, fixtures.TestBase):
- __leave_connections_for_teardown__ = True
-
- def setup_test(self):
- global Parent, Child, Base
- Base = declarative_base()
+class LoadOnFKsTest(fixtures.DeclarativeMappedTest):
+ @classmethod
+ def setup_classes(cls):
+ Base = cls.DeclarativeBasic
class Parent(Base):
__tablename__ = "parent"
@@ -91,11 +88,11 @@ class LoadOnFKsTest(AssertsExecutionResults, fixtures.TestBase):
parent = relationship(Parent, backref=backref("children"))
- Base.metadata.create_all(testing.db)
-
- global sess, p1, p2, c1, c2
- sess = Session(bind=testing.db)
+ @testing.fixture
+ def parent_fixture(self, connection):
+ Parent, Child = self.classes("Parent", "Child")
+ sess = fixture_session(bind=connection)
p1 = Parent()
p2 = Parent()
c1, c2 = Child(), Child()
@@ -103,38 +100,22 @@ class LoadOnFKsTest(AssertsExecutionResults, fixtures.TestBase):
sess.add_all([p1, p2])
assert c1 in sess
- sess.commit()
+ sess.flush()
- def teardown_test(self):
- sess.rollback()
- Base.metadata.drop_all(testing.db)
+ Child.parent.property.load_on_pending = False
- def test_load_on_pending_allows_backref_event(self):
- Child.parent.property.load_on_pending = True
- sess.autoflush = False
- c3 = Child()
- sess.add(c3)
- c3.parent_id = p1.id
- c3.parent = p1
+ sess.expire_all()
- # backref fired off when c3.parent was set,
- # because the "old" value was None.
- # change as of [ticket:3708]
- assert c3 in p1.children
+ yield sess, p1, p2, c1, c2
- def test_enable_rel_loading_allows_backref_event(self):
- sess.autoflush = False
- c3 = Child()
- sess.enable_relationship_loading(c3)
- c3.parent_id = p1.id
- c3.parent = p1
+ sess.close()
- # backref fired off when c3.parent was set,
- # because the "old" value was None
- # change as of [ticket:3708]
- assert c3 in p1.children
+ def test_m2o_history_on_persistent_allows_backref_event(
+ self, parent_fixture
+ ):
+ sess, p1, p2, c1, c2 = parent_fixture
+ Parent, Child = self.classes("Parent", "Child")
- def test_m2o_history_on_persistent_allows_backref_event(self):
c3 = Child()
sess.add(c3)
c3.parent_id = p1.id
@@ -142,7 +123,10 @@ class LoadOnFKsTest(AssertsExecutionResults, fixtures.TestBase):
assert c3 in p1.children
- def test_load_on_persistent_allows_backref_event(self):
+ def test_load_on_persistent_allows_backref_event(self, parent_fixture):
+ sess, p1, p2, c1, c2 = parent_fixture
+ Parent, Child = self.classes("Parent", "Child")
+
Child.parent.property.load_on_pending = True
c3 = Child()
sess.add(c3)
@@ -151,18 +135,28 @@ class LoadOnFKsTest(AssertsExecutionResults, fixtures.TestBase):
assert c3 in p1.children
- def test_enable_rel_loading_on_persistent_allows_backref_event(self):
+ def test_load_on_pending_allows_backref_event(self, parent_fixture):
+ sess, p1, p2, c1, c2 = parent_fixture
+ Parent, Child = self.classes("Parent", "Child")
+
+ sess.autoflush = False
+
+ Child.parent.property.load_on_pending = True
c3 = Child()
- sess.enable_relationship_loading(c3)
+ sess.add(c3)
c3.parent_id = p1.id
+
c3.parent = p1
# backref fired off when c3.parent was set,
- # because the "old" value was None
+ # because the "old" value was None.
# change as of [ticket:3708]
assert c3 in p1.children
- def test_no_load_on_pending_allows_backref_event(self):
+ def test_no_load_on_pending_allows_backref_event(self, parent_fixture):
+ sess, p1, p2, c1, c2 = parent_fixture
+ Parent, Child = self.classes("Parent", "Child")
+
# users who stick with the program and don't use
# 'load_on_pending' get expected behavior
@@ -175,7 +169,10 @@ class LoadOnFKsTest(AssertsExecutionResults, fixtures.TestBase):
assert c3 in p1.children
- def test_autoflush_on_pending(self):
+ def test_autoflush_on_pending(self, parent_fixture):
+ sess, p1, p2, c1, c2 = parent_fixture
+ Parent, Child = self.classes("Parent", "Child")
+
# ensure p1.id is not expired
p1.id
@@ -186,7 +183,10 @@ class LoadOnFKsTest(AssertsExecutionResults, fixtures.TestBase):
# pendings don't autoflush
assert c3.parent is None
- def test_autoflush_load_on_pending_on_pending(self):
+ def test_autoflush_load_on_pending_on_pending(self, parent_fixture):
+ sess, p1, p2, c1, c2 = parent_fixture
+ Parent, Child = self.classes("Parent", "Child")
+
# ensure p1.id is not expired
p1.id
@@ -198,7 +198,10 @@ class LoadOnFKsTest(AssertsExecutionResults, fixtures.TestBase):
# ...unless the flag is on
assert c3.parent is p1
- def test_collection_load_from_pending_populated(self):
+ def test_collection_load_from_pending_populated(self, parent_fixture):
+ sess, p1, p2, c1, c2 = parent_fixture
+ Parent, Child = self.classes("Parent", "Child")
+
Parent.children.property.load_on_pending = True
p2 = Parent(id=p1.id)
sess.add(p2)
@@ -209,7 +212,10 @@ class LoadOnFKsTest(AssertsExecutionResults, fixtures.TestBase):
self.assert_sql_count(testing.db, go, 1)
- def test_collection_load_from_pending_no_sql(self):
+ def test_collection_load_from_pending_no_sql(self, parent_fixture):
+ sess, p1, p2, c1, c2 = parent_fixture
+ Parent, Child = self.classes("Parent", "Child")
+
Parent.children.property.load_on_pending = True
p2 = Parent(id=None)
sess.add(p2)
@@ -221,7 +227,10 @@ class LoadOnFKsTest(AssertsExecutionResults, fixtures.TestBase):
self.assert_sql_count(testing.db, go, 0)
- def test_load_on_pending_with_set(self):
+ def test_load_on_pending_with_set(self, parent_fixture):
+ sess, p1, p2, c1, c2 = parent_fixture
+ Parent, Child = self.classes("Parent", "Child")
+
Child.parent.property.load_on_pending = True
p1.children
@@ -236,7 +245,10 @@ class LoadOnFKsTest(AssertsExecutionResults, fixtures.TestBase):
self.assert_sql_count(testing.db, go, 0)
- def test_backref_doesnt_double(self):
+ def test_backref_doesnt_double(self, parent_fixture):
+ sess, p1, p2, c1, c2 = parent_fixture
+ Parent, Child = self.classes("Parent", "Child")
+
Child.parent.property.load_on_pending = True
sess.autoflush = False
p1.children
@@ -248,116 +260,133 @@ class LoadOnFKsTest(AssertsExecutionResults, fixtures.TestBase):
c3.parent = p1
assert len(p1.children) == 2
- def test_m2o_lazy_loader_on_persistent(self):
+ @testing.combinations(True, False, argnames="loadfk")
+ @testing.combinations(True, False, argnames="loadrel")
+ @testing.combinations(True, False, argnames="autoflush")
+ @testing.combinations(True, False, argnames="manualflush")
+ @testing.combinations(True, False, argnames="fake_autoexpire")
+ def test_m2o_lazy_loader_on_persistent(
+ self,
+ parent_fixture,
+ loadfk,
+ loadrel,
+ autoflush,
+ manualflush,
+ fake_autoexpire,
+ ):
"""Compare the behaviors from the lazyloader using
the "committed" state in all cases, vs. the lazyloader
using the "current" state in all cases except during flush.
"""
- for loadfk in (True, False):
- for loadrel in (True, False):
- for autoflush in (True, False):
- for manualflush in (True, False):
- for fake_autoexpire in (True, False):
- sess.autoflush = autoflush
-
- if loadfk:
- c1.parent_id
- if loadrel:
- c1.parent
-
- c1.parent_id = p2.id
-
- if manualflush:
- sess.flush()
-
- # fake_autoexpire refers to the eventual
- # auto-expire of 'parent' when c1.parent_id
- # is altered.
- if fake_autoexpire:
- sess.expire(c1, ["parent"])
-
- # old 0.6 behavior
- # if manualflush and (not loadrel or
- # fake_autoexpire):
- # # a flush occurs, we get p2
- # assert c1.parent is p2
- # elif not loadrel and not loadfk:
- # # problematically - we get None since
- # # committed state
- # # is empty when c1.parent_id was mutated,
- # # since we want
- # # to save on selects. this is
- # # why the patch goes in in 0.6 - this is
- # # mostly a bug.
- # assert c1.parent is None
- # else:
- # # if things were loaded, autoflush doesn't
- # # even happen.
- # assert c1.parent is p1
-
- # new behavior
- if loadrel and not fake_autoexpire:
- assert c1.parent is p1
- else:
- assert c1.parent is p2
-
- sess.rollback()
-
- def test_m2o_lazy_loader_on_pending(self):
- for loadonpending in (False, True):
- for autoflush in (False, True):
- for manualflush in (False, True):
- Child.parent.property.load_on_pending = loadonpending
- sess.autoflush = autoflush
-
- # ensure p2.id not expired
- p2.id
-
- c2 = Child()
- sess.add(c2)
- c2.parent_id = p2.id
-
- if manualflush:
- sess.flush()
-
- if loadonpending or manualflush:
- assert c2.parent is p2
- else:
- assert c2.parent is None
-
- sess.rollback()
-
- def test_m2o_lazy_loader_on_transient(self):
- for loadonpending in (False, True):
- for attach in (False, True):
- for autoflush in (False, True):
- for manualflush in (False, True):
- for enable_relationship_rel in (False, True):
- Child.parent.property.load_on_pending = (
- loadonpending
- )
- sess.autoflush = autoflush
- c2 = Child()
-
- if attach:
- state = instance_state(c2)
- state.session_id = sess.hash_key
-
- if enable_relationship_rel:
- sess.enable_relationship_loading(c2)
-
- c2.parent_id = p2.id
-
- if manualflush:
- sess.flush()
-
- if (
- loadonpending and attach
- ) or enable_relationship_rel:
- assert c2.parent is p2
- else:
- assert c2.parent is None
-
- sess.rollback()
+ sess, p1, p2, c1, c2 = parent_fixture
+ Parent, Child = self.classes("Parent", "Child")
+
+ sess.autoflush = autoflush
+
+ if loadfk:
+ c1.parent_id
+ if loadrel:
+ c1.parent
+
+ c1.parent_id = p2.id
+
+ if manualflush:
+ sess.flush()
+
+ # fake_autoexpire refers to the eventual
+ # auto-expire of 'parent' when c1.parent_id
+ # is altered.
+ if fake_autoexpire:
+ sess.expire(c1, ["parent"])
+
+ # old 0.6 behavior
+ # if manualflush and (not loadrel or
+ # fake_autoexpire):
+ # # a flush occurs, we get p2
+ # assert c1.parent is p2
+ # elif not loadrel and not loadfk:
+ # # problematically - we get None since
+ # # committed state
+ # # is empty when c1.parent_id was mutated,
+ # # since we want
+ # # to save on selects. this is
+ # # why the patch goes in in 0.6 - this is
+ # # mostly a bug.
+ # assert c1.parent is None
+ # else:
+ # # if things were loaded, autoflush doesn't
+ # # even happen.
+ # assert c1.parent is p1
+
+ # new behavior
+ if loadrel and not fake_autoexpire:
+ assert c1.parent is p1
+ else:
+ assert c1.parent is p2
+
+ @testing.combinations(True, False, argnames="loadonpending")
+ @testing.combinations(True, False, argnames="autoflush")
+ @testing.combinations(True, False, argnames="manualflush")
+ def test_m2o_lazy_loader_on_pending(
+ self, parent_fixture, loadonpending, autoflush, manualflush
+ ):
+ sess, p1, p2, c1, c2 = parent_fixture
+ Parent, Child = self.classes("Parent", "Child")
+
+ Child.parent.property.load_on_pending = loadonpending
+ sess.autoflush = autoflush
+
+ # ensure p2.id not expired
+ p2.id
+
+ c2 = Child()
+ sess.add(c2)
+ c2.parent_id = p2.id
+
+ if manualflush:
+ sess.flush()
+
+ if loadonpending or manualflush:
+ assert c2.parent is p2
+ else:
+ assert c2.parent is None
+
+ @testing.combinations(True, False, argnames="loadonpending")
+ @testing.combinations(True, False, argnames="attach")
+ @testing.combinations(True, False, argnames="autoflush")
+ @testing.combinations(True, False, argnames="manualflush")
+ @testing.combinations(True, False, argnames="enable_relationship_rel")
+ def test_m2o_lazy_loader_on_transient(
+ self,
+ parent_fixture,
+ loadonpending,
+ attach,
+ autoflush,
+ manualflush,
+ enable_relationship_rel,
+ ):
+ sess, p1, p2, c1, c2 = parent_fixture
+ Parent, Child = self.classes("Parent", "Child")
+
+ Child.parent.property.load_on_pending = loadonpending
+ sess.autoflush = autoflush
+ c2 = Child()
+
+ if attach:
+ state = instance_state(c2)
+ state.session_id = sess.hash_key
+
+ if enable_relationship_rel:
+ sess.enable_relationship_loading(c2)
+
+ c2.parent_id = p2.id
+
+ if manualflush:
+ sess.flush()
+
+ if (loadonpending and attach) or enable_relationship_rel:
+ assert c2.parent is p2
+ else:
+ assert c2.parent is None
diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py
index af90da1dd..0f84923ac 100644
--- a/test/orm/test_mapper.py
+++ b/test/orm/test_mapper.py
@@ -2349,7 +2349,7 @@ class RequirementsTest(fixtures.MappedTest):
self.mapper(H3, ht3)
self.mapper(H6, ht6)
- s = fixture_session()
+ s = fixture_session(future=True)
s.add_all([H1("abc"), H1("def")])
h1 = H1("ghi")
s.add(h1)
@@ -2367,7 +2367,7 @@ class RequirementsTest(fixtures.MappedTest):
h6 = H6()
h6.h1a = h1
h6.h1b = x = H1()
- assert x in s
+ s.add(x)
h6.h1b.h2s.append(H2("def"))
diff --git a/test/orm/test_merge.py b/test/orm/test_merge.py
index b07c26cb7..d2eade0ea 100644
--- a/test/orm/test_merge.py
+++ b/test/orm/test_merge.py
@@ -1430,7 +1430,7 @@ class MergeTest(_fixtures.FixtureTest):
self.tables.users,
)
- s = fixture_session(autoflush=True, autocommit=False)
+ s = fixture_session(autoflush=True, autocommit=False, future=True)
self.mapper_registry.map_imperatively(
User,
users,
@@ -1443,8 +1443,10 @@ class MergeTest(_fixtures.FixtureTest):
)
a1 = Address(user=s.merge(User(id=1, name="ed")), email_address="x")
+ s.add(a1)
before_id = id(a1.user)
a2 = Address(user=s.merge(User(id=1, name="jack")), email_address="x")
+ s.add(a2)
after_id = id(a1.user)
other_id = id(a2.user)
eq_(before_id, other_id)
diff --git a/test/orm/test_naturalpks.py b/test/orm/test_naturalpks.py
index c80756401..05df71c6a 100644
--- a/test/orm/test_naturalpks.py
+++ b/test/orm/test_naturalpks.py
@@ -926,7 +926,7 @@ class SelfReferentialTest(fixtures.MappedTest):
},
)
- sess = fixture_session()
+ sess = fixture_session(future=True)
n1 = Node(name="n1")
sess.add(n1)
n2 = Node(name="n11", parentnode=n1)
diff --git a/test/orm/test_session.py b/test/orm/test_session.py
index f66f22e33..d6e9df6cd 100644
--- a/test/orm/test_session.py
+++ b/test/orm/test_session.py
@@ -922,7 +922,7 @@ class SessionStateTest(_fixtures.FixtureTest):
)
self.mapper_registry.map_imperatively(Address, addresses)
- session = fixture_session()
+ session = fixture_session(future=True)
@event.listens_for(session, "after_flush")
def load_collections(session, flush_context):
@@ -943,6 +943,9 @@ class SessionStateTest(_fixtures.FixtureTest):
assert "addresses" not in inspect(u1).dict
assert a2 in inspect(u1)._pending_mutations["addresses"].added_items
+ # this is needed now that cascade_backrefs is turned off
+ session.add(a2)
+
with assertions.expect_warnings(
r"Identity map already had an identity "
r"for \(.*Address.*\), replacing"
diff --git a/test/orm/test_versioning.py b/test/orm/test_versioning.py
index 8990abe14..45fad9ab7 100644
--- a/test/orm/test_versioning.py
+++ b/test/orm/test_versioning.py
@@ -835,7 +835,7 @@ class NoBumpOnRelationshipTest(fixtures.MappedTest):
def _run_test(self, auto_version_counter=True):
A, B = self.classes("A", "B")
- s = fixture_session()
+ s = fixture_session(future=True)
if auto_version_counter:
a1 = A()
else: