diff options
| author | Mike Bayer <mike_mp@zzzcomputing.com> | 2022-09-26 14:38:44 -0400 |
|---|---|---|
| committer | mike bayer <mike_mp@zzzcomputing.com> | 2022-10-06 00:36:25 +0000 |
| commit | 276349200c486eee108471b888acfc47ea19201b (patch) | |
| tree | 7441fa3219f21b18c6e532bd85b25c2bbdae86f8 /lib | |
| parent | 566cccc8645be99a23811c39d43481d7248628b0 (diff) | |
| download | sqlalchemy-276349200c486eee108471b888acfc47ea19201b.tar.gz | |
implement write-only collections, typing for dynamic
For 2.0, we provide a truly "larger than memory collection"
implementation, a write-only collection that will never
under any circumstances implicitly load the entire
collection, even during flush.
This is essentially a much more "strict" version
of the "dynamic" loader, which in fact has a lot of
scenarios that it loads the full backing collection
into memory, mostly defeating its purpose.
Typing constructs are added that support
both the new feature WriteOnlyMapping as well as the
legacy feature DynamicMapping. These have been
integrated with "annotation based mapping" so that
relationship() uses these annotations to configure
the loader strategy as well.
additional changes:
* the docs triggered a conflict in hybrid's
"transformers" section, this section is hard-coded
to Query using a pattern that doesn't seem to have
any use and isn't part of the current select()
interface, so just removed this section
* As the docs for WriteOnlyMapping are very long,
collections.rst is broken up into two pages now.
Fixes: #6229
Fixes: #7123
Change-Id: I6929f3da6e441cad92285e7309030a9bac4e429d
Diffstat (limited to 'lib')
| -rw-r--r-- | lib/sqlalchemy/ext/hybrid.py | 149 | ||||
| -rw-r--r-- | lib/sqlalchemy/orm/__init__.py | 3 | ||||
| -rw-r--r-- | lib/sqlalchemy/orm/_orm_constructors.py | 62 | ||||
| -rw-r--r-- | lib/sqlalchemy/orm/attributes.py | 12 | ||||
| -rw-r--r-- | lib/sqlalchemy/orm/base.py | 108 | ||||
| -rw-r--r-- | lib/sqlalchemy/orm/bulk_persistence.py | 7 | ||||
| -rw-r--r-- | lib/sqlalchemy/orm/decl_base.py | 19 | ||||
| -rw-r--r-- | lib/sqlalchemy/orm/descriptor_props.py | 1 | ||||
| -rw-r--r-- | lib/sqlalchemy/orm/dynamic.py | 487 | ||||
| -rw-r--r-- | lib/sqlalchemy/orm/evaluator.py | 10 | ||||
| -rw-r--r-- | lib/sqlalchemy/orm/interfaces.py | 2 | ||||
| -rw-r--r-- | lib/sqlalchemy/orm/properties.py | 3 | ||||
| -rw-r--r-- | lib/sqlalchemy/orm/query.py | 2 | ||||
| -rw-r--r-- | lib/sqlalchemy/orm/relationships.py | 37 | ||||
| -rw-r--r-- | lib/sqlalchemy/orm/util.py | 19 | ||||
| -rw-r--r-- | lib/sqlalchemy/orm/writeonly.py | 619 | ||||
| -rw-r--r-- | lib/sqlalchemy/testing/assertsql.py | 4 | ||||
| -rw-r--r-- | lib/sqlalchemy/testing/entities.py | 5 | ||||
| -rw-r--r-- | lib/sqlalchemy/util/typing.py | 13 |
19 files changed, 994 insertions, 568 deletions
diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py index cfc6bd73b..de9ab52be 100644 --- a/lib/sqlalchemy/ext/hybrid.py +++ b/lib/sqlalchemy/ext/hybrid.py @@ -657,155 +657,6 @@ measurement, currencies and encrypted passwords. <https://techspot.zzzeek.org/2011/10/29/value-agnostic-types-part-ii/>`_ - on the techspot.zzzeek.org blog -.. _hybrid_transformers: - -Building Transformers ----------------------- - -A *transformer* is an object which can receive a :class:`_query.Query` -object and -return a new one. The :class:`_query.Query` object includes a method -:meth:`.with_transformation` that returns a new :class:`_query.Query` -transformed by -the given function. - -We can combine this with the :class:`.Comparator` class to produce one type -of recipe which can both set up the FROM clause of a query as well as assign -filtering criterion. - -Consider a mapped class ``Node``, which assembles using adjacency list into a -hierarchical tree pattern:: - - from sqlalchemy import Column, Integer, ForeignKey - from sqlalchemy.orm import relationship - from sqlalchemy.ext.declarative import declarative_base - Base = declarative_base() - - class Node(Base): - __tablename__ = 'node' - id = Column(Integer, primary_key=True) - parent_id = Column(Integer, ForeignKey('node.id')) - parent = relationship("Node", remote_side=id) - -Suppose we wanted to add an accessor ``grandparent``. This would return the -``parent`` of ``Node.parent``. When we have an instance of ``Node``, this is -simple:: - - from sqlalchemy.ext.hybrid import hybrid_property - - class Node(Base): - # ... - - @hybrid_property - def grandparent(self): - return self.parent.parent - -For the expression, things are not so clear. We'd need to construct a -:class:`_query.Query` where we :meth:`_query.Query.join` twice along -``Node.parent`` to get to the ``grandparent``. 
We can instead return a -transforming callable that we'll combine with the :class:`.Comparator` class to -receive any :class:`_query.Query` object, and return a new one that's joined to -the ``Node.parent`` attribute and filtered based on the given criterion:: - - from sqlalchemy.ext.hybrid import Comparator - - class GrandparentTransformer(Comparator): - def operate(self, op, other, **kwargs): - def transform(q): - cls = self.__clause_element__() - parent_alias = aliased(cls) - return q.join(parent_alias, cls.parent).filter( - op(parent_alias.parent, other, **kwargs) - ) - - return transform - - Base = declarative_base() - - class Node(Base): - __tablename__ = 'node' - id = Column(Integer, primary_key=True) - parent_id = Column(Integer, ForeignKey('node.id')) - parent = relationship("Node", remote_side=id) - - @hybrid_property - def grandparent(self): - return self.parent.parent - - @grandparent.comparator - def grandparent(cls): - return GrandparentTransformer(cls) - -The ``GrandparentTransformer`` overrides the core :meth:`.Operators.operate` -method at the base of the :class:`.Comparator` hierarchy to return a query- -transforming callable, which then runs the given comparison operation in a -particular context. Such as, in the example above, the ``operate`` method is -called, given the :attr:`.Operators.eq` callable as well as the right side of -the comparison ``Node(id=5)``. A function ``transform`` is then returned which -will transform a :class:`_query.Query` first to join to ``Node.parent``, -then to -compare ``parent_alias`` using :attr:`.Operators.eq` against the left and right -sides, passing into :meth:`_query.Query.filter`: - -.. sourcecode:: pycon+sql - - >>> from sqlalchemy.orm import Session - >>> session = Session() - {sql}>>> session.query(Node).\ - ... with_transformation(Node.grandparent==Node(id=5)).\ - ... 
all() - SELECT node.id AS node_id, node.parent_id AS node_parent_id - FROM node JOIN node AS node_1 ON node_1.id = node.parent_id - WHERE :param_1 = node_1.parent_id - {stop} - -We can modify the pattern to be more verbose but flexible by separating the -"join" step from the "filter" step. The tricky part here is ensuring that -successive instances of ``GrandparentTransformer`` use the same -:class:`.AliasedClass` object against ``Node``. Below we use a simple -memoizing approach that associates a ``GrandparentTransformer`` with each -class:: - - class Node(Base): - - # ... - - @grandparent.comparator - def grandparent(cls): - # memoize a GrandparentTransformer - # per class - if '_gp' not in cls.__dict__: - cls._gp = GrandparentTransformer(cls) - return cls._gp - - class GrandparentTransformer(Comparator): - - def __init__(self, cls): - self.parent_alias = aliased(cls) - - @property - def join(self): - def go(q): - return q.join(self.parent_alias, Node.parent) - return go - - def operate(self, op, other, **kwargs): - return op(self.parent_alias.parent, other, **kwargs) - -.. sourcecode:: pycon+sql - - {sql}>>> session.query(Node).\ - ... with_transformation(Node.grandparent.join).\ - ... filter(Node.grandparent==Node(id=5)) - SELECT node.id AS node_id, node.parent_id AS node_parent_id - FROM node JOIN node AS node_1 ON node_1.id = node.parent_id - WHERE :param_1 = node_1.parent_id - {stop} - -The "transformer" pattern is an experimental pattern that starts to make usage -of some functional programming paradigms. While it's only recommended for -advanced and/or patient developers, there's probably a whole lot of amazing -things it can be used for. 
""" # noqa diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py index 6bfda6e2e..8523e520b 100644 --- a/lib/sqlalchemy/orm/__init__.py +++ b/lib/sqlalchemy/orm/__init__.py @@ -42,12 +42,14 @@ from .attributes import AttributeEventToken as AttributeEventToken from .attributes import InstrumentedAttribute as InstrumentedAttribute from .attributes import QueryableAttribute as QueryableAttribute from .base import class_mapper as class_mapper +from .base import DynamicMapped as DynamicMapped from .base import InspectionAttrExtensionType as InspectionAttrExtensionType from .base import LoaderCallableStatus as LoaderCallableStatus from .base import Mapped as Mapped from .base import NotExtension as NotExtension from .base import ORMDescriptor as ORMDescriptor from .base import PassiveFlag as PassiveFlag +from .base import WriteOnlyMapped as WriteOnlyMapped from .context import FromStatement as FromStatement from .context import QueryContext as QueryContext from .decl_api import add_mapped_attribute as add_mapped_attribute @@ -147,6 +149,7 @@ from .util import object_mapper as object_mapper from .util import polymorphic_union as polymorphic_union from .util import was_deleted as was_deleted from .util import with_parent as with_parent +from .writeonly import WriteOnlyCollection as WriteOnlyCollection from .. import util as _sa_util diff --git a/lib/sqlalchemy/orm/_orm_constructors.py b/lib/sqlalchemy/orm/_orm_constructors.py index 0b4861af3..38d984438 100644 --- a/lib/sqlalchemy/orm/_orm_constructors.py +++ b/lib/sqlalchemy/orm/_orm_constructors.py @@ -1172,11 +1172,10 @@ def relationship( issues a JOIN to the immediate parent object, specifying primary key identifiers using an IN clause. - .. versionadded:: 1.2 - - * ``noload`` - no loading should occur at any time. This is to - support "write-only" attributes, or attributes which are - populated in some manner specific to the application. + * ``noload`` - no loading should occur at any time. 
The related + collection will remain empty. The ``noload`` strategy is not + recommended for general use. For a general use "never load" + approach, see :ref:`write_only_relationship` * ``raise`` - lazy loading is disallowed; accessing the attribute, if its value were not already loaded via eager @@ -1184,8 +1183,6 @@ def relationship( This strategy can be used when objects are to be detached from their attached :class:`.Session` after they are loaded. - .. versionadded:: 1.1 - * ``raise_on_sql`` - lazy loading that emits SQL is disallowed; accessing the attribute, if its value were not already loaded via eager loading, will raise an @@ -1198,11 +1195,51 @@ def relationship( .. versionadded:: 1.1 + * ``write_only`` - the attribute will be configured with a special + "virtual collection" that may receive + :meth:`_orm.WriteOnlyCollection.add` and + :meth:`_orm.WriteOnlyCollection.remove` commands to add or remove + individual objects, but will not under any circumstances load or + iterate the full set of objects from the database directly. Instead, + methods such as :meth:`_orm.WriteOnlyCollection.select`, + :meth:`_orm.WriteOnlyCollection.insert`, + :meth:`_orm.WriteOnlyCollection.update` and + :meth:`_orm.WriteOnlyCollection.delete` are provided which generate SQL + constructs that may be used to load and modify rows in bulk. Used for + large collections that are never appropriate to load at once into + memory. + + The ``write_only`` loader style is configured automatically when + the :class:`_orm.WriteOnlyMapped` annotation is provided on the + left hand side within a Declarative mapping. See the section + :ref:`write_only_relationship` for examples. + + .. versionadded:: 2.0 + + .. seealso:: + + :ref:`write_only_relationship` - in the :ref:`queryguide_toplevel` + * ``dynamic`` - the attribute will return a pre-configured :class:`_query.Query` object for all read operations, onto which further filtering operations can be - applied before iterating the results. 
See - the section :ref:`dynamic_relationship` for more details. + applied before iterating the results. + + The ``dynamic`` loader style is configured automatically when + the :class:`_orm.DynamicMapped` annotation is provided on the + left hand side within a Declarative mapping. See the section + :ref:`dynamic_relationship` for examples. + + .. legacy:: The "dynamic" lazy loader strategy is the legacy form of + what is now the "write_only" strategy described in the section + :ref:`write_only_relationship`. + + .. seealso:: + + :ref:`dynamic_relationship` - in the :ref:`queryguide_toplevel` + + :ref:`write_only_relationship` - more generally useful approach + for large collections that should not fully load into memory * True - a synonym for 'select' @@ -1212,12 +1249,9 @@ def relationship( .. seealso:: - :doc:`/orm/loading_relationships` - Full documentation on - relationship loader configuration. - - :ref:`dynamic_relationship` - detail on the ``dynamic`` option. + :ref:`orm_queryguide_relationship_loaders` - Full documentation on + relationship loader configuration in the :ref:`queryguide_toplevel`. - :ref:`collections_noload_raiseload` - notes on "noload" and "raise" :param load_on_pending=False: Indicates loading behavior for transient or pending parent objects. 
diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py index fcc016f54..854bad986 100644 --- a/lib/sqlalchemy/orm/attributes.py +++ b/lib/sqlalchemy/orm/attributes.py @@ -94,11 +94,11 @@ if TYPE_CHECKING: from ._typing import _O from .collections import _AdaptedCollectionProtocol from .collections import CollectionAdapter - from .dynamic import DynamicAttributeImpl from .interfaces import MapperProperty from .relationships import RelationshipProperty from .state import InstanceState from .util import AliasedInsp + from .writeonly import WriteOnlyAttributeImpl from ..event.base import _Dispatch from ..sql._typing import _ColumnExpressionArgument from ..sql._typing import _DMLColumnArgument @@ -2581,9 +2581,9 @@ def register_attribute_impl( impl: AttributeImpl if impl_class: - # TODO: this appears to be the DynamicAttributeImpl constructor - # which is hardcoded - impl = cast("Type[DynamicAttributeImpl]", impl_class)( + # TODO: this appears to be the WriteOnlyAttributeImpl / + # DynamicAttributeImpl constructor which is hardcoded + impl = cast("Type[WriteOnlyAttributeImpl]", impl_class)( class_, key, typecallable, dispatch, **kw ) elif uselist: @@ -2672,7 +2672,9 @@ def init_state_collection( attr._dispose_previous_collection(state, old, old_collection, False) user_data = attr._default_value(state, dict_) - adapter: CollectionAdapter = attr.get_collection(state, dict_, user_data) + adapter: CollectionAdapter = attr.get_collection( + state, dict_, user_data, passive=PassiveFlag.PASSIVE_NO_FETCH + ) adapter._reset_empty() return adapter diff --git a/lib/sqlalchemy/orm/base.py b/lib/sqlalchemy/orm/base.py index d3814abd5..20a683d8c 100644 --- a/lib/sqlalchemy/orm/base.py +++ b/lib/sqlalchemy/orm/base.py @@ -42,11 +42,13 @@ if typing.TYPE_CHECKING: from ._typing import _ExternalEntityType from ._typing import _InternalEntityType from .attributes import InstrumentedAttribute + from .dynamic import AppenderQuery from .instrumentation import 
ClassManager from .interfaces import PropComparator from .mapper import Mapper from .state import InstanceState from .util import AliasedClass + from .writeonly import WriteOnlyCollection from ..sql._typing import _ColumnExpressionArgument from ..sql._typing import _InfoType from ..sql.elements import ColumnElement @@ -726,7 +728,23 @@ class ORMDescriptor(Generic[_T], TypingOnly): ... -class Mapped(ORMDescriptor[_T], roles.TypedColumnsClauseRole[_T], TypingOnly): +class _MappedAnnotationBase(Generic[_T], TypingOnly): + """common class for Mapped and similar ORM container classes. + + these are classes that can appear on the left side of an ORM declarative + mapping, containing a mapped class or in some cases a collection + surrounding a mapped class. + + """ + + __slots__ = () + + +class Mapped( + ORMDescriptor[_T], + roles.TypedColumnsClauseRole[_T], + _MappedAnnotationBase[_T], +): """Represent an ORM mapped attribute on a mapped class. This class represents the complete descriptor interface for any class @@ -811,3 +829,91 @@ class _DeclarativeMapped(Mapped[_T], _MappedAttribute[_T]): """ __slots__ = () + + +class DynamicMapped(_MappedAnnotationBase[_T]): + """Represent the ORM mapped attribute type for a "dynamic" relationship. + + The :class:`_orm.DynamicMapped` type annotation may be used in an + :ref:`Annotated Declarative Table <orm_declarative_mapped_column>` mapping + to indicate that the ``lazy="dynamic"`` loader strategy should be used + for a particular :func:`_orm.relationship`. + + .. legacy:: The "dynamic" lazy loader strategy is the legacy form of what + is now the "write_only" strategy described in the section + :ref:`write_only_relationship`. + + E.g.:: + + class User(Base): + __tablename__ = "user" + id: Mapped[int] = mapped_column(primary_key=True) + addresses: DynamicMapped[Address] = relationship( + cascade="all,delete-orphan" + ) + + See the section :ref:`dynamic_relationship` for background. + + .. versionadded:: 2.0 + + .. 
seealso:: + + :ref:`dynamic_relationship` - complete background + + :class:`.WriteOnlyMapped` - fully 2.0 style version + + """ + + __slots__ = () + + if TYPE_CHECKING: + + def __get__( + self, instance: Optional[object], owner: Any + ) -> AppenderQuery[_T]: + ... + + def __set__(self, instance: Any, value: typing.Collection[_T]) -> None: + ... + + +class WriteOnlyMapped(_MappedAnnotationBase[_T]): + """Represent the ORM mapped attribute type for a "write only" relationship. + + The :class:`_orm.WriteOnlyMapped` type annotation may be used in an + :ref:`Annotated Declarative Table <orm_declarative_mapped_column>` mapping + to indicate that the ``lazy="write_only"`` loader strategy should be used + for a particular :func:`_orm.relationship`. + + E.g.:: + + class User(Base): + __tablename__ = "user" + id: Mapped[int] = mapped_column(primary_key=True) + addresses: WriteOnlyMapped[Address] = relationship( + cascade="all,delete-orphan" + ) + + See the section :ref:`write_only_relationship` for background. + + .. versionadded:: 2.0 + + .. seealso:: + + :ref:`write_only_relationship` - complete background + + :class:`.DynamicMapped` - includes legacy :class:`_orm.Query` support + + """ + + __slots__ = () + + if TYPE_CHECKING: + + def __get__( + self, instance: Optional[object], owner: Any + ) -> WriteOnlyCollection[_T]: + ... + + def __set__(self, instance: Any, value: typing.Collection[_T]) -> None: + ... 
diff --git a/lib/sqlalchemy/orm/bulk_persistence.py b/lib/sqlalchemy/orm/bulk_persistence.py index 3ed34a57a..b407fcdca 100644 --- a/lib/sqlalchemy/orm/bulk_persistence.py +++ b/lib/sqlalchemy/orm/bulk_persistence.py @@ -591,6 +591,7 @@ class BulkUDCompileState(ORMDMLState): "_sa_orm_update_options", { "synchronize_session", + "autoflush", "is_delete_using", "is_update_from", "dml_strategy", @@ -1079,6 +1080,7 @@ class BulkORMInsert(ORMDMLState, InsertDMLState): _render_nulls: bool = False _return_defaults: bool = False _subject_mapper: Optional[Mapper[Any]] = None + _autoflush: bool = True select_statement: Optional[FromStatement] = None @@ -1098,7 +1100,7 @@ class BulkORMInsert(ORMDMLState, InsertDMLState): execution_options, ) = BulkORMInsert.default_insert_options.from_execution_options( "_sa_orm_insert_options", - {"dml_strategy"}, + {"dml_strategy", "autoflush"}, execution_options, statement._execution_options, ) @@ -1142,6 +1144,9 @@ class BulkORMInsert(ORMDMLState, InsertDMLState): context._orm_load_exec_options ) + if insert_options._autoflush: + session._autoflush() + statement = statement._annotate( {"dml_strategy": insert_options._dml_strategy} ) diff --git a/lib/sqlalchemy/orm/decl_base.py b/lib/sqlalchemy/orm/decl_base.py index a383e92ca..eed04025d 100644 --- a/lib/sqlalchemy/orm/decl_base.py +++ b/lib/sqlalchemy/orm/decl_base.py @@ -420,7 +420,7 @@ class _ClassScanMapperConfig(_MapperConfig): registry: _RegistryType clsdict_view: _ClassDict - collected_annotations: Dict[str, Tuple[Any, Any, bool]] + collected_annotations: Dict[str, Tuple[Any, Any, Any, bool]] collected_attributes: Dict[str, Any] local_table: Optional[FromClause] persist_selectable: Optional[FromClause] @@ -997,6 +997,7 @@ class _ClassScanMapperConfig(_MapperConfig): (key, mapped_anno if mapped_anno else raw_anno) for key, ( raw_anno, + mapped_container, mapped_anno, is_dc, ) in self.collected_annotations.items() @@ -1075,7 +1076,7 @@ class _ClassScanMapperConfig(_MapperConfig): 
is_dataclass_field = False is_dataclass_field = False - extracted_mapped_annotation = _extract_mapped_subtype( + extracted = _extract_mapped_subtype( raw_annotation, self.cls, name, @@ -1086,10 +1087,13 @@ class _ClassScanMapperConfig(_MapperConfig): and not is_dataclass, # self.allow_dataclass_fields, ) - if extracted_mapped_annotation is None: + if extracted is None: # ClassVar can come out here return attr_value - elif attr_value is None: + + extracted_mapped_annotation, mapped_container = extracted + + if attr_value is None: for elem in typing_get_args(extracted_mapped_annotation): # look in Annotated[...] for an ORM construct, # such as Annotated[int, mapped_column(primary_key=True)] @@ -1098,6 +1102,7 @@ class _ClassScanMapperConfig(_MapperConfig): self.collected_annotations[name] = ( raw_annotation, + mapped_container, extracted_mapped_annotation, is_dataclass, ) @@ -1252,13 +1257,17 @@ class _ClassScanMapperConfig(_MapperConfig): if isinstance(value, _IntrospectsAnnotations): ( annotation, + mapped_container, extracted_mapped_annotation, is_dataclass, - ) = self.collected_annotations.get(k, (None, None, False)) + ) = self.collected_annotations.get( + k, (None, None, None, False) + ) value.declarative_scan( self.registry, cls, k, + mapped_container, annotation, extracted_mapped_annotation, is_dataclass, diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py index 35b12b2ed..a15cd86f4 100644 --- a/lib/sqlalchemy/orm/descriptor_props.py +++ b/lib/sqlalchemy/orm/descriptor_props.py @@ -333,6 +333,7 @@ class CompositeProperty( registry: _RegistryType, cls: Type[Any], key: str, + mapped_container: Optional[Type[Mapped[Any]]], annotation: Optional[_AnnotationScanType], extracted_mapped_annotation: Optional[_AnnotationScanType], is_dataclass_field: bool, diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py index 8cc4c6c04..be31af1e9 100644 --- a/lib/sqlalchemy/orm/dynamic.py +++ 
b/lib/sqlalchemy/orm/dynamic.py @@ -12,83 +12,58 @@ Dynamic collections act like Query() objects for read operations and support basic add/delete mutation. +.. legacy:: the "dynamic" loader is a legacy feature, superseded by the + "write_only" loader. + + """ from __future__ import annotations from typing import Any -from typing import Optional -from typing import overload +from typing import Iterable +from typing import Iterator from typing import TYPE_CHECKING -from typing import Union +from typing import TypeVar from . import attributes from . import exc as orm_exc -from . import interfaces from . import relationships -from . import strategies from . import util as orm_util -from .base import object_mapper -from .base import PassiveFlag from .query import Query from .session import object_session -from .. import exc -from .. import log +from .writeonly import AbstractCollectionWriter +from .writeonly import WriteOnlyAttributeImpl +from .writeonly import WriteOnlyHistory +from .writeonly import WriteOnlyLoader from .. import util from ..engine import result -from ..util.typing import Literal if TYPE_CHECKING: - from ._typing import _InstanceDict - from .attributes import _AdaptedCollectionProtocol - from .attributes import AttributeEventToken - from .attributes import CollectionAdapter - from .base import LoaderCallableStatus - from .state import InstanceState + from .session import Session -@log.class_logger -@relationships.RelationshipProperty.strategy_for(lazy="dynamic") -class DynaLoader(strategies.AbstractRelationshipLoader, log.Identified): - def init_class_attribute(self, mapper): - self.is_class_level = True - if not self.uselist: - raise exc.InvalidRequestError( - "On relationship %s, 'dynamic' loaders cannot be used with " - "many-to-one/one-to-one relationships and/or " - "uselist=False." 
% self.parent_property - ) - elif self.parent_property.direction not in ( - interfaces.ONETOMANY, - interfaces.MANYTOMANY, - ): - util.warn( - "On relationship %s, 'dynamic' loaders cannot be used with " - "many-to-one/one-to-one relationships and/or " - "uselist=False. This warning will be an exception in a " - "future release." % self.parent_property - ) +_T = TypeVar("_T", bound=Any) - strategies._register_attribute( - self.parent_property, - mapper, - useobject=True, - impl_class=DynamicAttributeImpl, - target_mapper=self.parent_property.mapper, - order_by=self.parent_property.order_by, - query_class=self.parent_property.query_class, - ) + +class DynamicCollectionHistory(WriteOnlyHistory): + def __init__(self, attr, state, passive, apply_to=None): + if apply_to: + coll = AppenderQuery(attr, state).autoflush(False) + self.unchanged_items = util.OrderedIdentitySet(coll) + self.added_items = apply_to.added_items + self.deleted_items = apply_to.deleted_items + self._reconcile_collection = True + else: + self.deleted_items = util.OrderedIdentitySet() + self.added_items = util.OrderedIdentitySet() + self.unchanged_items = util.OrderedIdentitySet() + self._reconcile_collection = False -class DynamicAttributeImpl( - attributes.HasCollectionAdapter, attributes.AttributeImpl -): - uses_objects = True - default_accepts_scalar_loader = False - supports_population = False - collection = False - dynamic = True - order_by = () +class DynamicAttributeImpl(WriteOnlyAttributeImpl): + _supports_dynamic_iteration = True + collection_history_cls = DynamicCollectionHistory def __init__( self, @@ -101,8 +76,8 @@ class DynamicAttributeImpl( query_class=None, **kw, ): - super(DynamicAttributeImpl, self).__init__( - class_, key, typecallable, dispatch, **kw + attributes.AttributeImpl.__init__( + self, class_, key, typecallable, dispatch, **kw ) self.target_mapper = target_mapper if order_by: @@ -114,261 +89,27 @@ class DynamicAttributeImpl( else: self.query_class = 
mixin_user_query(query_class) - def get(self, state, dict_, passive=attributes.PASSIVE_OFF): - if not passive & attributes.SQL_OK: - return self._get_collection_history( - state, attributes.PASSIVE_NO_INITIALIZE - ).added_items - else: - return self.query_class(self, state) - - @overload - def get_collection( - self, - state: InstanceState[Any], - dict_: _InstanceDict, - user_data: Literal[None] = ..., - passive: Literal[PassiveFlag.PASSIVE_OFF] = ..., - ) -> CollectionAdapter: - ... - - @overload - def get_collection( - self, - state: InstanceState[Any], - dict_: _InstanceDict, - user_data: _AdaptedCollectionProtocol = ..., - passive: PassiveFlag = ..., - ) -> CollectionAdapter: - ... - - @overload - def get_collection( - self, - state: InstanceState[Any], - dict_: _InstanceDict, - user_data: Optional[_AdaptedCollectionProtocol] = ..., - passive: PassiveFlag = ..., - ) -> Union[ - Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter - ]: - ... - - def get_collection( - self, - state: InstanceState[Any], - dict_: _InstanceDict, - user_data: Optional[_AdaptedCollectionProtocol] = None, - passive: PassiveFlag = PassiveFlag.PASSIVE_OFF, - ) -> Union[ - Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter - ]: - if not passive & attributes.SQL_OK: - data = self._get_collection_history(state, passive).added_items - else: - history = self._get_collection_history(state, passive) - data = history.added_plus_unchanged - return DynamicCollectionAdapter(data) - - @util.memoized_property - def _append_token(self): - return attributes.AttributeEventToken(self, attributes.OP_APPEND) - - @util.memoized_property - def _remove_token(self): - return attributes.AttributeEventToken(self, attributes.OP_REMOVE) - - def fire_append_event( - self, state, dict_, value, initiator, collection_history=None - ): - if collection_history is None: - collection_history = self._modified_event(state, dict_) - - collection_history.add_added(value) - - for fn in 
self.dispatch.append: - value = fn(state, value, initiator or self._append_token) - - if self.trackparent and value is not None: - self.sethasparent(attributes.instance_state(value), state, True) - - def fire_remove_event( - self, state, dict_, value, initiator, collection_history=None - ): - if collection_history is None: - collection_history = self._modified_event(state, dict_) - - collection_history.add_removed(value) - - if self.trackparent and value is not None: - self.sethasparent(attributes.instance_state(value), state, False) - - for fn in self.dispatch.remove: - fn(state, value, initiator or self._remove_token) - - def _modified_event(self, state, dict_): - - if self.key not in state.committed_state: - state.committed_state[self.key] = CollectionHistory(self, state) - - state._modified_event(dict_, self, attributes.NEVER_SET) - - # this is a hack to allow the fixtures.ComparableEntity fixture - # to work - dict_[self.key] = True - return state.committed_state[self.key] - - def set( - self, - state: InstanceState[Any], - dict_: _InstanceDict, - value: Any, - initiator: Optional[AttributeEventToken] = None, - passive: PassiveFlag = PassiveFlag.PASSIVE_OFF, - check_old: Any = None, - pop: bool = False, - _adapt: bool = True, - ) -> None: - if initiator and initiator.parent_token is self.parent_token: - return - - if pop and value is None: - return - - iterable = value - new_values = list(iterable) - if state.has_identity: - old_collection = util.IdentitySet(self.get(state, dict_)) - - collection_history = self._modified_event(state, dict_) - if not state.has_identity: - old_collection = collection_history.added_items - else: - old_collection = old_collection.union( - collection_history.added_items - ) - - idset = util.IdentitySet - constants = old_collection.intersection(new_values) - additions = idset(new_values).difference(constants) - removals = old_collection.difference(constants) - - for member in new_values: - if member in additions: - 
self.fire_append_event( - state, - dict_, - member, - None, - collection_history=collection_history, - ) - - for member in removals: - self.fire_remove_event( - state, - dict_, - member, - None, - collection_history=collection_history, - ) - - def delete(self, *args, **kwargs): - raise NotImplementedError() - - def set_committed_value(self, state, dict_, value): - raise NotImplementedError( - "Dynamic attributes don't support " "collection population." - ) - - def get_history(self, state, dict_, passive=attributes.PASSIVE_OFF): - c = self._get_collection_history(state, passive) - return c.as_history() - - def get_all_pending( - self, state, dict_, passive=attributes.PASSIVE_NO_INITIALIZE - ): - c = self._get_collection_history(state, passive) - return [(attributes.instance_state(x), x) for x in c.all_items] - - def _get_collection_history(self, state, passive=attributes.PASSIVE_OFF): - if self.key in state.committed_state: - c = state.committed_state[self.key] - else: - c = CollectionHistory(self, state) - if state.has_identity and (passive & attributes.INIT_OK): - return CollectionHistory(self, state, apply_to=c) - else: - return c - - def append( - self, state, dict_, value, initiator, passive=attributes.PASSIVE_OFF - ): - if initiator is not self: - self.fire_append_event(state, dict_, value, initiator) - - def remove( - self, state, dict_, value, initiator, passive=attributes.PASSIVE_OFF - ): - if initiator is not self: - self.fire_remove_event(state, dict_, value, initiator) - - def pop( - self, state, dict_, value, initiator, passive=attributes.PASSIVE_OFF - ): - self.remove(state, dict_, value, initiator, passive=passive) - - -class DynamicCollectionAdapter: - """simplified CollectionAdapter for internal API consistency""" - - def __init__(self, data): - self.data = data - - def __iter__(self): - return iter(self.data) +@relationships.RelationshipProperty.strategy_for(lazy="dynamic") +class DynaLoader(WriteOnlyLoader): + impl_class = DynamicAttributeImpl - 
def _reset_empty(self): - pass - def __len__(self): - return len(self.data) +class AppenderMixin(AbstractCollectionWriter[_T]): + """A mixin that expects to be mixing in a Query class with + AbstractAppender. - def __bool__(self): - return True + """ -class AppenderMixin: query_class = None def __init__(self, attr, state): - super(AppenderMixin, self).__init__(attr.target_mapper, None) - self.instance = instance = state.obj() - self.attr = attr - - mapper = object_mapper(instance) - prop = mapper._props[self.attr.key] - - if prop.secondary is not None: - # this is a hack right now. The Query only knows how to - # make subsequent joins() without a given left-hand side - # from self._from_obj[0]. We need to ensure prop.secondary - # is in the FROM. So we purposely put the mapper selectable - # in _from_obj[0] to ensure a user-defined join() later on - # doesn't fail, and secondary is then in _from_obj[1]. - - # note also, we are using the official ORM-annotated selectable - # from __clause_element__(), see #7868 - self._from_obj = (prop.mapper.__clause_element__(), prop.secondary) - - self._where_criteria = ( - prop._with_parent(instance, alias_secondary=False), - ) - - if self.attr.order_by: - self._order_by_clauses = self.attr.order_by + Query.__init__(self, attr.target_mapper, None) + super().__init__(attr, state) - def session(self): + @property + def session(self) -> Session: sess = object_session(self.instance) if ( sess is not None @@ -382,7 +123,9 @@ class AppenderMixin: else: return sess - session = property(session, lambda s, x: None) + @session.setter + def session(self, session: Session) -> None: + self.sess = session def _iter(self): sess = self.session @@ -407,7 +150,12 @@ class AppenderMixin: else: return self._generate(sess)._iter() - def __getitem__(self, index): + if TYPE_CHECKING: + + def __iter__(self) -> Iterator[_T]: + ... 
+ + def __getitem__(self, index: Any) -> _T: sess = self.session if sess is None: return self.attr._get_collection_history( @@ -417,7 +165,7 @@ class AppenderMixin: else: return self._generate(sess).__getitem__(index) - def count(self): + def count(self) -> int: sess = self.session if sess is None: return len( @@ -455,91 +203,74 @@ class AppenderMixin: return query - def extend(self, iterator): - for item in iterator: - self.attr.append( - attributes.instance_state(self.instance), - attributes.instance_dict(self.instance), - item, - None, - ) + def add_all(self, iterator: Iterable[_T]) -> None: + """Add an iterable of items to this :class:`_orm.AppenderQuery`. - def append(self, item): - self.attr.append( - attributes.instance_state(self.instance), - attributes.instance_dict(self.instance), - item, - None, - ) + The given items will be persisted to the database in terms of + the parent instance's collection on the next flush. - def remove(self, item): - self.attr.remove( - attributes.instance_state(self.instance), - attributes.instance_dict(self.instance), - item, - None, - ) + This method is provided to assist in delivering forwards-compatibility + with the :class:`_orm.WriteOnlyCollection` collection class. + .. versionadded:: 2.0 -class AppenderQuery(AppenderMixin, Query): - """A dynamic query that supports basic collection storage operations.""" + """ + self._add_all_impl(iterator) + def add(self, item: _T) -> None: + """Add an item to this :class:`_orm.AppenderQuery`. -def mixin_user_query(cls): - """Return a new class with AppenderQuery functionality layered over.""" - name = "Appender" + cls.__name__ - return type(name, (AppenderMixin, cls), {"query_class": cls}) + The given item will be persisted to the database in terms of + the parent instance's collection on the next flush. + This method is provided to assist in delivering forwards-compatibility + with the :class:`_orm.WriteOnlyCollection` collection class. 
-class CollectionHistory: - """Overrides AttributeHistory to receive append/remove events directly.""" + .. versionadded:: 2.0 - def __init__(self, attr, state, apply_to=None): - if apply_to: - coll = AppenderQuery(attr, state).autoflush(False) - self.unchanged_items = util.OrderedIdentitySet(coll) - self.added_items = apply_to.added_items - self.deleted_items = apply_to.deleted_items - self._reconcile_collection = True - else: - self.deleted_items = util.OrderedIdentitySet() - self.added_items = util.OrderedIdentitySet() - self.unchanged_items = util.OrderedIdentitySet() - self._reconcile_collection = False + """ + self._add_all_impl([item]) - @property - def added_plus_unchanged(self): - return list(self.added_items.union(self.unchanged_items)) + def extend(self, iterator: Iterable[_T]) -> None: + """Add an iterable of items to this :class:`_orm.AppenderQuery`. - @property - def all_items(self): - return list( - self.added_items.union(self.unchanged_items).union( - self.deleted_items - ) - ) + The given items will be persisted to the database in terms of + the parent instance's collection on the next flush. - def as_history(self): - if self._reconcile_collection: - added = self.added_items.difference(self.unchanged_items) - deleted = self.deleted_items.intersection(self.unchanged_items) - unchanged = self.unchanged_items.difference(deleted) - else: - added, unchanged, deleted = ( - self.added_items, - self.unchanged_items, - self.deleted_items, - ) - return attributes.History(list(added), list(unchanged), list(deleted)) + """ + self._add_all_impl(iterator) - def indexed(self, index): - return list(self.added_items)[index] + def append(self, item: _T) -> None: + """Append an item to this :class:`_orm.AppenderQuery`. - def add_added(self, value): - self.added_items.add(value) + The given item will be removed from the parent instance's collection on + the next flush. 
- def add_removed(self, value): - if value in self.added_items: - self.added_items.remove(value) - else: - self.deleted_items.add(value) + """ + self._add_all_impl([item]) + + def remove(self, item: _T) -> None: + """Remove an item from this :class:`_orm.AppenderQuery`. + + The given item will be removed from the parent instance's collection on + the next flush. + + """ + self._remove_impl(item) + + +class AppenderQuery(AppenderMixin[_T], Query[_T]): + """A dynamic query that supports basic collection storage operations. + + Methods on :class:`.AppenderQuery` include all methods of + :class:`_orm.Query`, plus additional methods used for collection + persistence. + + + """ + + +def mixin_user_query(cls): + """Return a new class with AppenderQuery functionality layered over.""" + name = "Appender" + cls.__name__ + return type(name, (AppenderMixin, cls), {"query_class": cls}) diff --git a/lib/sqlalchemy/orm/evaluator.py b/lib/sqlalchemy/orm/evaluator.py index 5af14cc00..3c0e62ef5 100644 --- a/lib/sqlalchemy/orm/evaluator.py +++ b/lib/sqlalchemy/orm/evaluator.py @@ -9,6 +9,7 @@ from __future__ import annotations +from . import exc as orm_exc from .base import LoaderCallableStatus from .base import PassiveFlag from .. 
import exc @@ -81,7 +82,14 @@ class EvaluatorCompiler: "Can't evaluate criteria against " f"alternate class {parentmapper.class_}" ) - key = parentmapper._columntoproperty[clause].key + + try: + key = parentmapper._columntoproperty[clause].key + except orm_exc.UnmappedColumnError as err: + raise UnevaluatableError( + f"Cannot evaluate expression: {err}" + ) from err + impl = parentmapper.class_manager[key].impl if impl is not None: diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py index b3fbe6ba7..9903c5f4a 100644 --- a/lib/sqlalchemy/orm/interfaces.py +++ b/lib/sqlalchemy/orm/interfaces.py @@ -77,6 +77,7 @@ if typing.TYPE_CHECKING: from ._typing import _InternalEntityType from ._typing import _ORMAdapterProto from .attributes import InstrumentedAttribute + from .base import Mapped from .context import _MapperEntity from .context import ORMCompileState from .context import QueryContext @@ -157,6 +158,7 @@ class _IntrospectsAnnotations: registry: RegistryType, cls: Type[Any], key: str, + mapped_container: Optional[Type[Mapped[Any]]], annotation: Optional[_AnnotationScanType], extracted_mapped_annotation: Optional[_AnnotationScanType], is_dataclass_field: bool, diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py index 1f2e9706b..841f29d15 100644 --- a/lib/sqlalchemy/orm/properties.py +++ b/lib/sqlalchemy/orm/properties.py @@ -65,6 +65,7 @@ if TYPE_CHECKING: from ._typing import _InstanceDict from ._typing import _ORMColumnExprArgument from ._typing import _RegistryType + from .base import Mapped from .mapper import Mapper from .session import Session from .state import _InstallLoaderCallableProto @@ -196,6 +197,7 @@ class ColumnProperty( registry: _RegistryType, cls: Type[Any], key: str, + mapped_container: Optional[Type[Mapped[Any]]], annotation: Optional[_AnnotationScanType], extracted_mapped_annotation: Optional[_AnnotationScanType], is_dataclass_field: bool, @@ -634,6 +636,7 @@ class MappedColumn( 
registry: _RegistryType, cls: Type[Any], key: str, + mapped_container: Optional[Type[Mapped[Any]]], annotation: Optional[_AnnotationScanType], extracted_mapped_annotation: Optional[_AnnotationScanType], is_dataclass_field: bool, diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index 30b0f41cf..86137e00e 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -1638,7 +1638,7 @@ class Query( q = q.with_transformation(filter_something(x==5)) This allows ad-hoc recipes to be created for :class:`_query.Query` - objects. See the example at :ref:`hybrid_transformers`. + objects. """ return fn(self) diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py index c215623e2..bae381961 100644 --- a/lib/sqlalchemy/orm/relationships.py +++ b/lib/sqlalchemy/orm/relationships.py @@ -47,9 +47,11 @@ from ._typing import is_has_collection_adapter from .base import _DeclarativeMapped from .base import _is_mapped_class from .base import class_mapper +from .base import DynamicMapped from .base import LoaderCallableStatus from .base import PassiveFlag from .base import state_str +from .base import WriteOnlyMapped from .interfaces import _AttributeOptions from .interfaces import _IntrospectsAnnotations from .interfaces import MANYTOMANY @@ -94,6 +96,7 @@ if typing.TYPE_CHECKING: from ._typing import _InternalEntityType from ._typing import _O from ._typing import _RegistryType + from .base import Mapped from .clsregistry import _class_resolver from .clsregistry import _ModNS from .dependency import DependencyProcessor @@ -144,6 +147,7 @@ _LazyLoadArgumentType = Literal[ "raise_on_sql", "noload", "immediate", + "write_only", "dynamic", True, False, @@ -1708,6 +1712,7 @@ class RelationshipProperty( registry: _RegistryType, cls: Type[Any], key: str, + mapped_container: Optional[Type[Mapped[Any]]], annotation: Optional[_AnnotationScanType], extracted_mapped_annotation: Optional[_AnnotationScanType], is_dataclass_field: 
bool, @@ -1723,13 +1728,27 @@ class RelationshipProperty( argument = extracted_mapped_annotation + is_write_only = mapped_container is not None and issubclass( + mapped_container, WriteOnlyMapped + ) + if is_write_only: + self.lazy = "write_only" + self.strategy_key = (("lazy", self.lazy),) + + is_dynamic = mapped_container is not None and issubclass( + mapped_container, DynamicMapped + ) + if is_dynamic: + self.lazy = "dynamic" + self.strategy_key = (("lazy", self.lazy),) + if hasattr(argument, "__origin__"): collection_class = argument.__origin__ # type: ignore if issubclass(collection_class, abc.Collection): if self.collection_class is None: self.collection_class = collection_class - else: + elif not is_write_only and not is_dynamic: self.uselist = False if argument.__args__: # type: ignore @@ -1754,7 +1773,11 @@ class RelationshipProperty( # we don't allow the collection class to be a # __forward_arg__ right now, so if we see a forward arg here, # we know there was no collection class either - if self.collection_class is None: + if ( + self.collection_class is None + and not is_write_only + and not is_dynamic + ): self.uselist = False self.argument = argument @@ -3344,8 +3367,14 @@ class _ColInAnnotations: return self.name in c._annotations -class Relationship(RelationshipProperty[_T], _DeclarativeMapped[_T]): - """Declarative front-end for the :class:`.RelationshipProperty` class. +class Relationship( # type: ignore + RelationshipProperty[_T], + _DeclarativeMapped[_T], + WriteOnlyMapped[_T], # not compatible with Mapped[_T] + DynamicMapped[_T], # not compatible with Mapped[_T] +): + """Describes an object property that holds a single item or list + of items that correspond to a related database table. Public constructor is the :func:`_orm.relationship` function. 
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index b8c2f6e9e..0f16df9c8 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -37,6 +37,7 @@ from ._typing import insp_is_aliased_class from ._typing import insp_is_mapper from ._typing import prop_is_relationship from .base import _class_to_mapper as _class_to_mapper +from .base import _MappedAnnotationBase from .base import _never_set as _never_set # noqa: F401 from .base import _none_set as _none_set # noqa: F401 from .base import attribute_str as attribute_str # noqa: F401 @@ -76,7 +77,7 @@ from ..sql.elements import KeyedColumnElement from ..sql.selectable import FromClause from ..util.langhelpers import MemoizedSlots from ..util.typing import de_stringify_annotation -from ..util.typing import is_origin_of +from ..util.typing import is_origin_of_cls from ..util.typing import Literal if typing.TYPE_CHECKING: @@ -1994,7 +1995,7 @@ def _is_mapped_annotation( except NameError: return False else: - return is_origin_of(annotated, "Mapped", module="sqlalchemy.orm") + return is_origin_of_cls(annotated, _MappedAnnotationBase) def _cleanup_mapped_str_annotation(annotation: str) -> str: @@ -2006,7 +2007,7 @@ def _cleanup_mapped_str_annotation(annotation: str) -> str: inner: Optional[Match[str]] mm = re.match(r"^(.+?)\[(.+)\]$", annotation) - if mm and mm.group(1) == "Mapped": + if mm and mm.group(1) in ("Mapped", "WriteOnlyMapped", "DynamicMapped"): stack = [] inner = mm while True: @@ -2038,7 +2039,7 @@ def _extract_mapped_subtype( is_dataclass_field: bool, expect_mapped: bool = True, raiseerr: bool = True, -) -> Optional[Union[type, str]]: +) -> Optional[Tuple[Union[type, str], Optional[type]]]: """given an annotation, figure out if it's ``Mapped[something]`` and if so, return the ``something`` part. 
@@ -2071,10 +2072,10 @@ def _extract_mapped_subtype( annotated = raw_annotation # type: ignore if is_dataclass_field: - return annotated + return annotated, None else: - if not hasattr(annotated, "__origin__") or not is_origin_of( - annotated, "Mapped", module="sqlalchemy.orm" + if not hasattr(annotated, "__origin__") or not is_origin_of_cls( + annotated, _MappedAnnotationBase ): anno_name = ( getattr(annotated, "__name__", None) @@ -2118,11 +2119,11 @@ def _extract_mapped_subtype( ) else: - return annotated + return annotated, None if len(annotated.__args__) != 1: # type: ignore raise sa_exc.ArgumentError( "Expected sub-type for Mapped[] annotation" ) - return annotated.__args__[0] # type: ignore + return annotated.__args__[0], annotated.__origin__ # type: ignore diff --git a/lib/sqlalchemy/orm/writeonly.py b/lib/sqlalchemy/orm/writeonly.py new file mode 100644 index 000000000..5814cef65 --- /dev/null +++ b/lib/sqlalchemy/orm/writeonly.py @@ -0,0 +1,619 @@ +# orm/writeonly.py +# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors +# <see AUTHORS file> +# +# This module is part of SQLAlchemy and is released under +# the MIT License: https://www.opensource.org/licenses/mit-license.php +# mypy: ignore-errors + + +"""Write-only collection API. + +This is an alternate mapped attribute style that only supports single-item +collection mutation operations. To read the collection, a select() +object must be executed each time. + +.. versionadded:: 2.0 + + +""" + +from __future__ import annotations + +from typing import Any +from typing import Generic +from typing import Iterable +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Tuple +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from sqlalchemy.sql import bindparam +from . import attributes +from . import interfaces +from . import relationships +from . 
import strategies +from .base import object_mapper +from .base import PassiveFlag +from .relationships import RelationshipDirection +from .. import exc +from .. import inspect +from .. import log +from .. import util +from ..sql import delete +from ..sql import insert +from ..sql import select +from ..sql import update +from ..sql.dml import Delete +from ..sql.dml import Insert +from ..sql.dml import Update +from ..util.typing import Literal + +if TYPE_CHECKING: + from ._typing import _InstanceDict + from .attributes import _AdaptedCollectionProtocol + from .attributes import AttributeEventToken + from .attributes import CollectionAdapter + from .base import LoaderCallableStatus + from .state import InstanceState + from ..sql.selectable import Select + + +_T = TypeVar("_T", bound=Any) + + +class WriteOnlyHistory: + """Overrides AttributeHistory to receive append/remove events directly.""" + + def __init__(self, attr, state, passive, apply_to=None): + if apply_to: + if passive & PassiveFlag.SQL_OK: + raise exc.InvalidRequestError( + f"Attribute {attr} can't load the existing state from the " + "database for this operation; full iteration is not " + "permitted. If this is a delete operation, configure " + f"passive_deletes=True on the {attr} relationship in " + "order to resolve this error." 
+ ) + + self.unchanged_items = apply_to.unchanged_items + self.added_items = apply_to.added_items + self.deleted_items = apply_to.deleted_items + self._reconcile_collection = apply_to._reconcile_collection + else: + self.deleted_items = util.OrderedIdentitySet() + self.added_items = util.OrderedIdentitySet() + self.unchanged_items = util.OrderedIdentitySet() + self._reconcile_collection = False + + @property + def added_plus_unchanged(self): + return list(self.added_items.union(self.unchanged_items)) + + @property + def all_items(self): + return list( + self.added_items.union(self.unchanged_items).union( + self.deleted_items + ) + ) + + def as_history(self): + if self._reconcile_collection: + added = self.added_items.difference(self.unchanged_items) + deleted = self.deleted_items.intersection(self.unchanged_items) + unchanged = self.unchanged_items.difference(deleted) + else: + added, unchanged, deleted = ( + self.added_items, + self.unchanged_items, + self.deleted_items, + ) + return attributes.History(list(added), list(unchanged), list(deleted)) + + def indexed(self, index): + return list(self.added_items)[index] + + def add_added(self, value): + self.added_items.add(value) + + def add_removed(self, value): + if value in self.added_items: + self.added_items.remove(value) + else: + self.deleted_items.add(value) + + +class WriteOnlyAttributeImpl( + attributes.HasCollectionAdapter, attributes.AttributeImpl +): + uses_objects = True + default_accepts_scalar_loader = False + supports_population = False + _supports_dynamic_iteration = False + collection = False + dynamic = True + order_by = () + collection_history_cls = WriteOnlyHistory + + def __init__( + self, + class_, + key, + typecallable, + dispatch, + target_mapper, + order_by, + **kw, + ): + super().__init__(class_, key, typecallable, dispatch, **kw) + self.target_mapper = target_mapper + self.query_class = WriteOnlyCollection + if order_by: + self.order_by = tuple(order_by) + + def get(self, state, dict_, 
passive=attributes.PASSIVE_OFF): + if not passive & attributes.SQL_OK: + return self._get_collection_history( + state, attributes.PASSIVE_NO_INITIALIZE + ).added_items + else: + return self.query_class(self, state) + + @overload + def get_collection( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + user_data: Literal[None] = ..., + passive: Literal[PassiveFlag.PASSIVE_OFF] = ..., + ) -> CollectionAdapter: + ... + + @overload + def get_collection( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + user_data: _AdaptedCollectionProtocol = ..., + passive: PassiveFlag = ..., + ) -> CollectionAdapter: + ... + + @overload + def get_collection( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + user_data: Optional[_AdaptedCollectionProtocol] = ..., + passive: PassiveFlag = ..., + ) -> Union[ + Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter + ]: + ... + + def get_collection( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + user_data: Optional[_AdaptedCollectionProtocol] = None, + passive: PassiveFlag = PassiveFlag.PASSIVE_OFF, + ) -> Union[ + Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter + ]: + if not passive & attributes.SQL_OK: + data = self._get_collection_history(state, passive).added_items + else: + history = self._get_collection_history(state, passive) + data = history.added_plus_unchanged + return DynamicCollectionAdapter(data) # type: ignore + + @util.memoized_property + def _append_token(self): + return attributes.AttributeEventToken(self, attributes.OP_APPEND) + + @util.memoized_property + def _remove_token(self): + return attributes.AttributeEventToken(self, attributes.OP_REMOVE) + + def fire_append_event( + self, state, dict_, value, initiator, collection_history=None + ): + if collection_history is None: + collection_history = self._modified_event(state, dict_) + + collection_history.add_added(value) + + for fn in self.dispatch.append: + value = fn(state, value, 
initiator or self._append_token) + + if self.trackparent and value is not None: + self.sethasparent(attributes.instance_state(value), state, True) + + def fire_remove_event( + self, state, dict_, value, initiator, collection_history=None + ): + if collection_history is None: + collection_history = self._modified_event(state, dict_) + + collection_history.add_removed(value) + + if self.trackparent and value is not None: + self.sethasparent(attributes.instance_state(value), state, False) + + for fn in self.dispatch.remove: + fn(state, value, initiator or self._remove_token) + + def _modified_event(self, state, dict_): + + if self.key not in state.committed_state: + state.committed_state[self.key] = self.collection_history_cls( + self, state, PassiveFlag.PASSIVE_NO_FETCH + ) + + state._modified_event(dict_, self, attributes.NEVER_SET) + + # this is a hack to allow the fixtures.ComparableEntity fixture + # to work + dict_[self.key] = True + return state.committed_state[self.key] + + def set( + self, + state: InstanceState[Any], + dict_: _InstanceDict, + value: Any, + initiator: Optional[AttributeEventToken] = None, + passive: PassiveFlag = PassiveFlag.PASSIVE_OFF, + check_old: Any = None, + pop: bool = False, + _adapt: bool = True, + ) -> None: + if initiator and initiator.parent_token is self.parent_token: + return + + if pop and value is None: + return + + iterable = value + new_values = list(iterable) + if state.has_identity: + if not self._supports_dynamic_iteration: + raise exc.InvalidRequestError( + f'Collection "{self}" does not support implicit ' + "iteration; collection replacement operations " + "can't be used" + ) + old_collection = util.IdentitySet( + self.get(state, dict_, passive=passive) + ) + + collection_history = self._modified_event(state, dict_) + if not state.has_identity: + old_collection = collection_history.added_items + else: + old_collection = old_collection.union( + collection_history.added_items + ) + + constants = 
old_collection.intersection(new_values) + additions = util.IdentitySet(new_values).difference(constants) + removals = old_collection.difference(constants) + + for member in new_values: + if member in additions: + self.fire_append_event( + state, + dict_, + member, + None, + collection_history=collection_history, + ) + + for member in removals: + self.fire_remove_event( + state, + dict_, + member, + None, + collection_history=collection_history, + ) + + def delete(self, *args, **kwargs): + raise NotImplementedError() + + def set_committed_value(self, state, dict_, value): + raise NotImplementedError( + "Dynamic attributes don't support collection population." + ) + + def get_history(self, state, dict_, passive=attributes.PASSIVE_NO_FETCH): + c = self._get_collection_history(state, passive) + return c.as_history() + + def get_all_pending( + self, state, dict_, passive=attributes.PASSIVE_NO_INITIALIZE + ): + c = self._get_collection_history(state, passive) + return [(attributes.instance_state(x), x) for x in c.all_items] + + def _get_collection_history(self, state, passive): + if self.key in state.committed_state: + c = state.committed_state[self.key] + else: + c = self.collection_history_cls( + self, state, PassiveFlag.PASSIVE_NO_FETCH + ) + + if state.has_identity and (passive & attributes.INIT_OK): + return self.collection_history_cls( + self, state, passive, apply_to=c + ) + else: + return c + + def append( + self, + state, + dict_, + value, + initiator, + passive=attributes.PASSIVE_NO_FETCH, + ): + if initiator is not self: + self.fire_append_event(state, dict_, value, initiator) + + def remove( + self, + state, + dict_, + value, + initiator, + passive=attributes.PASSIVE_NO_FETCH, + ): + if initiator is not self: + self.fire_remove_event(state, dict_, value, initiator) + + def pop( + self, + state, + dict_, + value, + initiator, + passive=attributes.PASSIVE_NO_FETCH, + ): + self.remove(state, dict_, value, initiator, passive=passive) + + +@log.class_logger 
+@relationships.RelationshipProperty.strategy_for(lazy="write_only") +class WriteOnlyLoader(strategies.AbstractRelationshipLoader, log.Identified): + impl_class = WriteOnlyAttributeImpl + + def init_class_attribute(self, mapper): + self.is_class_level = True + if not self.uselist or self.parent_property.direction not in ( + interfaces.ONETOMANY, + interfaces.MANYTOMANY, + ): + raise exc.InvalidRequestError( + "On relationship %s, 'dynamic' loaders cannot be used with " + "many-to-one/one-to-one relationships and/or " + "uselist=False." % self.parent_property + ) + + strategies._register_attribute( + self.parent_property, + mapper, + useobject=True, + impl_class=self.impl_class, + target_mapper=self.parent_property.mapper, + order_by=self.parent_property.order_by, + query_class=self.parent_property.query_class, + ) + + +class DynamicCollectionAdapter: + """simplified CollectionAdapter for internal API consistency""" + + def __init__(self, data): + self.data = data + + def __iter__(self): + return iter(self.data) + + def _reset_empty(self): + pass + + def __len__(self): + return len(self.data) + + def __bool__(self): + return True + + +class AbstractCollectionWriter(Generic[_T]): + """Virtual collection which includes append/remove methods that synchronize + into the attribute event system. + + """ + + if not TYPE_CHECKING: + __slots__ = () + + def __init__(self, attr, state): + + self.instance = instance = state.obj() + self.attr = attr + + mapper = object_mapper(instance) + prop = mapper._props[self.attr.key] + + if prop.secondary is not None: + # this is a hack right now. The Query only knows how to + # make subsequent joins() without a given left-hand side + # from self._from_obj[0]. We need to ensure prop.secondary + # is in the FROM. So we purposely put the mapper selectable + # in _from_obj[0] to ensure a user-defined join() later on + # doesn't fail, and secondary is then in _from_obj[1]. 
+ + # note also, we are using the official ORM-annotated selectable + # from __clause_element__(), see #7868 + self._from_obj = (prop.mapper.__clause_element__(), prop.secondary) + else: + self._from_obj = () + + self._where_criteria = ( + prop._with_parent(instance, alias_secondary=False), + ) + + if self.attr.order_by: + self._order_by_clauses = self.attr.order_by + else: + self._order_by_clauses = () + + def _add_all_impl(self, iterator: Iterable[_T]) -> None: + for item in iterator: + self.attr.append( + attributes.instance_state(self.instance), + attributes.instance_dict(self.instance), + item, + None, + ) + + def _remove_impl(self, item: _T) -> None: + self.attr.remove( + attributes.instance_state(self.instance), + attributes.instance_dict(self.instance), + item, + None, + ) + + +class WriteOnlyCollection(AbstractCollectionWriter[_T]): + """Write-only collection which can synchronize changes into the + attribute event system. + + The :class:`.WriteOnlyCollection` is used in a mapping by + using the ``"write_only"`` lazy loading strategy with + :func:`_orm.relationship`. For background on this configuration, + see :ref:`write_only_relationship`. + + .. versionadded:: 2.0 + + .. seealso:: + + :ref:`write_only_relationship` + + """ + + __slots__ = ( + "instance", + "attr", + "_where_criteria", + "_from_obj", + "_order_by_clauses", + ) + + def __iter__(self) -> NoReturn: + raise TypeError( + "WriteOnly collections don't support iteration in-place; " + "to query for collection items, use the select() method to " + "produce a SQL statement and execute it with session.scalars()." + ) + + def select(self) -> Select[Tuple[_T]]: + """Produce a :class:`_sql.Select` construct that represents the + rows within this instance-local :class:`_orm.WriteOnlyCollection`. 
+ + """ + stmt = select(self.attr.target_mapper).where(*self._where_criteria) + if self._from_obj: + stmt = stmt.select_from(*self._from_obj) + if self._order_by_clauses: + stmt = stmt.order_by(*self._order_by_clauses) + return stmt + + def insert(self) -> Insert[_T]: + """For one-to-many collections, produce a :class:`_dml.Insert` which + will insert new rows in terms of this this instance-local + :class:`_orm.WriteOnlyCollection`. + + This construct is only supported for a :class:`_orm.Relationship` + that does **not** include the :paramref:`_orm.relationship.secondary` + parameter. For relationships that refer to a many-to-many table, + use ordinary bulk insert techniques to produce new objects, then + use :meth:`_orm.AbstractCollectionWriter.add_all` to associate them + with the collection. + + + """ + + state = inspect(self.instance) + mapper = state.mapper + prop = mapper._props[self.attr.key] + + if prop.direction is not RelationshipDirection.ONETOMANY: + raise exc.InvalidRequestError( + "Write only bulk INSERT only supported for one-to-many " + "collections; for many-to-many, use a separate bulk " + "INSERT along with add_all()." + ) + + dict_ = {} + + for l, r in prop.synchronize_pairs: + fn = prop._get_attr_w_warn_on_none( + mapper, + state, + state.dict, + l, + ) + + dict_[r.key] = bindparam(None, callable_=fn) + + return insert(self.attr.target_mapper).values(**dict_) + + def update(self) -> Update[_T]: + """Produce a :class:`_dml.Update` which will refer to rows in terms + of this instance-local :class:`_orm.WriteOnlyCollection`. + + """ + return update(self.attr.target_mapper).where(*self._where_criteria) + + def delete(self) -> Delete[_T]: + """Produce a :class:`_dml.Delete` which will refer to rows in terms + of this instance-local :class:`_orm.WriteOnlyCollection`. 
+ + """ + return delete(self.attr.target_mapper).where(*self._where_criteria) + + def add_all(self, iterator: Iterable[_T]) -> None: + """Add an iterable of items to this :class:`_orm.WriteOnlyCollection`. + + The given items will be persisted to the database in terms of + the parent instance's collection on the next flush. + + """ + self._add_all_impl(iterator) + + def add(self, item: _T) -> None: + """Add an item to this :class:`_orm.WriteOnlyCollection`. + + The given item will be persisted to the database in terms of + the parent instance's collection on the next flush. + + """ + self._add_all_impl([item]) + + def remove(self, item: _T) -> None: + """Remove an item from this :class:`_orm.WriteOnlyCollection`. + + The given item will be removed from the parent instance's collection on + the next flush. + + """ + self._remove_impl(item) diff --git a/lib/sqlalchemy/testing/assertsql.py b/lib/sqlalchemy/testing/assertsql.py index b3a71dbff..2fda1e9cb 100644 --- a/lib/sqlalchemy/testing/assertsql.py +++ b/lib/sqlalchemy/testing/assertsql.py @@ -371,6 +371,10 @@ class EachOf(AssertRule): self.rules = list(rules) def process_statement(self, execute_observed): + if not self.rules: + self.is_consumed = True + self.consume_statement = False + while self.rules: rule = self.rules[0] rule.process_statement(execute_observed) diff --git a/lib/sqlalchemy/testing/entities.py b/lib/sqlalchemy/testing/entities.py index cf3138835..15352c8c8 100644 --- a/lib/sqlalchemy/testing/entities.py +++ b/lib/sqlalchemy/testing/entities.py @@ -11,6 +11,7 @@ from __future__ import annotations import sqlalchemy as sa from .. 
import exc as sa_exc +from ..orm.writeonly import WriteOnlyCollection _repr_stack = set() @@ -82,8 +83,12 @@ class ComparableMixin: for attr in list(a.__dict__): if attr.startswith("_"): continue + value = getattr(a, attr) + if isinstance(value, WriteOnlyCollection): + continue + try: # handle lazy loader errors battr = getattr(b, attr) diff --git a/lib/sqlalchemy/util/typing.py b/lib/sqlalchemy/util/typing.py index a0d59a630..85ef4bb45 100644 --- a/lib/sqlalchemy/util/typing.py +++ b/lib/sqlalchemy/util/typing.py @@ -216,6 +216,19 @@ def is_union(type_: Any) -> bool: return is_origin_of(type_, "Union") +def is_origin_of_cls( + type_: Any, class_obj: Union[Tuple[Type[Any], ...], Type[Any]] +) -> bool: + """return True if the given type has an __origin__ that shares a base + with the given class""" + + origin = typing_get_origin(type_) + if origin is None: + return False + + return isinstance(origin, type) and issubclass(origin, class_obj) + + def is_origin_of( type_: Any, *names: str, module: Optional[str] = None ) -> bool: |
