Diffstat (limited to '.venv/lib/python3.12/site-packages/sqlalchemy/orm')
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/__init__.py | 170
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/_orm_constructors.py | 2590
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/_typing.py | 179
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/attributes.py | 2835
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/base.py | 973
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/bulk_persistence.py | 2123
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/clsregistry.py | 571
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/collections.py | 1627
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/context.py | 3336
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/decl_api.py | 1917
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/decl_base.py | 2188
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/dependency.py | 1304
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/descriptor_props.py | 1077
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/dynamic.py | 300
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/evaluator.py | 379
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/events.py | 3271
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/exc.py | 228
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/identity.py | 302
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/instrumentation.py | 754
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/interfaces.py | 1490
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/loading.py | 1682
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/mapped_collection.py | 557
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/mapper.py | 4431
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/path_registry.py | 811
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/persistence.py | 1782
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/properties.py | 877
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/query.py | 3454
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/relationships.py | 3514
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/scoping.py | 2163
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/session.py | 5302
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/state.py | 1143
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/state_changes.py | 198
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/strategies.py | 3473
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/strategy_options.py | 2550
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/sync.py | 164
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/unitofwork.py | 796
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/util.py | 2402
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/orm/writeonly.py | 678
38 files changed, 63591 insertions(+), 0 deletions(-)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__init__.py
new file mode 100644
index 00000000..7771de47
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/__init__.py
@@ -0,0 +1,170 @@
+# orm/__init__.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""
+Functional constructs for ORM configuration.
+
+See the SQLAlchemy object relational tutorial and mapper configuration
+documentation for an overview of how this module is used.
+
+"""
+
+from __future__ import annotations
+
+from typing import Any
+
+from . import exc as exc
+from . import mapper as mapperlib
+from . import strategy_options as strategy_options
+from ._orm_constructors import _mapper_fn as mapper
+from ._orm_constructors import aliased as aliased
+from ._orm_constructors import backref as backref
+from ._orm_constructors import clear_mappers as clear_mappers
+from ._orm_constructors import column_property as column_property
+from ._orm_constructors import composite as composite
+from ._orm_constructors import contains_alias as contains_alias
+from ._orm_constructors import create_session as create_session
+from ._orm_constructors import deferred as deferred
+from ._orm_constructors import dynamic_loader as dynamic_loader
+from ._orm_constructors import join as join
+from ._orm_constructors import mapped_column as mapped_column
+from ._orm_constructors import orm_insert_sentinel as orm_insert_sentinel
+from ._orm_constructors import outerjoin as outerjoin
+from ._orm_constructors import query_expression as query_expression
+from ._orm_constructors import relationship as relationship
+from ._orm_constructors import synonym as synonym
+from ._orm_constructors import with_loader_criteria as with_loader_criteria
+from ._orm_constructors import with_polymorphic as with_polymorphic
+from .attributes import AttributeEventToken as AttributeEventToken
+from .attributes import InstrumentedAttribute as InstrumentedAttribute
+from .attributes import QueryableAttribute as QueryableAttribute
+from .base import class_mapper as class_mapper
+from .base import DynamicMapped as DynamicMapped
+from .base import InspectionAttrExtensionType as InspectionAttrExtensionType
+from .base import LoaderCallableStatus as LoaderCallableStatus
+from .base import Mapped as Mapped
+from .base import NotExtension as NotExtension
+from .base import ORMDescriptor as ORMDescriptor
+from .base import PassiveFlag as PassiveFlag
+from .base import SQLORMExpression as SQLORMExpression
+from .base import WriteOnlyMapped as WriteOnlyMapped
+from .context import FromStatement as FromStatement
+from .context import QueryContext as QueryContext
+from .decl_api import add_mapped_attribute as add_mapped_attribute
+from .decl_api import as_declarative as as_declarative
+from .decl_api import declarative_base as declarative_base
+from .decl_api import declarative_mixin as declarative_mixin
+from .decl_api import DeclarativeBase as DeclarativeBase
+from .decl_api import DeclarativeBaseNoMeta as DeclarativeBaseNoMeta
+from .decl_api import DeclarativeMeta as DeclarativeMeta
+from .decl_api import declared_attr as declared_attr
+from .decl_api import has_inherited_table as has_inherited_table
+from .decl_api import MappedAsDataclass as MappedAsDataclass
+from .decl_api import registry as registry
+from .decl_api import synonym_for as synonym_for
+from .decl_base import MappedClassProtocol as MappedClassProtocol
+from .descriptor_props import Composite as Composite
+from .descriptor_props import CompositeProperty as CompositeProperty
+from .descriptor_props import Synonym as Synonym
+from .descriptor_props import SynonymProperty as SynonymProperty
+from .dynamic import AppenderQuery as AppenderQuery
+from .events import AttributeEvents as AttributeEvents
+from .events import InstanceEvents as InstanceEvents
+from .events import InstrumentationEvents as InstrumentationEvents
+from .events import MapperEvents as MapperEvents
+from .events import QueryEvents as QueryEvents
+from .events import SessionEvents as SessionEvents
+from .identity import IdentityMap as IdentityMap
+from .instrumentation import ClassManager as ClassManager
+from .interfaces import EXT_CONTINUE as EXT_CONTINUE
+from .interfaces import EXT_SKIP as EXT_SKIP
+from .interfaces import EXT_STOP as EXT_STOP
+from .interfaces import InspectionAttr as InspectionAttr
+from .interfaces import InspectionAttrInfo as InspectionAttrInfo
+from .interfaces import MANYTOMANY as MANYTOMANY
+from .interfaces import MANYTOONE as MANYTOONE
+from .interfaces import MapperProperty as MapperProperty
+from .interfaces import NO_KEY as NO_KEY
+from .interfaces import NO_VALUE as NO_VALUE
+from .interfaces import ONETOMANY as ONETOMANY
+from .interfaces import PropComparator as PropComparator
+from .interfaces import RelationshipDirection as RelationshipDirection
+from .interfaces import UserDefinedOption as UserDefinedOption
+from .loading import merge_frozen_result as merge_frozen_result
+from .loading import merge_result as merge_result
+from .mapped_collection import attribute_keyed_dict as attribute_keyed_dict
+from .mapped_collection import (
+    attribute_mapped_collection as attribute_mapped_collection,
+)
+from .mapped_collection import column_keyed_dict as column_keyed_dict
+from .mapped_collection import (
+    column_mapped_collection as column_mapped_collection,
+)
+from .mapped_collection import keyfunc_mapping as keyfunc_mapping
+from .mapped_collection import KeyFuncDict as KeyFuncDict
+from .mapped_collection import mapped_collection as mapped_collection
+from .mapped_collection import MappedCollection as MappedCollection
+from .mapper import configure_mappers as configure_mappers
+from .mapper import Mapper as Mapper
+from .mapper import reconstructor as reconstructor
+from .mapper import validates as validates
+from .properties import ColumnProperty as ColumnProperty
+from .properties import MappedColumn as MappedColumn
+from .properties import MappedSQLExpression as MappedSQLExpression
+from .query import AliasOption as AliasOption
+from .query import Query as Query
+from .relationships import foreign as foreign
+from .relationships import Relationship as Relationship
+from .relationships import RelationshipProperty as RelationshipProperty
+from .relationships import remote as remote
+from .scoping import QueryPropertyDescriptor as QueryPropertyDescriptor
+from .scoping import scoped_session as scoped_session
+from .session import close_all_sessions as close_all_sessions
+from .session import make_transient as make_transient
+from .session import make_transient_to_detached as make_transient_to_detached
+from .session import object_session as object_session
+from .session import ORMExecuteState as ORMExecuteState
+from .session import Session as Session
+from .session import sessionmaker as sessionmaker
+from .session import SessionTransaction as SessionTransaction
+from .session import SessionTransactionOrigin as SessionTransactionOrigin
+from .state import AttributeState as AttributeState
+from .state import InstanceState as InstanceState
+from .strategy_options import contains_eager as contains_eager
+from .strategy_options import defaultload as defaultload
+from .strategy_options import defer as defer
+from .strategy_options import immediateload as immediateload
+from .strategy_options import joinedload as joinedload
+from .strategy_options import lazyload as lazyload
+from .strategy_options import Load as Load
+from .strategy_options import load_only as load_only
+from .strategy_options import noload as noload
+from .strategy_options import raiseload as raiseload
+from .strategy_options import selectin_polymorphic as selectin_polymorphic
+from .strategy_options import selectinload as selectinload
+from .strategy_options import subqueryload as subqueryload
+from .strategy_options import undefer as undefer
+from .strategy_options import undefer_group as undefer_group
+from .strategy_options import with_expression as with_expression
+from .unitofwork import UOWTransaction as UOWTransaction
+from .util import Bundle as Bundle
+from .util import CascadeOptions as CascadeOptions
+from .util import LoaderCriteriaOption as LoaderCriteriaOption
+from .util import object_mapper as object_mapper
+from .util import polymorphic_union as polymorphic_union
+from .util import was_deleted as was_deleted
+from .util import with_parent as with_parent
+from .writeonly import WriteOnlyCollection as WriteOnlyCollection
+from .. import util as _sa_util
+
+
+def __go(lcls: Any) -> None:
+    _sa_util.preloaded.import_prefix("sqlalchemy.orm")
+    _sa_util.preloaded.import_prefix("sqlalchemy.ext")
+
+
+__go(locals())
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/_orm_constructors.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/_orm_constructors.py
new file mode 100644
index 00000000..d9e3ec37
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/_orm_constructors.py
@@ -0,0 +1,2590 @@
+# orm/_orm_constructors.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+import typing
+from typing import Any
+from typing import Callable
+from typing import Collection
+from typing import Iterable
+from typing import NoReturn
+from typing import Optional
+from typing import overload
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import Union
+
+from . import mapperlib as mapperlib
+from ._typing import _O
+from .descriptor_props import Composite
+from .descriptor_props import Synonym
+from .interfaces import _AttributeOptions
+from .properties import MappedColumn
+from .properties import MappedSQLExpression
+from .query import AliasOption
+from .relationships import _RelationshipArgumentType
+from .relationships import _RelationshipDeclared
+from .relationships import _RelationshipSecondaryArgument
+from .relationships import RelationshipProperty
+from .session import Session
+from .util import _ORMJoin
+from .util import AliasedClass
+from .util import AliasedInsp
+from .util import LoaderCriteriaOption
+from .. import sql
+from .. import util
+from ..exc import InvalidRequestError
+from ..sql._typing import _no_kw
+from ..sql.base import _NoArg
+from ..sql.base import SchemaEventTarget
+from ..sql.schema import _InsertSentinelColumnDefault
+from ..sql.schema import SchemaConst
+from ..sql.selectable import FromClause
+from ..util.typing import Annotated
+from ..util.typing import Literal
+
+if TYPE_CHECKING:
+    from ._typing import _EntityType
+    from ._typing import _ORMColumnExprArgument
+    from .descriptor_props import _CC
+    from .descriptor_props import _CompositeAttrType
+    from .interfaces import PropComparator
+    from .mapper import Mapper
+    from .query import Query
+    from .relationships import _LazyLoadArgumentType
+    from .relationships import _ORMColCollectionArgument
+    from .relationships import _ORMOrderByArgument
+    from .relationships import _RelationshipJoinConditionArgument
+    from .relationships import ORMBackrefArgument
+    from .session import _SessionBind
+    from ..sql._typing import _AutoIncrementType
+    from ..sql._typing import _ColumnExpressionArgument
+    from ..sql._typing import _FromClauseArgument
+    from ..sql._typing import _InfoType
+    from ..sql._typing import _OnClauseArgument
+    from ..sql._typing import _TypeEngineArgument
+    from ..sql.elements import ColumnElement
+    from ..sql.schema import _ServerDefaultArgument
+    from ..sql.schema import _ServerOnUpdateArgument
+    from ..sql.selectable import Alias
+    from ..sql.selectable import Subquery
+
+
+_T = typing.TypeVar("_T")
+
+
+@util.deprecated(
+    "1.4",
+    "The :class:`.AliasOption` object is not necessary "
+    "for entities to be matched up to a query that is established "
+    "via :meth:`.Query.from_statement` and now does nothing.",
+    enable_warnings=False,  # AliasOption itself warns
+)
+def contains_alias(alias: Union[Alias, Subquery]) -> AliasOption:
+    r"""Return a :class:`.MapperOption` that will indicate to the
+    :class:`_query.Query`
+    that the main table has been aliased.
+
+    """
+    return AliasOption(alias)
+
+
+def mapped_column(
+    __name_pos: Optional[
+        Union[str, _TypeEngineArgument[Any], SchemaEventTarget]
+    ] = None,
+    __type_pos: Optional[
+        Union[_TypeEngineArgument[Any], SchemaEventTarget]
+    ] = None,
+    *args: SchemaEventTarget,
+    init: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    repr: Union[_NoArg, bool] = _NoArg.NO_ARG,  # noqa: A002
+    default: Optional[Any] = _NoArg.NO_ARG,
+    default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
+    compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG,  # noqa: A002
+    nullable: Optional[
+        Union[bool, Literal[SchemaConst.NULL_UNSPECIFIED]]
+    ] = SchemaConst.NULL_UNSPECIFIED,
+    primary_key: Optional[bool] = False,
+    deferred: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    deferred_group: Optional[str] = None,
+    deferred_raiseload: Optional[bool] = None,
+    use_existing_column: bool = False,
+    name: Optional[str] = None,
+    type_: Optional[_TypeEngineArgument[Any]] = None,
+    autoincrement: _AutoIncrementType = "auto",
+    doc: Optional[str] = None,
+    key: Optional[str] = None,
+    index: Optional[bool] = None,
+    unique: Optional[bool] = None,
+    info: Optional[_InfoType] = None,
+    onupdate: Optional[Any] = None,
+    insert_default: Optional[Any] = _NoArg.NO_ARG,
+    server_default: Optional[_ServerDefaultArgument] = None,
+    server_onupdate: Optional[_ServerOnUpdateArgument] = None,
+    active_history: bool = False,
+    quote: Optional[bool] = None,
+    system: bool = False,
+    comment: Optional[str] = None,
+    sort_order: Union[_NoArg, int] = _NoArg.NO_ARG,
+    **kw: Any,
+) -> MappedColumn[Any]:
+    r"""declare a new ORM-mapped :class:`_schema.Column` construct
+    for use within :ref:`Declarative Table <orm_declarative_table>`
+    configuration.
+
+    The :func:`_orm.mapped_column` function provides an ORM-aware and
+    Python-typing-compatible construct which is used with
+    :ref:`declarative <orm_declarative_mapping>` mappings to indicate an
+    attribute that's mapped to a Core :class:`_schema.Column` object.  It
+    provides the equivalent feature as mapping an attribute to a
+    :class:`_schema.Column` object directly when using Declarative,
+    specifically when using :ref:`Declarative Table <orm_declarative_table>`
+    configuration.
+
+    .. versionadded:: 2.0
+
+    :func:`_orm.mapped_column` is normally used with explicit typing along with
+    the :class:`_orm.Mapped` annotation type, where it can derive the SQL
+    type and nullability for the column based on what's present within the
+    :class:`_orm.Mapped` annotation.   It also may be used without annotations
+    as a drop-in replacement for how :class:`_schema.Column` is used in
+    Declarative mappings in SQLAlchemy 1.x style.
+
+    For usage examples of :func:`_orm.mapped_column`, see the documentation
+    at :ref:`orm_declarative_table`.
+
+    .. seealso::
+
+        :ref:`orm_declarative_table` - complete documentation
+
+        :ref:`whatsnew_20_orm_declarative_typing` - migration notes for
+        Declarative mappings using 1.x style mappings
+
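+    For example, a minimal sketch of an annotated Declarative mapping (the
+    ``Base`` and ``User`` classes and their columns are illustrative only)::
+
+        from typing import Optional
+
+        from sqlalchemy import String
+        from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
+
+
+        class Base(DeclarativeBase):
+            pass
+
+
+        class User(Base):
+            __tablename__ = "user_account"
+
+            # Integer datatype derived from the Mapped[int] annotation
+            id: Mapped[int] = mapped_column(primary_key=True)
+
+            # explicit SQL type; NOT NULL derived from Mapped[str]
+            name: Mapped[str] = mapped_column(String(50))
+
+            # Optional[...] in the annotation implies NULLable
+            email: Mapped[Optional[str]] = mapped_column()
+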
+    :param __name: String name to give to the :class:`_schema.Column`.  This
+     is an optional, positional-only argument that if present must be the
+     first positional argument passed.  If omitted, the attribute name to
+     which the :func:`_orm.mapped_column`  is mapped will be used as the SQL
+     column name.
+    :param __type: :class:`_types.TypeEngine` type or instance which will
+     indicate the datatype to be associated with the :class:`_schema.Column`.
+     This is an optional, positional-only argument that if present must
+     immediately follow the ``__name`` parameter if present also, or otherwise
+     be the first positional parameter.  If omitted, the ultimate type for
+     the column may be derived either from the annotated type, or if a
+     :class:`_schema.ForeignKey` is present, from the datatype of the
+     referenced column.
+    :param \*args: Additional positional arguments include constructs such
+     as :class:`_schema.ForeignKey`, :class:`_schema.CheckConstraint`,
+     and :class:`_schema.Identity`, which are passed through to the constructed
+     :class:`_schema.Column`.
+    :param nullable: Optional bool, whether the column should be "NULL" or
+     "NOT NULL". If omitted, the nullability is derived from the type
+     annotation based on whether or not ``typing.Optional`` is present.
+     Otherwise, ``nullable`` defaults to ``True`` for non-primary-key
+     columns, and to ``False`` for primary key columns.
+    :param primary_key: optional bool, indicates whether the
+     :class:`_schema.Column` is part of the table's primary key.
+    :param deferred: Optional bool - this keyword argument is consumed by the
+     ORM declarative process, and is not part of the :class:`_schema.Column`
+     itself; instead, it indicates that this column should be "deferred" for
+     loading as though mapped by :func:`_orm.deferred`.
+
+     .. seealso::
+
+        :ref:`orm_queryguide_deferred_declarative`
+
+    :param deferred_group: Implies :paramref:`_orm.mapped_column.deferred`
+     to ``True``, and sets the :paramref:`_orm.deferred.group` parameter.
+
+     .. seealso::
+
+        :ref:`orm_queryguide_deferred_group`
+
+    :param deferred_raiseload: Implies :paramref:`_orm.mapped_column.deferred`
+     to ``True``, and sets the :paramref:`_orm.deferred.raiseload` parameter.
+
+     .. seealso::
+
+        :ref:`orm_queryguide_deferred_raiseload`
+
+    :param use_existing_column: if True, will attempt to locate the given
+     column name on an inherited superclass (typically single inheriting
+     superclass), and if present, will not produce a new column, mapping
+     to the superclass column as though it were omitted from this class.
+     This is used for mixins that add new columns to an inherited superclass.
+
+     .. seealso::
+
+        :ref:`orm_inheritance_column_conflicts`
+
+     .. versionadded:: 2.0.0b4
+
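+     For example, a brief sketch under single table inheritance, mirroring
+     the pattern described above (the ``Person`` hierarchy and the
+     ``datetime`` column are illustrative)::
+
+         from datetime import datetime
+
+
+         class Person(Base):
+             __tablename__ = "person"
+
+             id: Mapped[int] = mapped_column(primary_key=True)
+
+
+         class Engineer(Person):
+             # single table inheritance; adds "start_date" to "person"
+             start_date: Mapped[Optional[datetime]] = mapped_column(
+                 use_existing_column=True
+             )
+
+
+         class Manager(Person):
+             # reuses the "start_date" column created above rather than
+             # raising a duplicate column conflict
+             start_date: Mapped[Optional[datetime]] = mapped_column(
+                 use_existing_column=True
+             )
+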
+    :param default: Passed directly to the
+     :paramref:`_schema.Column.default` parameter if the
+     :paramref:`_orm.mapped_column.insert_default` parameter is not present.
+     Additionally, when used with :ref:`orm_declarative_native_dataclasses`,
+     indicates a default Python value that should be applied to the keyword
+     constructor within the generated ``__init__()`` method.
+
+     Note that in the case of dataclass generation when
+     :paramref:`_orm.mapped_column.insert_default` is not present, this means
+     the :paramref:`_orm.mapped_column.default` value is used in **two**
+     places, both the ``__init__()`` method as well as the
+     :paramref:`_schema.Column.default` parameter. While this behavior may
+     change in a future release, for the moment this tends to "work out"; a
+     default of ``None`` will mean that the :class:`_schema.Column` gets no
+     default generator, whereas a default that refers to a non-``None`` Python
+     or SQL expression value will be assigned up front on the object when
+     ``__init__()`` is called, which is the same value that the Core
+     :class:`_sql.Insert` construct would use in any case, leading to the same
+     end result.
+
+     .. note:: When using Core level column defaults that are callables to
+        be interpreted by the underlying :class:`_schema.Column` in conjunction
+        with :ref:`ORM-mapped dataclasses
+        <orm_declarative_native_dataclasses>`, especially those that are
+        :ref:`context-aware default functions <context_default_functions>`,
+        **the** :paramref:`_orm.mapped_column.insert_default` **parameter must
+        be used instead**.  This is necessary to disambiguate the callable from
+        being interpreted as a dataclass level default.
+
+     .. seealso::
+
+        :ref:`defaults_default_factory_insert_default`
+
+        :paramref:`_orm.mapped_column.insert_default`
+
+        :paramref:`_orm.mapped_column.default_factory`
+
+    :param insert_default: Passed directly to the
+     :paramref:`_schema.Column.default` parameter; will supersede the value
+     of :paramref:`_orm.mapped_column.default` when present, however
+     :paramref:`_orm.mapped_column.default` will always apply to the
+     constructor default for a dataclasses mapping.
+
+     .. seealso::
+
+        :ref:`defaults_default_factory_insert_default`
+
+        :paramref:`_orm.mapped_column.default`
+
+        :paramref:`_orm.mapped_column.default_factory`
+
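+     For example, a short sketch contrasting the two parameters, assuming a
+     ``Base`` declared with :class:`_orm.MappedAsDataclass` (the ``Event``
+     class and its columns are illustrative)::
+
+         from datetime import datetime
+
+         from sqlalchemy import func
+
+
+         class Event(Base):
+             __tablename__ = "event"
+
+             id: Mapped[int] = mapped_column(init=False, primary_key=True)
+
+             # used both as the __init__() default and, with no
+             # insert_default present, as the Column-level default
+             name: Mapped[str] = mapped_column(default="unnamed")
+
+             # a Column-level INSERT default only; init=False keeps the
+             # callable out of the generated __init__()
+             created_at: Mapped[datetime] = mapped_column(
+                 init=False, insert_default=func.now()
+             )
+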
+    :param sort_order: An integer that indicates how this mapped column
+     should be sorted compared to the others when the ORM is creating a
+     :class:`_schema.Table`. Among mapped columns that have the same
+     value the default ordering is used, placing first the mapped columns
+     defined in the main class, then the ones in the super classes.
+     Defaults to 0. The sort is ascending.
+
+     .. versionadded:: 2.0.4
+
+    :param active_history=False:
+
+        When ``True``, indicates that the "previous" value for a
+        scalar attribute should be loaded when replaced, if not
+        already loaded. Normally, history tracking logic for
+        simple non-primary-key scalar values only needs to be
+        aware of the "new" value in order to perform a flush. This
+        flag is available for applications that make use of
+        :func:`.attributes.get_history` or :meth:`.Session.is_modified`
+        which also need to know the "previous" value of the attribute.
+
+        .. versionadded:: 2.0.10
+
+
+    :param init: Specific to :ref:`orm_declarative_native_dataclasses`,
+     specifies if the mapped attribute should be part of the ``__init__()``
+     method as generated by the dataclass process.
+    :param repr: Specific to :ref:`orm_declarative_native_dataclasses`,
+     specifies if the mapped attribute should be part of the ``__repr__()``
+     method as generated by the dataclass process.
+    :param default_factory: Specific to
+     :ref:`orm_declarative_native_dataclasses`,
+     specifies a default-value generation function that will take place
+     as part of the ``__init__()``
+     method as generated by the dataclass process.
+
+     .. seealso::
+
+        :ref:`defaults_default_factory_insert_default`
+
+        :paramref:`_orm.mapped_column.default`
+
+        :paramref:`_orm.mapped_column.insert_default`
+
+    :param compare: Specific to
+     :ref:`orm_declarative_native_dataclasses`, indicates if this field
+     should be included in comparison operations when generating the
+     ``__eq__()`` and ``__ne__()`` methods for the mapped class.
+
+     .. versionadded:: 2.0.0b4
+
+    :param kw_only: Specific to
+     :ref:`orm_declarative_native_dataclasses`, indicates if this field
+     should be marked as keyword-only when generating the ``__init__()``.
+
+    :param hash: Specific to
+     :ref:`orm_declarative_native_dataclasses`, controls if this field
+     is included when generating the ``__hash__()`` method for the mapped
+     class.
+
+     .. versionadded:: 2.0.36
+
+    :param \**kw: All remaining keyword arguments are passed through to the
+     constructor for the :class:`_schema.Column`.
+
+    """
+
+    return MappedColumn(
+        __name_pos,
+        __type_pos,
+        *args,
+        name=name,
+        type_=type_,
+        autoincrement=autoincrement,
+        insert_default=insert_default,
+        attribute_options=_AttributeOptions(
+            init, repr, default, default_factory, compare, kw_only, hash
+        ),
+        doc=doc,
+        key=key,
+        index=index,
+        unique=unique,
+        info=info,
+        active_history=active_history,
+        nullable=nullable,
+        onupdate=onupdate,
+        primary_key=primary_key,
+        server_default=server_default,
+        server_onupdate=server_onupdate,
+        use_existing_column=use_existing_column,
+        quote=quote,
+        comment=comment,
+        system=system,
+        deferred=deferred,
+        deferred_group=deferred_group,
+        deferred_raiseload=deferred_raiseload,
+        sort_order=sort_order,
+        **kw,
+    )
+
+
+def orm_insert_sentinel(
+    name: Optional[str] = None,
+    type_: Optional[_TypeEngineArgument[Any]] = None,
+    *,
+    default: Optional[Any] = None,
+    omit_from_statements: bool = True,
+) -> MappedColumn[Any]:
+    """Provides a surrogate :func:`_orm.mapped_column` that generates
+    a so-called :term:`sentinel` column, allowing efficient bulk
+    inserts with deterministic RETURNING sorting for tables that don't
+    otherwise have qualifying primary key configurations.
+
+    Use of :func:`_orm.orm_insert_sentinel` is analogous to the use of the
+    :func:`_schema.insert_sentinel` construct within a Core
+    :class:`_schema.Table` construct.
+
+    Guidelines for adding this construct to a Declarative mapped class
+    are the same as that of the :func:`_schema.insert_sentinel` construct;
+    the database table itself also needs to have a column with this name
+    present.
+
+    For background on how this object is used, see the section
+    :ref:`engine_insertmanyvalues_sentinel_columns` as part of the
+    section :ref:`engine_insertmanyvalues`.
+
+    .. seealso::
+
+        :func:`_schema.insert_sentinel`
+
+        :ref:`engine_insertmanyvalues`
+
+        :ref:`engine_insertmanyvalues_sentinel_columns`
+
+
+    .. versionadded:: 2.0.10
+
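+    A minimal sketch (the mapped class is illustrative; the database table
+    itself must also include the sentinel column)::
+
+        class MyTable(Base):
+            __tablename__ = "my_table"
+
+            id: Mapped[str] = mapped_column(String(50), primary_key=True)
+
+            # adds a "_sentinel" column used to sort RETURNING rows
+            _sentinel: Mapped[int] = orm_insert_sentinel()
+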
+    """
+
+    return mapped_column(
+        name=name,
+        default=(
+            default if default is not None else _InsertSentinelColumnDefault()
+        ),
+        _omit_from_statements=omit_from_statements,
+        insert_sentinel=True,
+        use_existing_column=True,
+        nullable=True,
+    )
+
+
+@util.deprecated_params(
+    **{
+        arg: (
+            "2.0",
+            f"The :paramref:`_orm.column_property.{arg}` parameter is "
+            "deprecated for :func:`_orm.column_property`.  This parameter "
+            "applies to a writeable-attribute in a Declarative Dataclasses "
+            "configuration only, and :func:`_orm.column_property` is treated "
+            "as a read-only attribute in this context.",
+        )
+        for arg in ("init", "kw_only", "default", "default_factory")
+    }
+)
+def column_property(
+    column: _ORMColumnExprArgument[_T],
+    *additional_columns: _ORMColumnExprArgument[Any],
+    group: Optional[str] = None,
+    deferred: bool = False,
+    raiseload: bool = False,
+    comparator_factory: Optional[Type[PropComparator[_T]]] = None,
+    init: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    repr: Union[_NoArg, bool] = _NoArg.NO_ARG,  # noqa: A002
+    default: Optional[Any] = _NoArg.NO_ARG,
+    default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
+    compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG,  # noqa: A002
+    active_history: bool = False,
+    expire_on_flush: bool = True,
+    info: Optional[_InfoType] = None,
+    doc: Optional[str] = None,
+) -> MappedSQLExpression[_T]:
+    r"""Provide a column-level property for use with a mapping.
+
+    With Declarative mappings, :func:`_orm.column_property` is used to
+    map read-only SQL expressions to a mapped class.
+
+    When using Imperative mappings, :func:`_orm.column_property` also
+    takes on the role of mapping table columns with additional features.
+    When using fully Declarative mappings, the :func:`_orm.mapped_column`
+    construct should be used for this purpose.
+
+    With Declarative Dataclass mappings, :func:`_orm.column_property`
+    is considered to be **read only**, and will not be included in the
+    Dataclass ``__init__()`` constructor.
+
+    The :func:`_orm.column_property` function returns an instance of
+    :class:`.ColumnProperty`.
+
+    .. seealso::
+
+        :ref:`mapper_column_property_sql_expressions` - general use of
+        :func:`_orm.column_property` to map SQL expressions
+
+        :ref:`orm_imperative_table_column_options` - usage of
+        :func:`_orm.column_property` with Imperative Table mappings to apply
+        additional options to a plain :class:`_schema.Column` object
+
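+    For example, a short sketch mapping a read-only SQL concatenation
+    expression (the ``User`` class and its columns are illustrative)::
+
+        class User(Base):
+            __tablename__ = "user"
+
+            id: Mapped[int] = mapped_column(primary_key=True)
+            firstname: Mapped[str] = mapped_column(String(50))
+            lastname: Mapped[str] = mapped_column(String(50))
+
+            # read-only attribute rendered as part of SELECT statements
+            fullname: Mapped[str] = column_property(firstname + " " + lastname)
+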
+    :param \*cols:
+        list of Column objects to be mapped.
+
+    :param active_history=False:
+
+        Used only for Imperative Table mappings, or legacy-style Declarative
+        mappings (i.e. which have not been upgraded to
+        :func:`_orm.mapped_column`), for column-based attributes that are
+        expected to be writeable; use :func:`_orm.mapped_column` with
+        :paramref:`_orm.mapped_column.active_history` for Declarative mappings.
+        See that parameter for functional details.
+
+    :param comparator_factory: a class which extends
+        :class:`.ColumnProperty.Comparator` which provides custom SQL
+        clause generation for comparison operations.
+
+    :param group:
+        a group name for this property when marked as deferred.
+
+    :param deferred:
+        when True, the column property is "deferred", meaning that
+        it does not load immediately, and is instead loaded when the
+        attribute is first accessed on an instance.  See also
+        :func:`~sqlalchemy.orm.deferred`.
+
+    :param doc:
+        optional string that will be applied as the doc on the
+        class-bound descriptor.
+
+    :param expire_on_flush=True:
+        When ``False``, expiry on flush is disabled.   A column_property() which refers
+        to a SQL expression (and not a single table-bound column)
+        is considered to be a "read only" property; populating it
+        has no effect on the state of data, and it can only return
+        database state.   For this reason a column_property()'s value
+        is expired whenever the parent object is involved in a
+        flush, that is, has any kind of "dirty" state within a flush.
+        Setting this parameter to ``False`` will have the effect of
+        leaving any existing value present after the flush proceeds.
+        Note that the :class:`.Session` with default expiration
+        settings still expires
+        all attributes after a :meth:`.Session.commit` call, however.
+
+    :param info: Optional data dictionary which will be populated into the
+        :attr:`.MapperProperty.info` attribute of this object.
+
+    :param raiseload: if True, indicates the column should raise an error
+        when undeferred, rather than loading the value.  This can be
+        altered at query time by using the :func:`.deferred` option with
+        raiseload=False.
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :ref:`orm_queryguide_deferred_raiseload`
+
+    :param init: Specific to :ref:`orm_declarative_native_dataclasses`,
+     specifies if the mapped attribute should be part of the ``__init__()``
+     method as generated by the dataclass process.
+    :param repr: Specific to :ref:`orm_declarative_native_dataclasses`,
+     specifies if the mapped attribute should be part of the ``__repr__()``
+     method as generated by the dataclass process.
+    :param default_factory: Specific to
+     :ref:`orm_declarative_native_dataclasses`,
+     specifies a default-value generation function that will take place
+     as part of the ``__init__()``
+     method as generated by the dataclass process.
+
+     .. seealso::
+
+        :ref:`defaults_default_factory_insert_default`
+
+        :paramref:`_orm.mapped_column.default`
+
+        :paramref:`_orm.mapped_column.insert_default`
+
+    :param compare: Specific to
+     :ref:`orm_declarative_native_dataclasses`, indicates if this field
+     should be included in comparison operations when generating the
+     ``__eq__()`` and ``__ne__()`` methods for the mapped class.
+
+     .. versionadded:: 2.0.0b4
+
+    :param kw_only: Specific to
+     :ref:`orm_declarative_native_dataclasses`, indicates if this field
+     should be marked as keyword-only when generating the ``__init__()``.
+
+    :param hash: Specific to
+     :ref:`orm_declarative_native_dataclasses`, controls if this field
+     is included when generating the ``__hash__()`` method for the mapped
+     class.
+
+     .. versionadded:: 2.0.36
+
+    """
+    return MappedSQLExpression(
+        column,
+        *additional_columns,
+        attribute_options=_AttributeOptions(
+            False if init is _NoArg.NO_ARG else init,
+            repr,
+            default,
+            default_factory,
+            compare,
+            kw_only,
+            hash,
+        ),
+        group=group,
+        deferred=deferred,
+        raiseload=raiseload,
+        comparator_factory=comparator_factory,
+        active_history=active_history,
+        expire_on_flush=expire_on_flush,
+        info=info,
+        doc=doc,
+        _assume_readonly_dc_attributes=True,
+    )
+
+
+@overload
+def composite(
+    _class_or_attr: _CompositeAttrType[Any],
+    *attrs: _CompositeAttrType[Any],
+    group: Optional[str] = None,
+    deferred: bool = False,
+    raiseload: bool = False,
+    comparator_factory: Optional[Type[Composite.Comparator[_T]]] = None,
+    active_history: bool = False,
+    init: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    repr: Union[_NoArg, bool] = _NoArg.NO_ARG,  # noqa: A002
+    default: Optional[Any] = _NoArg.NO_ARG,
+    default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
+    compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG,  # noqa: A002
+    info: Optional[_InfoType] = None,
+    doc: Optional[str] = None,
+    **__kw: Any,
+) -> Composite[Any]: ...
+
+
+@overload
+def composite(
+    _class_or_attr: Type[_CC],
+    *attrs: _CompositeAttrType[Any],
+    group: Optional[str] = None,
+    deferred: bool = False,
+    raiseload: bool = False,
+    comparator_factory: Optional[Type[Composite.Comparator[_T]]] = None,
+    active_history: bool = False,
+    init: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    repr: Union[_NoArg, bool] = _NoArg.NO_ARG,  # noqa: A002
+    default: Optional[Any] = _NoArg.NO_ARG,
+    default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
+    compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG,  # noqa: A002
+    info: Optional[_InfoType] = None,
+    doc: Optional[str] = None,
+    **__kw: Any,
+) -> Composite[_CC]: ...
+
+
+@overload
+def composite(
+    _class_or_attr: Callable[..., _CC],
+    *attrs: _CompositeAttrType[Any],
+    group: Optional[str] = None,
+    deferred: bool = False,
+    raiseload: bool = False,
+    comparator_factory: Optional[Type[Composite.Comparator[_T]]] = None,
+    active_history: bool = False,
+    init: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    repr: Union[_NoArg, bool] = _NoArg.NO_ARG,  # noqa: A002
+    default: Optional[Any] = _NoArg.NO_ARG,
+    default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
+    compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG,  # noqa: A002
+    info: Optional[_InfoType] = None,
+    doc: Optional[str] = None,
+    **__kw: Any,
+) -> Composite[_CC]: ...
+
+
+def composite(
+    _class_or_attr: Union[
+        None, Type[_CC], Callable[..., _CC], _CompositeAttrType[Any]
+    ] = None,
+    *attrs: _CompositeAttrType[Any],
+    group: Optional[str] = None,
+    deferred: bool = False,
+    raiseload: bool = False,
+    comparator_factory: Optional[Type[Composite.Comparator[_T]]] = None,
+    active_history: bool = False,
+    init: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    repr: Union[_NoArg, bool] = _NoArg.NO_ARG,  # noqa: A002
+    default: Optional[Any] = _NoArg.NO_ARG,
+    default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
+    compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG,  # noqa: A002
+    info: Optional[_InfoType] = None,
+    doc: Optional[str] = None,
+    **__kw: Any,
+) -> Composite[Any]:
+    r"""Return a composite column-based property for use with a Mapper.
+
+    See the mapping documentation section :ref:`mapper_composite` for a
+    full usage example.
+
+    The :class:`.MapperProperty` returned by :func:`.composite`
+    is the :class:`.Composite`.
+
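+    A brief sketch, using a dataclass as the composite type (the ``Point``
+    and ``Vertex`` classes are illustrative)::
+
+        import dataclasses
+
+
+        @dataclasses.dataclass
+        class Point:
+            x: int
+            y: int
+
+
+        class Vertex(Base):
+            __tablename__ = "vertices"
+
+            id: Mapped[int] = mapped_column(primary_key=True)
+
+            # each composite attribute spans two mapped columns
+            start: Mapped[Point] = composite(mapped_column("x1"), mapped_column("y1"))
+            end: Mapped[Point] = composite(mapped_column("x2"), mapped_column("y2"))
+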
+    :param class\_:
+      The "composite type" class, or any classmethod or callable which
+      will produce a new instance of the composite object given the
+      column values in order.
+
+    :param \*attrs:
+      List of elements to be mapped, which may include:
+
+      * :class:`_schema.Column` objects
+      * :func:`_orm.mapped_column` constructs
+      * string names of other attributes on the mapped class, which may be
+        any other SQL or object-mapped attribute.  This can, for example,
+        allow a composite that refers to a many-to-one relationship.
+
+    :param active_history=False:
+      When ``True``, indicates that the "previous" value for a
+      scalar attribute should be loaded when replaced, if not
+      already loaded.  See the same flag on :func:`.column_property`.
+
+    :param group:
+      A group name for this property when marked as deferred.
+
+    :param deferred:
+      When True, the column property is "deferred", meaning that it does
+      not load immediately, and is instead loaded when the attribute is
+      first accessed on an instance.  See also
+      :func:`~sqlalchemy.orm.deferred`.
+
+    :param comparator_factory:  a class which extends
+      :class:`.Composite.Comparator` which provides custom SQL
+      clause generation for comparison operations.
+
+    :param doc:
+      optional string that will be applied as the doc on the
+      class-bound descriptor.
+
+    :param info: Optional data dictionary which will be populated into the
+        :attr:`.MapperProperty.info` attribute of this object.
+
+    :param init: Specific to :ref:`orm_declarative_native_dataclasses`,
+     specifies if the mapped attribute should be part of the ``__init__()``
+     method as generated by the dataclass process.
+    :param repr: Specific to :ref:`orm_declarative_native_dataclasses`,
+     specifies if the mapped attribute should be part of the ``__repr__()``
+     method as generated by the dataclass process.
+    :param default_factory: Specific to
+     :ref:`orm_declarative_native_dataclasses`,
+     specifies a default-value generation function that will take place
+     as part of the ``__init__()``
+     method as generated by the dataclass process.
+
+    :param compare: Specific to
+     :ref:`orm_declarative_native_dataclasses`, indicates if this field
+     should be included in comparison operations when generating the
+     ``__eq__()`` and ``__ne__()`` methods for the mapped class.
+
+     .. versionadded:: 2.0.0b4
+
+    :param kw_only: Specific to
+     :ref:`orm_declarative_native_dataclasses`, indicates if this field
+     should be marked as keyword-only when generating the ``__init__()``.
+
+    :param hash: Specific to
+     :ref:`orm_declarative_native_dataclasses`, controls if this field
+     is included when generating the ``__hash__()`` method for the mapped
+     class.
+
+     .. versionadded:: 2.0.36
+    """
+    if __kw:
+        raise _no_kw()
+
+    return Composite(
+        _class_or_attr,
+        *attrs,
+        attribute_options=_AttributeOptions(
+            init, repr, default, default_factory, compare, kw_only, hash
+        ),
+        group=group,
+        deferred=deferred,
+        raiseload=raiseload,
+        comparator_factory=comparator_factory,
+        active_history=active_history,
+        info=info,
+        doc=doc,
+    )
+
+
+def with_loader_criteria(
+    entity_or_base: _EntityType[Any],
+    where_criteria: Union[
+        _ColumnExpressionArgument[bool],
+        Callable[[Any], _ColumnExpressionArgument[bool]],
+    ],
+    loader_only: bool = False,
+    include_aliases: bool = False,
+    propagate_to_loaders: bool = True,
+    track_closure_variables: bool = True,
+) -> LoaderCriteriaOption:
+    """Add additional WHERE criteria to the load for all occurrences of
+    a particular entity.
+
+    .. versionadded:: 1.4
+
+    The :func:`_orm.with_loader_criteria` option is intended to add
+    limiting criteria to a particular kind of entity in a query,
+    **globally**, meaning it will apply to the entity as it appears
+    in the SELECT query as well as within any subqueries, join
+    conditions, and relationship loads, including both eager and lazy
+    loaders, without the need for it to be specified in any particular
+    part of the query.    The rendering logic uses the same system used by
+    single table inheritance to ensure a certain discriminator is applied
+    to a table.
+
+    E.g., using :term:`2.0-style` queries, we can limit the way the
+    ``User.addresses`` collection is loaded, regardless of the kind
+    of loading used::
+
+        from sqlalchemy.orm import with_loader_criteria
+
+        stmt = select(User).options(
+            selectinload(User.addresses),
+            with_loader_criteria(Address, Address.email_address != "foo"),
+        )
+
+    Above, the "selectinload" for ``User.addresses`` will apply the
+    given filtering criteria to the WHERE clause.
+
+    Another example, where the filtering will be applied to the
+    ON clause of the join, in this example using :term:`1.x style`
+    queries::
+
+        q = (
+            session.query(User)
+            .outerjoin(User.addresses)
+            .options(with_loader_criteria(Address, Address.email_address != "foo"))
+        )
+
+    The primary purpose of :func:`_orm.with_loader_criteria` is to use
+    it in the :meth:`_orm.SessionEvents.do_orm_execute` event handler
+    to ensure that all occurrences of a particular entity are filtered
+    in a certain way, such as filtering for access control roles.    It
+    also can be used to apply criteria to relationship loads.  In the
+    example below, we can apply a certain set of rules to all queries
+    emitted by a particular :class:`_orm.Session`::
+
+        session = Session(bind=engine)
+
+
+        @event.listens_for(session, "do_orm_execute")
+        def _add_filtering_criteria(execute_state):
+
+            if (
+                execute_state.is_select
+                and not execute_state.is_column_load
+                and not execute_state.is_relationship_load
+            ):
+                execute_state.statement = execute_state.statement.options(
+                    with_loader_criteria(
+                        SecurityRole,
+                        lambda cls: cls.role.in_(["some_role"]),
+                        include_aliases=True,
+                    )
+                )
+
+    In the above example, the :meth:`_orm.SessionEvents.do_orm_execute`
+    event will intercept all queries emitted using the
+    :class:`_orm.Session`. For those queries which are SELECT statements
+    and are not attribute or relationship loads a custom
+    :func:`_orm.with_loader_criteria` option is added to the query.    The
+    :func:`_orm.with_loader_criteria` option will be used in the given
+    statement and will also be automatically propagated to all relationship
+    loads that descend from this query.
+
+    The criteria argument given is a ``lambda`` that accepts a ``cls``
+    argument.  The given class will expand to include all mapped subclasses
+    and need not itself be a mapped class.
+
+    .. tip::
+
+       When using :func:`_orm.with_loader_criteria` option in
+       conjunction with the :func:`_orm.contains_eager` loader option,
+       it's important to note that :func:`_orm.with_loader_criteria` only
+       affects the part of the query that determines what SQL is rendered
+       in terms of the WHERE and FROM clauses. The
+       :func:`_orm.contains_eager` option does not affect the rendering of
+       the SELECT statement outside of the columns clause, so does not have
+       any interaction with the :func:`_orm.with_loader_criteria` option.
+       However, the way things "work" is that :func:`_orm.contains_eager`
+       is meant to be used with a query that is already selecting from the
+       additional entities in some way, where
+       :func:`_orm.with_loader_criteria` can apply its additional
+       criteria.
+
+       In the example below, assuming a mapping relationship as
+       ``A -> A.bs -> B``, the given :func:`_orm.with_loader_criteria`
+       option will affect the way in which the JOIN is rendered::
+
+            stmt = (
+                select(A)
+                .join(A.bs)
+                .options(contains_eager(A.bs), with_loader_criteria(B, B.flag == 1))
+            )
+
+       Above, the given :func:`_orm.with_loader_criteria` option will
+       affect the ON clause of the JOIN that is specified by
+       ``.join(A.bs)``, so is applied as expected. The
+       :func:`_orm.contains_eager` option has the effect that columns from
+       ``B`` are added to the columns clause:
+
+       .. sourcecode:: sql
+
+            SELECT
+                b.id, b.a_id, b.data, b.flag,
+                a.id AS id_1,
+                a.data AS data_1
+            FROM a JOIN b ON a.id = b.a_id AND b.flag = :flag_1
+
+
+       The use of the :func:`_orm.contains_eager` option within the above
+       statement has no effect on the behavior of the
+       :func:`_orm.with_loader_criteria` option. If the
+       :func:`_orm.contains_eager` option were omitted, the SQL would be
+       the same as regards the FROM and WHERE clauses, where
+       :func:`_orm.with_loader_criteria` continues to add its criteria to
+       the ON clause of the JOIN. The addition of
+       :func:`_orm.contains_eager` only affects the columns clause, in that
+       additional columns against ``b`` are added which are then consumed
+       by the ORM to produce ``B`` instances.
+
+    .. warning:: The use of a lambda inside of the call to
+      :func:`_orm.with_loader_criteria` is only invoked **once per unique
+      class**. Custom functions should not be invoked within this lambda.
+      See :ref:`engine_lambda_caching` for an overview of the "lambda SQL"
+      feature, which is for advanced use only.
+
+    :param entity_or_base: a mapped class, or a class that is a super
+     class of a particular set of mapped classes, to which the rule
+     will apply.
+
+    :param where_criteria: a Core SQL expression that applies limiting
+     criteria.   This may also be a "lambda:" or Python function that
+     accepts a target class as an argument, when the given class is
+     a base with many different mapped subclasses.
+
+     .. note:: To support pickling, use a module-level Python function to
+        produce the SQL expression instead of a lambda or a fixed SQL
+        expression, which tend to not be picklable.
+
+    :param include_aliases: if True, apply the rule to :func:`_orm.aliased`
+     constructs as well.
+
+    :param propagate_to_loaders: defaults to ``True``; applies to relationship
+     loaders such as lazy loaders.   This indicates that the
+     option object itself including SQL expression is carried along with
+     each loaded instance.  Set to ``False`` to prevent the object from
+     being assigned to individual instances.
+
+
+     .. seealso::
+
+        :ref:`examples_session_orm_events` - includes examples of using
+        :func:`_orm.with_loader_criteria`.
+
+        :ref:`do_orm_execute_global_criteria` - basic example on how to
+        combine :func:`_orm.with_loader_criteria` with the
+        :meth:`_orm.SessionEvents.do_orm_execute` event.
+
+    :param track_closure_variables: when False, closure variables inside
+     of a lambda expression will not be used as part of
+     any cache key.    This allows more complex expressions to be used
+     inside of a lambda expression, but requires that the lambda
+     returns identical SQL every time for a given class.
+
+     .. versionadded:: 1.4.0b2
+
+    """  # noqa: E501
+    return LoaderCriteriaOption(
+        entity_or_base,
+        where_criteria,
+        loader_only,
+        include_aliases,
+        propagate_to_loaders,
+        track_closure_variables,
+    )
+
+
+def relationship(
+    argument: Optional[_RelationshipArgumentType[Any]] = None,
+    secondary: Optional[_RelationshipSecondaryArgument] = None,
+    *,
+    uselist: Optional[bool] = None,
+    collection_class: Optional[
+        Union[Type[Collection[Any]], Callable[[], Collection[Any]]]
+    ] = None,
+    primaryjoin: Optional[_RelationshipJoinConditionArgument] = None,
+    secondaryjoin: Optional[_RelationshipJoinConditionArgument] = None,
+    back_populates: Optional[str] = None,
+    order_by: _ORMOrderByArgument = False,
+    backref: Optional[ORMBackrefArgument] = None,
+    overlaps: Optional[str] = None,
+    post_update: bool = False,
+    cascade: str = "save-update, merge",
+    viewonly: bool = False,
+    init: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    repr: Union[_NoArg, bool] = _NoArg.NO_ARG,  # noqa: A002
+    default: Union[_NoArg, _T] = _NoArg.NO_ARG,
+    default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
+    compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG,  # noqa: A002
+    lazy: _LazyLoadArgumentType = "select",
+    passive_deletes: Union[Literal["all"], bool] = False,
+    passive_updates: bool = True,
+    active_history: bool = False,
+    enable_typechecks: bool = True,
+    foreign_keys: Optional[_ORMColCollectionArgument] = None,
+    remote_side: Optional[_ORMColCollectionArgument] = None,
+    join_depth: Optional[int] = None,
+    comparator_factory: Optional[
+        Type[RelationshipProperty.Comparator[Any]]
+    ] = None,
+    single_parent: bool = False,
+    innerjoin: bool = False,
+    distinct_target_key: Optional[bool] = None,
+    load_on_pending: bool = False,
+    query_class: Optional[Type[Query[Any]]] = None,
+    info: Optional[_InfoType] = None,
+    omit_join: Literal[None, False] = None,
+    sync_backref: Optional[bool] = None,
+    **kw: Any,
+) -> _RelationshipDeclared[Any]:
+    """Provide a relationship between two mapped classes.
+
+    This corresponds to a parent-child or associative table relationship.
+    The constructed class is an instance of :class:`.Relationship`.
+
+    .. seealso::
+
+        :ref:`tutorial_orm_related_objects` - tutorial introduction
+        to :func:`_orm.relationship` in the :ref:`unified_tutorial`
+
+        :ref:`relationship_config_toplevel` - narrative documentation
+
+    :param argument:
+      This parameter refers to the class that is to be related.   It
+      accepts several forms, including a direct reference to the target
+      class itself, the :class:`_orm.Mapper` instance for the target class,
+      a Python callable / lambda that will return a reference to the
+      class or :class:`_orm.Mapper` when called, and finally a string
+      name for the class, which will be resolved from the
+      :class:`_orm.registry` in use in order to locate the class, e.g.::
+
+            class SomeClass(Base):
+                # ...
+
+                related = relationship("RelatedClass")
+
+      The :paramref:`_orm.relationship.argument` may also be omitted from the
+      :func:`_orm.relationship` construct entirely, and instead placed inside
+      a :class:`_orm.Mapped` annotation on the left side, which should
+      include a Python collection type if the relationship is expected
+      to be a collection, such as::
+
+            class SomeClass(Base):
+                # ...
+
+                related_items: Mapped[List["RelatedItem"]] = relationship()
+
+      Or for a many-to-one or one-to-one relationship::
+
+            class SomeClass(Base):
+                # ...
+
+                related_item: Mapped["RelatedItem"] = relationship()
+
+      .. seealso::
+
+        :ref:`orm_declarative_properties` - further detail
+        on relationship configuration when using Declarative.
+
+    :param secondary:
+      For a many-to-many relationship, specifies the intermediary
+      table, and is typically an instance of :class:`_schema.Table`.
+      In less common circumstances, the argument may also be specified
+      as an :class:`_expression.Alias` construct, or even a
+      :class:`_expression.Join` construct.
+
+      :paramref:`_orm.relationship.secondary` may
+      also be passed as a callable function which is evaluated at
+      mapper initialization time.  When using Declarative, it may also
+      be a string argument noting the name of a :class:`_schema.Table`
+      that is
+      present in the :class:`_schema.MetaData`
+      collection associated with the
+      parent-mapped :class:`_schema.Table`.
+
+      .. warning:: When passed as a Python-evaluable string, the
+         argument is interpreted using Python's ``eval()`` function.
+         **DO NOT PASS UNTRUSTED INPUT TO THIS STRING**.
+         See :ref:`declarative_relationship_eval` for details on
+         declarative evaluation of :func:`_orm.relationship` arguments.
+
+      The :paramref:`_orm.relationship.secondary` keyword argument is
+      typically applied in the case where the intermediary
+      :class:`_schema.Table`
+      is not otherwise expressed in any direct class mapping. If the
+      "secondary" table is also explicitly mapped elsewhere (e.g. as in
+      :ref:`association_pattern`), one should consider applying the
+      :paramref:`_orm.relationship.viewonly` flag so that this
+      :func:`_orm.relationship`
+      is not used for persistence operations which
+      may conflict with those of the association object pattern.
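+
+      As a brief illustration, a minimal sketch of a many-to-many
+      configuration using an association :class:`_schema.Table` (table
+      and class names here are illustrative only)::
+
+            association_table = Table(
+                "association",
+                Base.metadata,
+                Column("left_id", ForeignKey("left.id"), primary_key=True),
+                Column("right_id", ForeignKey("right.id"), primary_key=True),
+            )
+
+
+            class Parent(Base):
+                __tablename__ = "left"
+
+                id: Mapped[int] = mapped_column(primary_key=True)
+                children: Mapped[List["Child"]] = relationship(
+                    secondary=association_table
+                )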
+
+      .. seealso::
+
+          :ref:`relationships_many_to_many` - Reference example of "many
+          to many".
+
+          :ref:`self_referential_many_to_many` - Specifics on using
+          many-to-many in a self-referential case.
+
+          :ref:`declarative_many_to_many` - Additional options when using
+          Declarative.
+
+          :ref:`association_pattern` - an alternative to
+          :paramref:`_orm.relationship.secondary`
+          when composing association
+          table relationships, allowing additional attributes to be
+          specified on the association table.
+
+          :ref:`composite_secondary_join` - a lesser-used pattern which
+          in some cases can enable complex :func:`_orm.relationship` SQL
+          conditions to be used.
+
+    :param active_history=False:
+      When ``True``, indicates that the "previous" value for a
+      many-to-one reference should be loaded when replaced, if
+      not already loaded. Normally, history tracking logic for
+      simple many-to-ones only needs to be aware of the "new"
+      value in order to perform a flush. This flag is available
+      for applications that make use of
+      :func:`.attributes.get_history` which also need to know
+      the "previous" value of the attribute.
+
+    :param backref:
+      A reference to a string relationship name, or a :func:`_orm.backref`
+      construct, which will be used to automatically generate a new
+      :func:`_orm.relationship` on the related class, which then refers to this
+      one using a bi-directional :paramref:`_orm.relationship.back_populates`
+      configuration.
+
+      In modern Python, explicit use of :func:`_orm.relationship`
+      with :paramref:`_orm.relationship.back_populates` should be preferred,
+      as it is more robust in terms of mapper configuration as well as
+      more conceptually straightforward.  It also integrates with
+      new :pep:`484` typing features introduced in SQLAlchemy 2.0, which
+      is not possible with dynamically generated attributes.
+
+      .. seealso::
+
+        :ref:`relationships_backref` - notes on using
+        :paramref:`_orm.relationship.backref`
+
+        :ref:`tutorial_orm_related_objects` - in the :ref:`unified_tutorial`,
+        presents an overview of bi-directional relationship configuration
+        and behaviors using :paramref:`_orm.relationship.back_populates`
+
+        :func:`.backref` - allows control over :func:`_orm.relationship`
+        configuration when using :paramref:`_orm.relationship.backref`.
+
+
+    :param back_populates:
+      Indicates the name of a :func:`_orm.relationship` on the related
+      class that will be synchronized with this one.   It is usually
+      expected that the :func:`_orm.relationship` on the related class
+      also refer to this one.  This allows objects on both sides of
+      each :func:`_orm.relationship` to synchronize in-Python state
+      changes and also provides directives to the :term:`unit of work`
+      flush process how changes along these relationships should
+      be persisted.
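+
+      As a brief illustration, a sketch of a bi-directional one-to-many
+      configuration (class and table names here are illustrative only)::
+
+            class Parent(Base):
+                __tablename__ = "parent"
+
+                id: Mapped[int] = mapped_column(primary_key=True)
+                children: Mapped[List["Child"]] = relationship(
+                    back_populates="parent"
+                )
+
+
+            class Child(Base):
+                __tablename__ = "child"
+
+                id: Mapped[int] = mapped_column(primary_key=True)
+                parent_id: Mapped[int] = mapped_column(ForeignKey("parent.id"))
+                parent: Mapped["Parent"] = relationship(
+                    back_populates="children"
+                )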
+
+      .. seealso::
+
+        :ref:`tutorial_orm_related_objects` - in the :ref:`unified_tutorial`,
+        presents an overview of bi-directional relationship configuration
+        and behaviors.
+
+        :ref:`relationship_patterns` - includes many examples of
+        :paramref:`_orm.relationship.back_populates`.
+
+        :paramref:`_orm.relationship.backref` - legacy form which allows
+        more succinct configuration, but does not support explicit typing
+
+    :param overlaps:
+       A string name or comma-delimited set of names of other relationships
+       on either this mapper, a descendant mapper, or a target mapper with
+       which this relationship may write to the same foreign keys upon
+       persistence.   The only effect this has is to eliminate the
+       warning that this relationship will conflict with another upon
+       persistence.   This is used for relationships that are truly
+       capable of conflicting with each other on write, where the
+       application will ensure that no such conflicts occur.
+
+       .. versionadded:: 1.4
+
+       .. seealso::
+
+            :ref:`error_qzyx` - usage example
+
+    :param cascade:
+      A comma-separated list of cascade rules which determines how
+      Session operations should be "cascaded" from parent to child.
+      This defaults to ``False``, which means the default cascade
+      should be used - this default cascade is ``"save-update, merge"``.
+
+      The available cascades are ``save-update``, ``merge``,
+      ``expunge``, ``delete``, ``delete-orphan``, and ``refresh-expire``.
+      An additional option, ``all`` indicates shorthand for
+      ``"save-update, merge, refresh-expire,
+      expunge, delete"``, and is often used as in ``"all, delete-orphan"``
+      to indicate that related objects should follow along with the
+      parent object in all cases, and be deleted when de-associated.
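+
+      For example, a sketch of a collection whose members follow the
+      parent object in all operations and are deleted when
+      de-associated (names here are illustrative only)::
+
+            children: Mapped[List["Child"]] = relationship(
+                cascade="all, delete-orphan"
+            )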
+
+      .. seealso::
+
+        :ref:`unitofwork_cascades` - Full detail on each of the available
+        cascade options.
+
+    :param cascade_backrefs=False:
+      Legacy; this flag is always False.
+
+      .. versionchanged:: 2.0 "cascade_backrefs" functionality has been
+         removed.
+
+    :param collection_class:
+      A class or callable that returns a new list-holding object, which
+      will be used in place of a plain list for storing elements.
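+
+      For example, a sketch storing related items in a ``set`` rather
+      than a list (names here are illustrative only)::
+
+            children = relationship("Child", collection_class=set)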
+
+      .. seealso::
+
+        :ref:`custom_collections` - Introductory documentation and
+        examples.
+
+    :param comparator_factory:
+      A class which extends :class:`.Relationship.Comparator`
+      which provides custom SQL clause generation for comparison
+      operations.
+
+      .. seealso::
+
+        :class:`.PropComparator` - some detail on redefining comparators
+        at this level.
+
+        :ref:`custom_comparators` - Brief intro to this feature.
+
+
+    :param distinct_target_key=None:
+      Indicate if a "subquery" eager load should apply the DISTINCT
+      keyword to the innermost SELECT statement.  When left as ``None``,
+      the DISTINCT keyword will be applied in those cases when the target
+      columns do not comprise the full primary key of the target table.
+      When set to ``True``, the DISTINCT keyword is applied to the
+      innermost SELECT unconditionally.
+
+      It may be desirable to set this flag to False when the DISTINCT
+      reduces the performance of the innermost subquery more than the
+      presence of duplicate innermost rows does.
+
+      .. seealso::
+
+        :ref:`loading_toplevel` - includes an introduction to subquery
+        eager loading.
+
+    :param doc:
+      Docstring which will be applied to the resulting descriptor.
+
+    :param foreign_keys:
+
+      A list of columns which are to be used as "foreign key"
+      columns, or columns which refer to the value in a remote
+      column, within the context of this :func:`_orm.relationship`
+      object's :paramref:`_orm.relationship.primaryjoin` condition.
+      That is, if the :paramref:`_orm.relationship.primaryjoin`
+      condition of this :func:`_orm.relationship` is ``a.id ==
+      b.a_id``, and the values in ``b.a_id`` are required to be
+      present in ``a.id``, then the "foreign key" column of this
+      :func:`_orm.relationship` is ``b.a_id``.
+
+      In normal cases, the :paramref:`_orm.relationship.foreign_keys`
+      parameter is **not required.** :func:`_orm.relationship` will
+      automatically determine which columns in the
+      :paramref:`_orm.relationship.primaryjoin` condition are to be
+      considered "foreign key" columns based on those
+      :class:`_schema.Column` objects that specify
+      :class:`_schema.ForeignKey`,
+      or are otherwise listed as referencing columns in a
+      :class:`_schema.ForeignKeyConstraint` construct.
+      :paramref:`_orm.relationship.foreign_keys` is only needed when:
+
+        1. There is more than one way to construct a join from the local
+           table to the remote table, as there are multiple foreign key
+           references present.  Setting ``foreign_keys`` will limit the
+           :func:`_orm.relationship`
+           to consider just those columns specified
+           here as "foreign".
+
+        2. The :class:`_schema.Table` being mapped does not actually have
+           :class:`_schema.ForeignKey` or
+           :class:`_schema.ForeignKeyConstraint`
+           constructs present, often because the table
+           was reflected from a database that does not support foreign key
+           reflection (MySQL MyISAM).
+
+        3. The :paramref:`_orm.relationship.primaryjoin`
+           argument is used to
+           construct a non-standard join condition, which makes use of
+           columns or expressions that do not normally refer to their
+           "parent" column, such as a join condition expressed by a
+           complex comparison using a SQL function.
+
+      The :func:`_orm.relationship` construct will raise informative
+      error messages that suggest the use of the
+      :paramref:`_orm.relationship.foreign_keys` parameter when
+      presented with an ambiguous condition.   In typical cases,
+      if :func:`_orm.relationship` doesn't raise any exceptions, the
+      :paramref:`_orm.relationship.foreign_keys` parameter is usually
+      not needed.
+
+      :paramref:`_orm.relationship.foreign_keys` may also be passed as a
+      callable function which is evaluated at mapper initialization time,
+      and may be passed as a Python-evaluable string when using
+      Declarative.
+
+      .. warning:: When passed as a Python-evaluable string, the
+         argument is interpreted using Python's ``eval()`` function.
+         **DO NOT PASS UNTRUSTED INPUT TO THIS STRING**.
+         See :ref:`declarative_relationship_eval` for details on
+         declarative evaluation of :func:`_orm.relationship` arguments.
+
+      .. seealso::
+
+        :ref:`relationship_foreign_keys`
+
+        :ref:`relationship_custom_foreign`
+
+        :func:`.foreign` - allows direct annotation of the "foreign"
+        columns within a :paramref:`_orm.relationship.primaryjoin`
+        condition.
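+
+      As a brief illustration of case 1 above, a sketch where two
+      foreign keys refer to the same target table and
+      :paramref:`_orm.relationship.foreign_keys` disambiguates each
+      :func:`_orm.relationship` (names here are illustrative only)::
+
+            class Invoice(Base):
+                __tablename__ = "invoice"
+
+                id: Mapped[int] = mapped_column(primary_key=True)
+                billing_address_id: Mapped[int] = mapped_column(
+                    ForeignKey("address.id")
+                )
+                shipping_address_id: Mapped[int] = mapped_column(
+                    ForeignKey("address.id")
+                )
+
+                billing_address: Mapped["Address"] = relationship(
+                    foreign_keys=[billing_address_id]
+                )
+                shipping_address: Mapped["Address"] = relationship(
+                    foreign_keys=[shipping_address_id]
+                )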
+
+    :param info: Optional data dictionary which will be populated into the
+        :attr:`.MapperProperty.info` attribute of this object.
+
+    :param innerjoin=False:
+      When ``True``, joined eager loads will use an inner join to join
+      against related tables instead of an outer join.  The purpose
+      of this option is generally one of performance, as inner joins
+      generally perform better than outer joins.
+
+      This flag can be set to ``True`` when the relationship references an
+      object via many-to-one using local foreign keys that are not
+      nullable, or when the reference is one-to-one or a collection that
+      is guaranteed to have at least one entry.
+
+      The option supports the same "nested" and "unnested" options as
+      that of :paramref:`_orm.joinedload.innerjoin`.  See that flag
+      for details on nested / unnested behaviors.
+
+      .. seealso::
+
+        :paramref:`_orm.joinedload.innerjoin` - the option as specified by
+        loader option, including detail on nesting behavior.
+
+        :ref:`what_kind_of_loading` - Discussion of some details of
+        various loader options.
+
+
+    :param join_depth:
+      When non-``None``, an integer value indicating how many levels
+      deep "eager" loaders should join on a self-referring or cyclical
+      relationship.  The number counts how many times the same Mapper
+      shall be present in the loading condition along a particular join
+      branch.  When left at its default of ``None``, eager loaders
+      will stop chaining when they encounter the same target mapper
+      which is already higher up in the chain.  This option applies
+      both to joined- and subquery- eager loaders.
+
+      .. seealso::
+
+        :ref:`self_referential_eager_loading` - Introductory documentation
+        and examples.
+
+    :param lazy='select': specifies
+      how the related items should be loaded.  Default value is
+      ``select``.  Values include:
+
+      * ``select`` - items should be loaded lazily when the property is
+        first accessed, using a separate SELECT statement, or identity map
+        fetch for simple many-to-one references.
+
+      * ``immediate`` - items should be loaded as the parents are loaded,
+        using a separate SELECT statement, or identity map fetch for
+        simple many-to-one references.
+
+      * ``joined`` - items should be loaded "eagerly" in the same query as
+        that of the parent, using a JOIN or LEFT OUTER JOIN.  Whether
+        the join is "outer" or not is determined by the
+        :paramref:`_orm.relationship.innerjoin` parameter.
+
+      * ``subquery`` - items should be loaded "eagerly" as the parents are
+        loaded, using one additional SQL statement, which issues a JOIN to
+        a subquery of the original statement, for each collection
+        requested.
+
+      * ``selectin`` - items should be loaded "eagerly" as the parents
+        are loaded, using one or more additional SQL statements, which
+        issues a JOIN to the immediate parent object, specifying primary
+        key identifiers using an IN clause.
+
+      * ``noload`` - no loading should occur at any time.  The related
+        collection will remain empty.   The ``noload`` strategy is not
+        recommended for general use.  For a general use "never load"
+        approach, see :ref:`write_only_relationship`.
+
+      * ``raise`` - lazy loading is disallowed; accessing
+        the attribute, if its value were not already loaded via eager
+        loading, will raise an :exc:`~sqlalchemy.exc.InvalidRequestError`.
+        This strategy can be used when objects are to be detached from
+        their attached :class:`.Session` after they are loaded.
+
+      * ``raise_on_sql`` - lazy loading that emits SQL is disallowed;
+        accessing the attribute, if its value were not already loaded via
+        eager loading, will raise an
+        :exc:`~sqlalchemy.exc.InvalidRequestError`, **if the lazy load
+        needs to emit SQL**.  If the lazy load can pull the related value
+        from the identity map or determine that it should be None, the
+        value is loaded.  This strategy can be used when objects will
+        remain associated with the attached :class:`.Session`, however
+        additional SELECT statements should be blocked.
+
+      * ``write_only`` - the attribute will be configured with a special
+        "virtual collection" that may receive
+        :meth:`_orm.WriteOnlyCollection.add` and
+        :meth:`_orm.WriteOnlyCollection.remove` commands to add or remove
+        individual objects, but will not under any circumstances load or
+        iterate the full set of objects from the database directly. Instead,
+        methods such as :meth:`_orm.WriteOnlyCollection.select`,
+        :meth:`_orm.WriteOnlyCollection.insert`,
+        :meth:`_orm.WriteOnlyCollection.update` and
+        :meth:`_orm.WriteOnlyCollection.delete` are provided which generate SQL
+        constructs that may be used to load and modify rows in bulk. Used for
+        large collections that are never appropriate to load at once into
+        memory.
+
+        The ``write_only`` loader style is configured automatically when
+        the :class:`_orm.WriteOnlyMapped` annotation is provided on the
+        left hand side within a Declarative mapping.  See the section
+        :ref:`write_only_relationship` for examples.
+
+        .. versionadded:: 2.0
+
+        .. seealso::
+
+            :ref:`write_only_relationship` - in the :ref:`queryguide_toplevel`
+
+      * ``dynamic`` - the attribute will return a pre-configured
+        :class:`_query.Query` object for all read
+        operations, onto which further filtering operations can be
+        applied before iterating the results.
+
+        The ``dynamic`` loader style is configured automatically when
+        the :class:`_orm.DynamicMapped` annotation is provided on the
+        left hand side within a Declarative mapping.  See the section
+        :ref:`dynamic_relationship` for examples.
+
+        .. legacy::  The "dynamic" lazy loader strategy is the legacy form of
+           what is now the "write_only" strategy described in the section
+           :ref:`write_only_relationship`.
+
+        .. seealso::
+
+            :ref:`dynamic_relationship` - in the :ref:`queryguide_toplevel`
+
+            :ref:`write_only_relationship` - more generally useful approach
+            for large collections that should not fully load into memory
+
+      * True - a synonym for 'select'
+
+      * False - a synonym for 'joined'
+
+      * None - a synonym for 'noload'
+
+      .. seealso::
+
+        :ref:`orm_queryguide_relationship_loaders` - Full documentation on
+        relationship loader configuration in the :ref:`queryguide_toplevel`.
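+
+      As a brief illustration, a sketch selecting the ``selectin``
+      strategy at mapping time (names here are illustrative only)::
+
+            children: Mapped[List["Child"]] = relationship(lazy="selectin")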
+
+
+    :param load_on_pending=False:
+      Indicates loading behavior for transient or pending parent objects.
+
+      When set to ``True``, causes the lazy-loader to
+      issue a query for a parent object that is not persistent, meaning it
+      has never been flushed.  This may take effect for a pending object
+      when autoflush is disabled, or for a transient object that has been
+      "attached" to a :class:`.Session` but is not part of its pending
+      collection.
+
+      The :paramref:`_orm.relationship.load_on_pending`
+      flag does not improve
+      behavior when the ORM is used normally - object references should be
+      constructed at the object level, not at the foreign key level, so
+      that they are present in an ordinary way before a flush proceeds.
+      This flag is not intended for general use.
+
+      .. seealso::
+
+          :meth:`.Session.enable_relationship_loading` - this method
+          establishes "load on pending" behavior for the whole object, and
+          also allows loading on objects that remain transient or
+          detached.
+
+    :param order_by:
+      Indicates the ordering that should be applied when loading these
+      items.  :paramref:`_orm.relationship.order_by`
+      is expected to refer to
+      one of the :class:`_schema.Column`
+      objects to which the target class is
+      mapped, or the attribute itself bound to the target class which
+      refers to the column.
+
+      :paramref:`_orm.relationship.order_by`
+      may also be passed as a callable
+      function which is evaluated at mapper initialization time, and may
+      be passed as a Python-evaluable string when using Declarative.
+
+      .. warning:: When passed as a Python-evaluable string, the
+         argument is interpreted using Python's ``eval()`` function.
+         **DO NOT PASS UNTRUSTED INPUT TO THIS STRING**.
+         See :ref:`declarative_relationship_eval` for details on
+         declarative evaluation of :func:`_orm.relationship` arguments.
+
+    :param passive_deletes=False:
+       Indicates loading behavior during delete operations.
+
+       A value of True indicates that unloaded child items should not
+       be loaded during a delete operation on the parent.  Normally,
+       when a parent item is deleted, all child items are loaded so
+       that they can either be marked as deleted, or have their
+       foreign key to the parent set to NULL.  Marking this flag as
+       True usually implies an ON DELETE <CASCADE|SET NULL> rule is in
+       place which will handle updating/deleting child rows on the
+       database side.
+
+       Additionally, setting the flag to the string value 'all' will
+       disable the "nulling out" of the child foreign keys, when the parent
+       object is deleted and there is no delete or delete-orphan cascade
+       enabled.  This is typically used when a trigger or error-raising
+       scenario is in place on the database side.  Note that the foreign
+       key attributes on in-session child objects will not be changed
+       after a flush occurs, so this is a very special use-case setting.
+       Additionally, the "nulling out" will still occur if the child
+       object is de-associated with the parent.
+
+       .. seealso::
+
+            :ref:`passive_deletes` - Introductory documentation
+            and examples.
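+
+       For example, a sketch that relies upon an ``ON DELETE CASCADE``
+       rule configured on the child table's foreign key (names here are
+       illustrative only)::
+
+            children: Mapped[List["Child"]] = relationship(
+                cascade="all, delete-orphan", passive_deletes=True
+            )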
+
+    :param passive_updates=True:
+      Indicates the persistence behavior to take when a referenced
+      primary key value changes in place, indicating that the referencing
+      foreign key columns will also need their value changed.
+
+      When True, it is assumed that ``ON UPDATE CASCADE`` is configured on
+      the foreign key in the database, and that the database will
+      handle propagation of an UPDATE from a source column to
+      dependent rows.  When False, the SQLAlchemy
+      :func:`_orm.relationship`
+      construct will attempt to emit its own UPDATE statements to
+      modify related targets.  However note that SQLAlchemy **cannot**
+      emit an UPDATE for more than one level of cascade.  Also,
+      setting this flag to False is not compatible in the case where
+      the database is in fact enforcing referential integrity, unless
+      those constraints are explicitly "deferred", if the target backend
+      supports it.
+
+      It is highly advised that an application which is employing
+      mutable primary keys keeps ``passive_updates`` set to True,
+      and instead uses the referential integrity features of the database
+      itself in order to handle the change efficiently and fully.
+
+      .. seealso::
+
+          :ref:`passive_updates` - Introductory documentation and
+          examples.
+
+          :paramref:`.mapper.passive_updates` - a similar flag which
+          takes effect for joined-table inheritance mappings.
+
+    :param post_update:
+      This indicates that the relationship should be handled by a
+      second UPDATE statement after an INSERT or before a
+      DELETE. This flag is used to handle saving bi-directional
+      dependencies between two individual rows (i.e. each row
+      references the other), where it would otherwise be impossible to
+      INSERT or DELETE both rows fully since one row exists before the
+      other. Use this flag when a particular mapping arrangement will
+      incur two rows that are dependent on each other, such as a table
+      that has a one-to-many relationship to a set of child rows, and
+      also has a column that references a single child row within that
+      list (i.e. both tables contain a foreign key to each other). If
+      a flush operation returns an error that a "cyclical
+      dependency" was detected, this is a cue that you might want to
+      use :paramref:`_orm.relationship.post_update` to "break" the cycle.
+
+      .. seealso::
+
+          :ref:`post_update` - Introductory documentation and examples.
+
+    :param primaryjoin:
+      A SQL expression that will be used as the primary
+      join of the child object against the parent object, or in a
+      many-to-many relationship the join of the parent object to the
+      association table. By default, this value is computed based on the
+      foreign key relationships of the parent and child tables (or
+      association table).
+
+      :paramref:`_orm.relationship.primaryjoin` may also be passed as a
+      callable function which is evaluated at mapper initialization time,
+      and may be passed as a Python-evaluable string when using
+      Declarative.
+
+      .. warning:: When passed as a Python-evaluable string, the
+         argument is interpreted using Python's ``eval()`` function.
+         **DO NOT PASS UNTRUSTED INPUT TO THIS STRING**.
+         See :ref:`declarative_relationship_eval` for details on
+         declarative evaluation of :func:`_orm.relationship` arguments.
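+
+      As a brief illustration, a sketch of a custom ``primaryjoin``
+      passed as a lambda, adding extra criteria to the join (the names
+      and the :func:`.and_` construct used here are illustrative only)::
+
+            boston_addresses: Mapped[List["Address"]] = relationship(
+                primaryjoin=lambda: and_(
+                    User.id == Address.user_id,
+                    Address.city == "Boston",
+                ),
+                viewonly=True,
+            )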
+
+      .. seealso::
+
+          :ref:`relationship_primaryjoin`
+
+    :param remote_side:
+      Used for self-referential relationships, indicates the column or
+      list of columns that form the "remote side" of the relationship.
+
+      :paramref:`_orm.relationship.remote_side` may also be passed as a
+      callable function which is evaluated at mapper initialization time,
+      and may be passed as a Python-evaluable string when using
+      Declarative.
+
+      .. warning:: When passed as a Python-evaluable string, the
+         argument is interpreted using Python's ``eval()`` function.
+         **DO NOT PASS UNTRUSTED INPUT TO THIS STRING**.
+         See :ref:`declarative_relationship_eval` for details on
+         declarative evaluation of :func:`_orm.relationship` arguments.
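+
+      As a brief illustration, a sketch of the adjacency list pattern,
+      where :paramref:`_orm.relationship.remote_side` establishes the
+      many-to-one side of a self-referential relationship (names here
+      are illustrative only)::
+
+            class Node(Base):
+                __tablename__ = "node"
+
+                id: Mapped[int] = mapped_column(primary_key=True)
+                parent_id: Mapped[Optional[int]] = mapped_column(
+                    ForeignKey("node.id")
+                )
+                children: Mapped[List["Node"]] = relationship(
+                    back_populates="parent"
+                )
+                parent: Mapped[Optional["Node"]] = relationship(
+                    back_populates="children", remote_side=[id]
+                )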
+
+      .. seealso::
+
+        :ref:`self_referential` - in-depth explanation of how
+        :paramref:`_orm.relationship.remote_side`
+        is used to configure self-referential relationships.
+
+        :func:`.remote` - an annotation function that accomplishes the
+        same purpose as :paramref:`_orm.relationship.remote_side`,
+        typically
+        when a custom :paramref:`_orm.relationship.primaryjoin` condition
+        is used.
+
+    :param query_class:
+      A :class:`_query.Query`
+      subclass that will be used internally by the
+      ``AppenderQuery`` returned by a "dynamic" relationship, that
+      is, a relationship that specifies ``lazy="dynamic"`` or was
+      otherwise constructed using the :func:`_orm.dynamic_loader`
+      function.
+
+      .. seealso::
+
+        :ref:`dynamic_relationship` - Introduction to "dynamic"
+        relationship loaders.
+
+    :param secondaryjoin:
+      A SQL expression that will be used as the join of
+      an association table to the child object. By default, this value is
+      computed based on the foreign key relationships of the association
+      and child tables.
+
+      :paramref:`_orm.relationship.secondaryjoin` may also be passed as a
+      callable function which is evaluated at mapper initialization time,
+      and may be passed as a Python-evaluable string when using
+      Declarative.
+
+      .. warning:: When passed as a Python-evaluable string, the
+         argument is interpreted using Python's ``eval()`` function.
+         **DO NOT PASS UNTRUSTED INPUT TO THIS STRING**.
+         See :ref:`declarative_relationship_eval` for details on
+         declarative evaluation of :func:`_orm.relationship` arguments.
+
+      .. seealso::
+
+          :ref:`relationship_primaryjoin`
+
+    :param single_parent:
+      When True, installs a validator which will prevent objects
+      from being associated with more than one parent at a time.
+      This is used for many-to-one or many-to-many relationships that
+      should be treated either as one-to-one or one-to-many.  Its usage
+      is optional, except for :func:`_orm.relationship` constructs which
+      are many-to-one or many-to-many and also
+      specify the ``delete-orphan`` cascade option.  The
+      :func:`_orm.relationship` construct itself will raise an
+      informative error when this option is required.
+
+      .. seealso::
+
+        :ref:`unitofwork_cascades` - includes detail on when the
+        :paramref:`_orm.relationship.single_parent`
+        flag may be appropriate.
+
+    :param uselist:
+      A boolean that indicates if this property should be loaded as a
+      list or a scalar. In most cases, this value is determined
+      automatically by :func:`_orm.relationship` at mapper configuration
+      time.  When using explicit :class:`_orm.Mapped` annotations,
+      :paramref:`_orm.relationship.uselist` may be derived from
+      whether or not the annotation within :class:`_orm.Mapped` contains
+      a collection class.
+      Otherwise, :paramref:`_orm.relationship.uselist` may be derived from
+      the type and direction
+      of the relationship - one to many forms a list, many to one
+      forms a scalar, many to many is a list. If a scalar is desired
+      where normally a list would be present, such as a bi-directional
+      one-to-one relationship, use an appropriate :class:`_orm.Mapped`
+      annotation or set :paramref:`_orm.relationship.uselist` to False.
+
+      The :paramref:`_orm.relationship.uselist`
+      flag is also available on an
+      existing :func:`_orm.relationship`
+      construct as a read-only attribute,
+      which can be used to determine if this :func:`_orm.relationship`
+      deals
+      with collections or scalar attributes::
+
+          >>> User.addresses.property.uselist
+          True
+
+      .. seealso::
+
+          :ref:`relationships_one_to_one` - Introduction to the "one to
+          one" relationship pattern, which is typically when an alternate
+          setting for :paramref:`_orm.relationship.uselist` is involved.
+
+    :param viewonly=False:
+      When set to ``True``, the relationship is used only for loading
+      objects, and not for any persistence operation.  A
+      :func:`_orm.relationship` which specifies
+      :paramref:`_orm.relationship.viewonly` can work
+      with a wider range of SQL operations within the
+      :paramref:`_orm.relationship.primaryjoin` condition, including
+      operations that feature the use of a variety of comparison operators
+      as well as SQL functions such as :func:`_expression.cast`.  The
+      :paramref:`_orm.relationship.viewonly`
+      flag is also of general use when defining any kind of
+      :func:`_orm.relationship` that doesn't represent
+      the full set of related objects, to prevent modifications of the
+      collection from resulting in persistence operations.
+
+      .. seealso::
+
+        :ref:`relationship_viewonly_notes` - more details on best practices
+        when using :paramref:`_orm.relationship.viewonly`.
+
+    :param sync_backref:
+      A boolean that enables the events used to synchronize the in-Python
+      attributes when this relationship is target of either
+      :paramref:`_orm.relationship.backref` or
+      :paramref:`_orm.relationship.back_populates`.
+
+      Defaults to ``None``, which indicates that an automatic value should
+      be selected based on the value of the
+      :paramref:`_orm.relationship.viewonly` flag.  When left at its
+      default, changes in state will be back-populated only if neither
+      side of a relationship is viewonly.
+
+      .. versionadded:: 1.3.17
+
+      .. versionchanged:: 1.4 - A relationship that specifies
+         :paramref:`_orm.relationship.viewonly` automatically implies
+         that :paramref:`_orm.relationship.sync_backref` is ``False``.
+
+      .. seealso::
+
+        :paramref:`_orm.relationship.viewonly`
+
+    :param omit_join:
+      Allows manual control over the "selectin" automatic join
+      optimization.  Set to ``False`` to disable the "omit join" feature
+      added in SQLAlchemy 1.3; or leave as ``None`` to leave automatic
+      optimization in place.
+
+      .. note:: This flag may only be set to ``False``.   It is not
+         necessary to set it to ``True`` as the "omit_join" optimization is
+         automatically detected; if it is not detected, then the
+         optimization is not supported.
+
+         .. versionchanged:: 1.3.11  setting ``omit_join`` to True will now
+            emit a warning as this was not the intended use of this flag.
+
+      .. versionadded:: 1.3
+
+    :param init: Specific to :ref:`orm_declarative_native_dataclasses`,
+     specifies if the mapped attribute should be part of the ``__init__()``
+     method as generated by the dataclass process.
+    :param repr: Specific to :ref:`orm_declarative_native_dataclasses`,
+     specifies if the mapped attribute should be part of the ``__repr__()``
+     method as generated by the dataclass process.
+    :param default_factory: Specific to
+     :ref:`orm_declarative_native_dataclasses`,
+     specifies a default-value generation function that will take place
+     as part of the ``__init__()``
+     method as generated by the dataclass process.
+    :param compare: Specific to
+     :ref:`orm_declarative_native_dataclasses`, indicates if this field
+     should be included in comparison operations when generating the
+     ``__eq__()`` and ``__ne__()`` methods for the mapped class.
+
+     .. versionadded:: 2.0.0b4
+
+    :param kw_only: Specific to
+     :ref:`orm_declarative_native_dataclasses`, indicates if this field
+     should be marked as keyword-only when generating the ``__init__()``.
+
+    :param hash: Specific to
+     :ref:`orm_declarative_native_dataclasses`, controls if this field
+     is included when generating the ``__hash__()`` method for the mapped
+     class.
+
+     .. versionadded:: 2.0.36
+    """
+
+    return _RelationshipDeclared(
+        argument,
+        secondary=secondary,
+        uselist=uselist,
+        collection_class=collection_class,
+        primaryjoin=primaryjoin,
+        secondaryjoin=secondaryjoin,
+        back_populates=back_populates,
+        order_by=order_by,
+        backref=backref,
+        overlaps=overlaps,
+        post_update=post_update,
+        cascade=cascade,
+        viewonly=viewonly,
+        attribute_options=_AttributeOptions(
+            init, repr, default, default_factory, compare, kw_only, hash
+        ),
+        lazy=lazy,
+        passive_deletes=passive_deletes,
+        passive_updates=passive_updates,
+        active_history=active_history,
+        enable_typechecks=enable_typechecks,
+        foreign_keys=foreign_keys,
+        remote_side=remote_side,
+        join_depth=join_depth,
+        comparator_factory=comparator_factory,
+        single_parent=single_parent,
+        innerjoin=innerjoin,
+        distinct_target_key=distinct_target_key,
+        load_on_pending=load_on_pending,
+        query_class=query_class,
+        info=info,
+        omit_join=omit_join,
+        sync_backref=sync_backref,
+        **kw,
+    )
+
+
+def synonym(
+    name: str,
+    *,
+    map_column: Optional[bool] = None,
+    descriptor: Optional[Any] = None,
+    comparator_factory: Optional[Type[PropComparator[_T]]] = None,
+    init: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    repr: Union[_NoArg, bool] = _NoArg.NO_ARG,  # noqa: A002
+    default: Union[_NoArg, _T] = _NoArg.NO_ARG,
+    default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
+    compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG,  # noqa: A002
+    info: Optional[_InfoType] = None,
+    doc: Optional[str] = None,
+) -> Synonym[Any]:
+    """Denote an attribute name as a synonym to a mapped property,
+    in that the attribute will mirror the value and expression behavior
+    of another attribute.
+
+    e.g.::
+
+        class MyClass(Base):
+            __tablename__ = "my_table"
+
+            id = Column(Integer, primary_key=True)
+            job_status = Column(String(50))
+
+            status = synonym("job_status")
+
+    :param name: the name of the existing mapped property.  This
+      can refer, by string name, to an ORM-mapped attribute
+      configured on the class, including column-bound attributes
+      and relationships.
+
+    :param descriptor: a Python :term:`descriptor` that will be used
+      as a getter (and potentially a setter) when this attribute is
+      accessed at the instance level.
+
+    :param map_column: **For classical mappings and mappings against
+      an existing Table object only**.  If ``True``, the :func:`.synonym`
+      construct will locate the :class:`_schema.Column`
+      object upon the mapped
+      table that would normally be associated with the attribute name of
+      this synonym, and produce a new :class:`.ColumnProperty` that instead
+      maps this :class:`_schema.Column`
+      to the alternate name given as the "name"
+      argument of the synonym; in this way, the usual step of redefining
+      the mapping of the :class:`_schema.Column`
+      to be under a different name is
+      unnecessary. This is usually intended to be used when a
+      :class:`_schema.Column`
+      is to be replaced with an attribute that also uses a
+      descriptor, that is, in conjunction with the
+      :paramref:`.synonym.descriptor` parameter::
+
+        my_table = Table(
+            "my_table",
+            metadata,
+            Column("id", Integer, primary_key=True),
+            Column("job_status", String(50)),
+        )
+
+
+        class MyClass:
+            @property
+            def _job_status_descriptor(self):
+                return "Status: %s" % self._job_status
+
+
+        mapper(
+            MyClass,
+            my_table,
+            properties={
+                "job_status": synonym(
+                    "_job_status",
+                    map_column=True,
+                    descriptor=MyClass._job_status_descriptor,
+                )
+            },
+        )
+
+      Above, the attribute named ``_job_status`` is automatically
+      mapped to the ``job_status`` column::
+
+        >>> j1 = MyClass()
+        >>> j1._job_status = "employed"
+        >>> j1.job_status
+        'Status: employed'
+
+      When using Declarative, in order to provide a descriptor in
+      conjunction with a synonym, use the
+      :func:`sqlalchemy.ext.declarative.synonym_for` helper.  However,
+      note that the :ref:`hybrid properties <mapper_hybrids>` feature
+      should usually be preferred, particularly when redefining attribute
+      behavior.
+
+    :param info: Optional data dictionary which will be populated into the
+        :attr:`.InspectionAttr.info` attribute of this object.
+
+    :param comparator_factory: A subclass of :class:`.PropComparator`
+      that will provide custom comparison behavior at the SQL expression
+      level.
+
+      .. note::
+
+        For the use case of providing an attribute which redefines both
+        Python-level and SQL-expression level behavior of an attribute,
+        please refer to the Hybrid attribute introduced at
+        :ref:`mapper_hybrids` for a more effective technique.
+
+    .. seealso::
+
+        :ref:`synonyms` - Overview of synonyms
+
+        :func:`.synonym_for` - a helper oriented towards Declarative
+
+        :ref:`mapper_hybrids` - The Hybrid Attribute extension provides an
+        updated approach to augmenting attribute behavior more flexibly
+        than can be achieved with synonyms.
+
+    """
+    return Synonym(
+        name,
+        map_column=map_column,
+        descriptor=descriptor,
+        comparator_factory=comparator_factory,
+        attribute_options=_AttributeOptions(
+            init, repr, default, default_factory, compare, kw_only, hash
+        ),
+        doc=doc,
+        info=info,
+    )
+
+
+def create_session(
+    bind: Optional[_SessionBind] = None, **kwargs: Any
+) -> Session:
+    r"""Create a new :class:`.Session`
+    with no automation enabled by default.
+
+    This function is used primarily for testing.   The usual
+    route to :class:`.Session` creation is via its constructor
+    or the :func:`.sessionmaker` function.
+
+    :param bind: optional, a single Connectable to use for all
+      database access in the created
+      :class:`~sqlalchemy.orm.session.Session`.
+
+    :param \*\*kwargs: optional, passed through to the
+      :class:`.Session` constructor.
+
+    :returns: an :class:`~sqlalchemy.orm.session.Session` instance
+
+    The defaults of create_session() are the opposite of those of
+    :func:`sessionmaker`; ``autoflush`` and ``expire_on_commit`` are
+    False.
+
+    Usage::
+
+      >>> from sqlalchemy.orm import create_session
+      >>> session = create_session()
+
+    It is recommended to use :func:`sessionmaker` instead of
+    create_session().
+
+    """
+
+    kwargs.setdefault("autoflush", False)
+    kwargs.setdefault("expire_on_commit", False)
+    return Session(bind=bind, **kwargs)
+
+
+def _mapper_fn(*arg: Any, **kw: Any) -> NoReturn:
+    """Placeholder for the now-removed ``mapper()`` function.
+
+    Classical mappings should be performed using the
+    :meth:`_orm.registry.map_imperatively` method.
+
+    This symbol remains in SQLAlchemy 2.0 to suit the deprecated use case
+    of using the ``mapper()`` function as a target for ORM event listeners,
+    which failed to be marked as deprecated in the 1.4 series.
+
+    Global ORM mapper listeners should instead use the :class:`_orm.Mapper`
+    class as the target.
+
+    .. versionchanged:: 2.0  The ``mapper()`` function was removed; the
+       symbol remains temporarily as a placeholder for the event listening
+       use case.
+
+    """
+    raise InvalidRequestError(
+        "The 'sqlalchemy.orm.mapper()' function is removed as of "
+        "SQLAlchemy 2.0.  Use the "
+        "'sqlalchemy.orm.registry.map_imperatively()' "
+        "method of the 'sqlalchemy.orm.registry' class to perform "
+        "classical mapping."
+    )
+    )
+
+
+def dynamic_loader(
+    argument: Optional[_RelationshipArgumentType[Any]] = None, **kw: Any
+) -> RelationshipProperty[Any]:
+    """Construct a dynamically-loading mapper property.
+
+    This is essentially the same as
+    using the ``lazy='dynamic'`` argument with :func:`relationship`::
+
+        dynamic_loader(SomeClass)
+
+        # is the same as
+
+        relationship(SomeClass, lazy="dynamic")
+
+    See the section :ref:`dynamic_relationship` for more details
+    on dynamic loading.
+
+    """
+    kw["lazy"] = "dynamic"
+    return relationship(argument, **kw)
+
+
+def backref(name: str, **kwargs: Any) -> ORMBackrefArgument:
+    """When using the :paramref:`_orm.relationship.backref` parameter,
+    provides specific parameters to be used when the new
+    :func:`_orm.relationship` is generated.
+
+    E.g.::
+
+        "items": relationship(SomeItem, backref=backref("parent", lazy="subquery"))
+
+    The :paramref:`_orm.relationship.backref` parameter is generally
+    considered to be legacy; for modern applications, using
+    explicit :func:`_orm.relationship` constructs linked together using
+    the :paramref:`_orm.relationship.back_populates` parameter should be
+    preferred.
+
+    .. seealso::
+
+        :ref:`relationships_backref` - background on backrefs
+
+    """  # noqa: E501
+
+    return (name, kwargs)
+
+
+def deferred(
+    column: _ORMColumnExprArgument[_T],
+    *additional_columns: _ORMColumnExprArgument[Any],
+    group: Optional[str] = None,
+    raiseload: bool = False,
+    comparator_factory: Optional[Type[PropComparator[_T]]] = None,
+    init: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    repr: Union[_NoArg, bool] = _NoArg.NO_ARG,  # noqa: A002
+    default: Optional[Any] = _NoArg.NO_ARG,
+    default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
+    compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG,  # noqa: A002
+    active_history: bool = False,
+    expire_on_flush: bool = True,
+    info: Optional[_InfoType] = None,
+    doc: Optional[str] = None,
+) -> MappedSQLExpression[_T]:
+    r"""Indicate a column-based mapped attribute that by default will
+    not load unless accessed.
+
+    When using :func:`_orm.mapped_column`, the same functionality as
+    that of :func:`_orm.deferred` construct is provided by using the
+    :paramref:`_orm.mapped_column.deferred` parameter.
+
+    :param \*columns: columns to be mapped.  This is typically a single
+     :class:`_schema.Column` object,
+     however a collection is supported in order
+     to support multiple columns mapped under the same attribute.
+
+    :param raiseload: boolean, if True, indicates an exception should be raised
+     if the load operation is to take place.
+
+     .. versionadded:: 1.4
+
+
+    Additional arguments are the same as those of :func:`_orm.column_property`.
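+
+    As a brief illustration, a sketch of imperative-style usage against
+    a hypothetical ``book_table`` and registry (names here are
+    illustrative only)::
+
+        mapper_registry.map_imperatively(
+            Book,
+            book_table,
+            properties={"summary": deferred(book_table.c.summary)},
+        )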
+
+    .. seealso::
+
+        :ref:`orm_queryguide_deferred_imperative`
+
+    """
+    return MappedSQLExpression(
+        column,
+        *additional_columns,
+        attribute_options=_AttributeOptions(
+            init, repr, default, default_factory, compare, kw_only, hash
+        ),
+        group=group,
+        deferred=True,
+        raiseload=raiseload,
+        comparator_factory=comparator_factory,
+        active_history=active_history,
+        expire_on_flush=expire_on_flush,
+        info=info,
+        doc=doc,
+    )
+
+
+def query_expression(
+    default_expr: _ORMColumnExprArgument[_T] = sql.null(),
+    *,
+    repr: Union[_NoArg, bool] = _NoArg.NO_ARG,  # noqa: A002
+    compare: Union[_NoArg, bool] = _NoArg.NO_ARG,  # noqa: A002
+    expire_on_flush: bool = True,
+    info: Optional[_InfoType] = None,
+    doc: Optional[str] = None,
+) -> MappedSQLExpression[_T]:
+    """Indicate an attribute that populates from a query-time SQL expression.
+
+    :param default_expr: Optional SQL expression object that will be used in
+        all cases if not assigned later with :func:`_orm.with_expression`.
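+
+    As a brief illustration, a sketch in which the attribute is
+    populated at query time using :func:`_orm.with_expression` (names
+    here are illustrative only)::
+
+        class A(Base):
+            __tablename__ = "a"
+
+            id: Mapped[int] = mapped_column(primary_key=True)
+            x: Mapped[int]
+            y: Mapped[int]
+            expr: Mapped[int] = query_expression()
+
+
+        stmt = select(A).options(with_expression(A.expr, A.x + A.y))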
+
+    .. versionadded:: 1.2
+
+    .. seealso::
+
+        :ref:`orm_queryguide_with_expression` - background and usage examples
+
+    """
+    prop = MappedSQLExpression(
+        default_expr,
+        attribute_options=_AttributeOptions(
+            False,
+            repr,
+            _NoArg.NO_ARG,
+            _NoArg.NO_ARG,
+            compare,
+            _NoArg.NO_ARG,
+            _NoArg.NO_ARG,
+        ),
+        expire_on_flush=expire_on_flush,
+        info=info,
+        doc=doc,
+        _assume_readonly_dc_attributes=True,
+    )
+
+    prop.strategy_key = (("query_expression", True),)
+    return prop
+
+
+def clear_mappers() -> None:
+    """Remove all mappers from all classes.
+
+    .. versionchanged:: 1.4  This function now locates all
+       :class:`_orm.registry` objects and calls upon the
+       :meth:`_orm.registry.dispose` method of each.
+
+    This function removes all instrumentation from classes and disposes
+    of their associated mappers.  Once called, the classes are unmapped
+    and can be later re-mapped with new mappers.
+
+    :func:`.clear_mappers` is *not* for normal use, as there is literally no
+    valid usage for it outside of very specific testing scenarios. Normally,
+    mappers are permanent structural components of user-defined classes, and
+    are never discarded independently of their class.  If a mapped class
+    itself is garbage collected, its mapper is automatically disposed of as
+    well. As such, :func:`.clear_mappers` is only for usage in test suites
+    that re-use the same classes with different mappings, which is itself an
+    extremely rare use case - the only such use case is in fact SQLAlchemy's
+    own test suite, and possibly the test suites of other ORM extension
+    libraries which intend to test various combinations of mapper construction
+    upon a fixed set of classes.
+
+    """
+
+    mapperlib._dispose_registries(mapperlib._all_registries(), False)
+
+
+# I would really like a way to get the Type[] here that shows up
+# in a different way in typing tools, however there is no current method
+# that is accepted by mypy (subclass of Type[_O] works in pylance, rejected
+# by mypy).
+AliasedType = Annotated[Type[_O], "aliased"]
+
+
+@overload
+def aliased(
+    element: Type[_O],
+    alias: Optional[FromClause] = None,
+    name: Optional[str] = None,
+    flat: bool = False,
+    adapt_on_names: bool = False,
+) -> AliasedType[_O]: ...
+
+
+@overload
+def aliased(
+    element: Union[AliasedClass[_O], Mapper[_O], AliasedInsp[_O]],
+    alias: Optional[FromClause] = None,
+    name: Optional[str] = None,
+    flat: bool = False,
+    adapt_on_names: bool = False,
+) -> AliasedClass[_O]: ...
+
+
+@overload
+def aliased(
+    element: FromClause,
+    alias: None = None,
+    name: Optional[str] = None,
+    flat: bool = False,
+    adapt_on_names: bool = False,
+) -> FromClause: ...
+
+
+def aliased(
+    element: Union[_EntityType[_O], FromClause],
+    alias: Optional[FromClause] = None,
+    name: Optional[str] = None,
+    flat: bool = False,
+    adapt_on_names: bool = False,
+) -> Union[AliasedClass[_O], FromClause, AliasedType[_O]]:
+    """Produce an alias of the given element, usually an :class:`.AliasedClass`
+    instance.
+
+    E.g.::
+
+        my_alias = aliased(MyClass)
+
+        stmt = select(MyClass, my_alias).filter(MyClass.id > my_alias.id)
+        result = session.execute(stmt)
+
+    The :func:`.aliased` function is used to create an ad-hoc mapping of a
+    mapped class to a new selectable.  By default, a selectable is generated
+    from the normally mapped selectable (typically a :class:`_schema.Table`
+    ) using the
+    :meth:`_expression.FromClause.alias` method. However, :func:`.aliased`
+    can also be
+    used to link the class to a new :func:`_expression.select` statement.
+    Also, the :func:`.with_polymorphic` function is a variant of
+    :func:`.aliased` that is intended to specify a so-called "polymorphic
+    selectable", that corresponds to the union of several joined-inheritance
+    subclasses at once.
+
+    For convenience, the :func:`.aliased` function also accepts plain
+    :class:`_expression.FromClause` constructs, such as a
+    :class:`_schema.Table` or
+    :func:`_expression.select` construct.   In those cases, the
+    :meth:`_expression.FromClause.alias`
+    method is called on the object and the new
+     :class:`_expression.Alias` object is returned.  The returned
+    :class:`_expression.Alias` is not
+    ORM-mapped in this case.
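+
+    For example, a sketch linking a class to an arbitrary subquery
+    (names here are illustrative only)::
+
+        subq = select(MyClass).where(MyClass.id > 5).subquery()
+
+        my_alias = aliased(MyClass, subq)
+
+        stmt = select(my_alias)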
+
+    .. seealso::
+
+        :ref:`tutorial_orm_entity_aliases` - in the :ref:`unified_tutorial`
+
+        :ref:`orm_queryguide_orm_aliases` - in the :ref:`queryguide_toplevel`
+
+    :param element: element to be aliased.  Is normally a mapped class,
+     but for convenience can also be a :class:`_expression.FromClause`
+     element.
+
+    :param alias: Optional selectable unit to map the element to.  This is
+     usually used to link the object to a subquery, and should be an aliased
+     select construct as one would produce from the
+     :meth:`_query.Query.subquery` method or
+     the :meth:`_expression.Select.subquery` or
+     :meth:`_expression.Select.alias` methods of the :func:`_expression.select`
+     construct.
+
+    :param name: optional string name to use for the alias, if not specified
+     by the ``alias`` parameter.  The name, among other things, forms the
+     attribute name that will be accessible via tuples returned by a
+     :class:`_query.Query` object.  Not supported when creating aliases
+     of :class:`_sql.Join` objects.
+
+    :param flat: Boolean, will be passed through to the
+     :meth:`_expression.FromClause.alias` call so that aliases of
+     :class:`_expression.Join` objects will alias the individual tables
+     inside the join, rather than creating a subquery.  This is generally
+     supported by all modern databases with regards to right-nested joins
+     and generally produces more efficient queries.
+
+     When :paramref:`_orm.aliased.flat` is combined with
+     :paramref:`_orm.aliased.name`, the resulting joins will alias individual
+     tables using a naming scheme similar to ``<prefix>_<tablename>``.  This
+     naming scheme is for visibility / debugging purposes only and the
+     specific scheme is subject to change without notice.
+
+     .. versionadded:: 2.0.32 added support for combining
+        :paramref:`_orm.aliased.name` with :paramref:`_orm.aliased.flat`.
+        Previously, this would raise ``NotImplementedError``.
+
+    :param adapt_on_names: if True, more liberal "matching" will be used when
+     mapping the mapped columns of the ORM entity to those of the
+     given selectable - a name-based match will be performed if the
+     given selectable doesn't otherwise have a column that corresponds
+     to one on the entity.  The use case for this is when associating
+     an entity with some derived selectable such as one that uses
+     aggregate functions::
+
+        class UnitPrice(Base):
+            __tablename__ = "unit_price"
+            ...
+            unit_id = Column(Integer)
+            price = Column(Numeric)
+
+
+        aggregated_unit_price = (
+            Session.query(func.sum(UnitPrice.price).label("price"))
+            .group_by(UnitPrice.unit_id)
+            .subquery()
+        )
+
+        aggregated_unit_price = aliased(
+            UnitPrice, alias=aggregated_unit_price, adapt_on_names=True
+        )
+
+     Above, functions on ``aggregated_unit_price`` which refer to
+     ``.price`` will return the
+     ``func.sum(UnitPrice.price).label('price')`` column, as it is
+     matched on the name "price".  Ordinarily, the "price" function
+     wouldn't have any "column correspondence" to the actual
+     ``UnitPrice.price`` column as it is not a proxy of the original.
+
+    """
+    return AliasedInsp._alias_factory(
+        element,
+        alias=alias,
+        name=name,
+        flat=flat,
+        adapt_on_names=adapt_on_names,
+    )
+
+
+def with_polymorphic(
+    base: Union[Type[_O], Mapper[_O]],
+    classes: Union[Literal["*"], Iterable[Type[Any]]],
+    selectable: Union[Literal[False, None], FromClause] = False,
+    flat: bool = False,
+    polymorphic_on: Optional[ColumnElement[Any]] = None,
+    aliased: bool = False,
+    innerjoin: bool = False,
+    adapt_on_names: bool = False,
+    name: Optional[str] = None,
+    _use_mapper_path: bool = False,
+) -> AliasedClass[_O]:
+    """Produce an :class:`.AliasedClass` construct which specifies
+    columns for descendant mappers of the given base.
+
+    Using this method will ensure that each descendant mapper's
+    tables are included in the FROM clause, and will allow filter()
+    criterion to be used against those tables.  The resulting
+    instances will also have those columns already loaded so that
+    no "post fetch" of those columns will be required.
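+
+    As a brief illustration, a sketch against a hypothetical
+    joined-inheritance ``Employee`` hierarchy (class and attribute
+    names here are illustrative only)::
+
+        from sqlalchemy.orm import with_polymorphic
+
+        emp_poly = with_polymorphic(Employee, [Engineer, Manager])
+
+        stmt = select(emp_poly).where(
+            emp_poly.Engineer.engineer_info == "some info"
+        )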
+
+    .. seealso::
+
+        :ref:`with_polymorphic` - full discussion of
+        :func:`_orm.with_polymorphic`.
+
+    :param base: Base class to be aliased.
+
+    :param classes: a single class or mapper, or list of
+        class/mappers, which inherit from the base class.
+        Alternatively, it may also be the string ``'*'``, in which case
+        all descending mapped classes will be added to the FROM clause.
+
+    :param aliased: when True, the selectable will be aliased.   For a
+        JOIN, this means the JOIN will be SELECTed from inside of a subquery
+        unless the :paramref:`_orm.with_polymorphic.flat` flag is set to
+        True, which is recommended for simpler use cases.
+
+    :param flat: Boolean, will be passed through to the
+     :meth:`_expression.FromClause.alias` call so that aliases of
+     :class:`_expression.Join` objects will alias the individual tables
+     inside the join, rather than creating a subquery.  This is generally
+     supported by all modern databases with regards to right-nested joins
+     and generally produces more efficient queries.  Setting this flag is
+     recommended as long as the resulting SQL is functional.
+
+    :param selectable: a table or subquery that will
+        be used in place of the generated FROM clause. This argument is
+        required if any of the desired classes use concrete table
+        inheritance, since SQLAlchemy currently cannot generate UNIONs
+        among tables automatically. If used, the ``selectable`` argument
+        must represent the full set of tables and columns mapped by every
+        mapped class. Otherwise, the unaccounted mapped columns will
+        result in their table being appended directly to the FROM clause
+        which will usually lead to incorrect results.
+
+        When left at its default value of ``False``, the polymorphic
+        selectable assigned to the base mapper is used for selecting rows.
+        However, it may also be passed as ``None``, which will bypass the
+        configured polymorphic selectable and instead construct an ad-hoc
+        selectable for the target classes given; for joined table inheritance
+        this will be a join that includes all target mappers and their
+        subclasses.
+
+    :param polymorphic_on: a column to be used as the "discriminator"
+        column for the given selectable. If not given, the polymorphic_on
+        attribute of the base class's mapper will be used, if any. This
+        is useful for mappings that don't have polymorphic loading
+        behavior by default.
+
+    :param innerjoin: if True, an INNER JOIN will be used.  This should
+       be specified only when querying for one specific subtype.
+
+    :param adapt_on_names: Passes through the
+      :paramref:`_orm.aliased.adapt_on_names`
+      parameter to the aliased object.  This may be useful in situations where
+      the given selectable is not directly related to the existing mapped
+      selectable.
+
+      .. versionadded:: 1.4.33
+
+    :param name: Name given to the generated :class:`.AliasedClass`.
+
+      .. versionadded:: 2.0.31
+
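+    E.g., a minimal sketch, assuming a mapped ``Employee`` base class
+    with ``Manager`` and ``Engineer`` subclasses (names here are
+    illustrative only)::
+
+        from sqlalchemy import select
+        from sqlalchemy.orm import with_polymorphic
+
+        # SELECT employee rows with the manager and engineer columns
+        # included up front, so no per-subclass "post fetch" is needed
+        emp_poly = with_polymorphic(Employee, [Manager, Engineer])
+
+        stmt = select(emp_poly).where(
+            emp_poly.Manager.manager_name == "some manager"
+        )
+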
+    """
+    return AliasedInsp._with_polymorphic_factory(
+        base,
+        classes,
+        selectable=selectable,
+        flat=flat,
+        polymorphic_on=polymorphic_on,
+        adapt_on_names=adapt_on_names,
+        aliased=aliased,
+        innerjoin=innerjoin,
+        name=name,
+        _use_mapper_path=_use_mapper_path,
+    )
+
+
+def join(
+    left: _FromClauseArgument,
+    right: _FromClauseArgument,
+    onclause: Optional[_OnClauseArgument] = None,
+    isouter: bool = False,
+    full: bool = False,
+) -> _ORMJoin:
+    r"""Produce an inner join between left and right clauses.
+
+    :func:`_orm.join` is an extension to the core join interface
+    provided by :func:`_expression.join()`, where the
+    left and right selectable may be not only core selectable
+    objects such as :class:`_schema.Table`, but also mapped classes or
+    :class:`.AliasedClass` instances.   The "on" clause can
+    be a SQL expression or an ORM mapped attribute
+    referencing a configured :func:`_orm.relationship`.
+
+    :func:`_orm.join` is not commonly needed in modern usage,
+    as its functionality is encapsulated within that of the
+    :meth:`_sql.Select.join` and :meth:`_query.Query.join`
+    methods, which feature a
+    significant amount of automation beyond :func:`_orm.join`
+    by itself.  Explicit use of :func:`_orm.join`
+    with ORM-enabled SELECT statements involves use of the
+    :meth:`_sql.Select.select_from` method, as in::
+
+        from sqlalchemy.orm import join
+
+        stmt = (
+            select(User)
+            .select_from(join(User, Address, User.addresses))
+            .filter(Address.email_address == "foo@bar.com")
+        )
+
+    In modern SQLAlchemy the above join can be written more
+    succinctly as::
+
+        stmt = (
+            select(User)
+            .join(User.addresses)
+            .filter(Address.email_address == "foo@bar.com")
+        )
+
+    .. warning:: using :func:`_orm.join` directly may not work properly
+       with modern ORM options such as :func:`_orm.with_loader_criteria`.
+       It is strongly recommended to use the idiomatic join patterns
+       provided by methods such as :meth:`.Select.join` and
+       :meth:`.Select.join_from` when creating ORM joins.
+
+    .. seealso::
+
+        :ref:`orm_queryguide_joins` - in the :ref:`queryguide_toplevel` for
+        background on idiomatic ORM join patterns
+
+    """
+    return _ORMJoin(left, right, onclause, isouter, full)
+
+
+def outerjoin(
+    left: _FromClauseArgument,
+    right: _FromClauseArgument,
+    onclause: Optional[_OnClauseArgument] = None,
+    full: bool = False,
+) -> _ORMJoin:
+    """Produce a left outer join between left and right clauses.
+
+    This is the "outer join" version of the :func:`_orm.join` function,
+    featuring the same behavior except that an OUTER JOIN is generated.
+    See that function's documentation for other usage details.
+
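+    E.g., a brief sketch mirroring the :func:`_orm.join` example,
+    assuming the same ``User`` / ``Address`` mapping::
+
+        from sqlalchemy.orm import outerjoin
+
+        stmt = select(User).select_from(
+            outerjoin(User, Address, User.addresses)
+        )
+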
+    """
+    return _ORMJoin(left, right, onclause, True, full)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/_typing.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/_typing.py
new file mode 100644
index 00000000..ccb8413b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/_typing.py
@@ -0,0 +1,179 @@
+# orm/_typing.py
+# Copyright (C) 2022-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+import operator
+from typing import Any
+from typing import Dict
+from typing import Mapping
+from typing import Optional
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from ..engine.interfaces import _CoreKnownExecutionOptions
+from ..sql import roles
+from ..sql._orm_types import DMLStrategyArgument as DMLStrategyArgument
+from ..sql._orm_types import (
+    SynchronizeSessionArgument as SynchronizeSessionArgument,
+)
+from ..sql._typing import _HasClauseElement
+from ..sql.elements import ColumnElement
+from ..util.typing import Protocol
+from ..util.typing import TypeGuard
+
+if TYPE_CHECKING:
+    from .attributes import AttributeImpl
+    from .attributes import CollectionAttributeImpl
+    from .attributes import HasCollectionAdapter
+    from .attributes import QueryableAttribute
+    from .base import PassiveFlag
+    from .decl_api import registry as _registry_type
+    from .interfaces import InspectionAttr
+    from .interfaces import MapperProperty
+    from .interfaces import ORMOption
+    from .interfaces import UserDefinedOption
+    from .mapper import Mapper
+    from .relationships import RelationshipProperty
+    from .state import InstanceState
+    from .util import AliasedClass
+    from .util import AliasedInsp
+    from ..sql._typing import _CE
+    from ..sql.base import ExecutableOption
+
+_T = TypeVar("_T", bound=Any)
+
+
+_T_co = TypeVar("_T_co", bound=Any, covariant=True)
+
+_O = TypeVar("_O", bound=object)
+"""The 'ORM mapped object' type.
+
+"""
+
+
+if TYPE_CHECKING:
+    _RegistryType = _registry_type
+
+_InternalEntityType = Union["Mapper[_T]", "AliasedInsp[_T]"]
+
+_ExternalEntityType = Union[Type[_T], "AliasedClass[_T]"]
+
+_EntityType = Union[
+    Type[_T], "AliasedClass[_T]", "Mapper[_T]", "AliasedInsp[_T]"
+]
+
+
+_ClassDict = Mapping[str, Any]
+_InstanceDict = Dict[str, Any]
+
+_IdentityKeyType = Tuple[Type[_T], Tuple[Any, ...], Optional[Any]]
+
+_ORMColumnExprArgument = Union[
+    ColumnElement[_T],
+    _HasClauseElement[_T],
+    roles.ExpressionElementRole[_T],
+]
+
+
+_ORMCOLEXPR = TypeVar("_ORMCOLEXPR", bound=ColumnElement[Any])
+
+
+class _OrmKnownExecutionOptions(_CoreKnownExecutionOptions, total=False):
+    populate_existing: bool
+    autoflush: bool
+    synchronize_session: SynchronizeSessionArgument
+    dml_strategy: DMLStrategyArgument
+    is_delete_using: bool
+    is_update_from: bool
+    render_nulls: bool
+
+
+OrmExecuteOptionsParameter = Union[
+    _OrmKnownExecutionOptions, Mapping[str, Any]
+]
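+
+# note: values of this shape are what Session methods accept for their
+# ``execution_options`` parameter, e.g. (illustrative only):
+#
+#   session.execute(stmt, execution_options={"populate_existing": True})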
+
+
+class _ORMAdapterProto(Protocol):
+    """protocol for the :class:`.AliasedInsp._orm_adapt_element` method
+    which is a synonym for :class:`.AliasedInsp._adapt_element`.
+
+
+    """
+
+    def __call__(self, obj: _CE, key: Optional[str] = None) -> _CE: ...
+
+
+class _LoaderCallable(Protocol):
+    def __call__(
+        self, state: InstanceState[Any], passive: PassiveFlag
+    ) -> Any: ...
+
+
+def is_orm_option(
+    opt: ExecutableOption,
+) -> TypeGuard[ORMOption]:
+    return not opt._is_core
+
+
+def is_user_defined_option(
+    opt: ExecutableOption,
+) -> TypeGuard[UserDefinedOption]:
+    return not opt._is_core and opt._is_user_defined  # type: ignore
+
+
+def is_composite_class(obj: Any) -> bool:
+    # the second check inlines dataclasses.is_dataclass(obj)
+    return hasattr(obj, "__composite_values__") or hasattr(
+        obj, "__dataclass_fields__"
+    )
+
+
+if TYPE_CHECKING:
+
+    def insp_is_mapper_property(
+        obj: Any,
+    ) -> TypeGuard[MapperProperty[Any]]: ...
+
+    def insp_is_mapper(obj: Any) -> TypeGuard[Mapper[Any]]: ...
+
+    def insp_is_aliased_class(obj: Any) -> TypeGuard[AliasedInsp[Any]]: ...
+
+    def insp_is_attribute(
+        obj: InspectionAttr,
+    ) -> TypeGuard[QueryableAttribute[Any]]: ...
+
+    def attr_is_internal_proxy(
+        obj: InspectionAttr,
+    ) -> TypeGuard[QueryableAttribute[Any]]: ...
+
+    def prop_is_relationship(
+        prop: MapperProperty[Any],
+    ) -> TypeGuard[RelationshipProperty[Any]]: ...
+
+    def is_collection_impl(
+        impl: AttributeImpl,
+    ) -> TypeGuard[CollectionAttributeImpl]: ...
+
+    def is_has_collection_adapter(
+        impl: AttributeImpl,
+    ) -> TypeGuard[HasCollectionAdapter]: ...
+
+else:
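+    # at runtime the TYPE_CHECKING stubs above are replaced with plain
+    # attribute getters; each inspected object carries a boolean flag
+    # (e.g. ``is_mapper``) whose truthiness serves the same narrowing
+    # purpose as the TypeGuard annotations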
+    insp_is_mapper_property = operator.attrgetter("is_property")
+    insp_is_mapper = operator.attrgetter("is_mapper")
+    insp_is_aliased_class = operator.attrgetter("is_aliased_class")
+    insp_is_attribute = operator.attrgetter("is_attribute")
+    attr_is_internal_proxy = operator.attrgetter("_is_internal_proxy")
+    is_collection_impl = operator.attrgetter("collection")
+    prop_is_relationship = operator.attrgetter("_is_relationship")
+    is_has_collection_adapter = operator.attrgetter(
+        "_is_has_collection_adapter"
+    )
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/attributes.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/attributes.py
new file mode 100644
index 00000000..8207b4ca
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/attributes.py
@@ -0,0 +1,2835 @@
+# orm/attributes.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+"""Defines instrumentation for class attributes and their interaction
+with instances.
+
+This module is usually not directly visible to user applications, but
+defines a large part of the ORM's interactivity.
+
+
+"""
+
+from __future__ import annotations
+
+import dataclasses
+import operator
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import ClassVar
+from typing import Dict
+from typing import Iterable
+from typing import List
+from typing import NamedTuple
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import collections
+from . import exc as orm_exc
+from . import interfaces
+from ._typing import insp_is_aliased_class
+from .base import _DeclarativeMapped
+from .base import ATTR_EMPTY
+from .base import ATTR_WAS_SET
+from .base import CALLABLES_OK
+from .base import DEFERRED_HISTORY_LOAD
+from .base import INCLUDE_PENDING_MUTATIONS  # noqa
+from .base import INIT_OK
+from .base import instance_dict as instance_dict
+from .base import instance_state as instance_state
+from .base import instance_str
+from .base import LOAD_AGAINST_COMMITTED
+from .base import LoaderCallableStatus
+from .base import manager_of_class as manager_of_class
+from .base import Mapped as Mapped  # noqa
+from .base import NEVER_SET  # noqa
+from .base import NO_AUTOFLUSH
+from .base import NO_CHANGE  # noqa
+from .base import NO_KEY
+from .base import NO_RAISE
+from .base import NO_VALUE
+from .base import NON_PERSISTENT_OK  # noqa
+from .base import opt_manager_of_class as opt_manager_of_class
+from .base import PASSIVE_CLASS_MISMATCH  # noqa
+from .base import PASSIVE_NO_FETCH
+from .base import PASSIVE_NO_FETCH_RELATED  # noqa
+from .base import PASSIVE_NO_INITIALIZE
+from .base import PASSIVE_NO_RESULT
+from .base import PASSIVE_OFF
+from .base import PASSIVE_ONLY_PERSISTENT
+from .base import PASSIVE_RETURN_NO_VALUE
+from .base import PassiveFlag
+from .base import RELATED_OBJECT_OK  # noqa
+from .base import SQL_OK  # noqa
+from .base import SQLORMExpression
+from .base import state_str
+from .. import event
+from .. import exc
+from .. import inspection
+from .. import util
+from ..event import dispatcher
+from ..event import EventTarget
+from ..sql import base as sql_base
+from ..sql import cache_key
+from ..sql import coercions
+from ..sql import roles
+from ..sql import visitors
+from ..sql.cache_key import HasCacheKey
+from ..sql.visitors import _TraverseInternalsType
+from ..sql.visitors import InternalTraversal
+from ..util.typing import Literal
+from ..util.typing import Self
+from ..util.typing import TypeGuard
+
+if TYPE_CHECKING:
+    from ._typing import _EntityType
+    from ._typing import _ExternalEntityType
+    from ._typing import _InstanceDict
+    from ._typing import _InternalEntityType
+    from ._typing import _LoaderCallable
+    from ._typing import _O
+    from .collections import _AdaptedCollectionProtocol
+    from .collections import CollectionAdapter
+    from .interfaces import MapperProperty
+    from .relationships import RelationshipProperty
+    from .state import InstanceState
+    from .util import AliasedInsp
+    from .writeonly import WriteOnlyAttributeImpl
+    from ..event.base import _Dispatch
+    from ..sql._typing import _ColumnExpressionArgument
+    from ..sql._typing import _DMLColumnArgument
+    from ..sql._typing import _InfoType
+    from ..sql._typing import _PropagateAttrsType
+    from ..sql.annotation import _AnnotationDict
+    from ..sql.elements import ColumnElement
+    from ..sql.elements import Label
+    from ..sql.operators import OperatorType
+    from ..sql.selectable import FromClause
+
+
+_T = TypeVar("_T")
+_T_co = TypeVar("_T_co", bound=Any, covariant=True)
+
+
+_AllPendingType = Sequence[
+    Tuple[Optional["InstanceState[Any]"], Optional[object]]
+]
+
+
+_UNKNOWN_ATTR_KEY = object()
+
+
+@inspection._self_inspects
+class QueryableAttribute(
+    _DeclarativeMapped[_T_co],
+    SQLORMExpression[_T_co],
+    interfaces.InspectionAttr,
+    interfaces.PropComparator[_T_co],
+    roles.JoinTargetRole,
+    roles.OnClauseRole,
+    sql_base.Immutable,
+    cache_key.SlotsMemoizedHasCacheKey,
+    util.MemoizedSlots,
+    EventTarget,
+):
+    """Base class for :term:`descriptor` objects that intercept
+    attribute events on behalf of a :class:`.MapperProperty`
+    object.  The actual :class:`.MapperProperty` is accessible
+    via the :attr:`.QueryableAttribute.property`
+    attribute.
+
+
+    .. seealso::
+
+        :class:`.InstrumentedAttribute`
+
+        :class:`.MapperProperty`
+
+        :attr:`_orm.Mapper.all_orm_descriptors`
+
+        :attr:`_orm.Mapper.attrs`
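+
+    E.g., a short sketch, assuming a mapped class ``User`` with a
+    ``name`` column attribute::
+
+        # the class-bound attribute is an InstrumentedAttribute,
+        # a QueryableAttribute subclass
+        attr = User.name
+
+        # the MapperProperty behind it
+        prop = attr.property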
+    """
+
+    __slots__ = (
+        "class_",
+        "key",
+        "impl",
+        "comparator",
+        "property",
+        "parent",
+        "expression",
+        "_of_type",
+        "_extra_criteria",
+        "_slots_dispatch",
+        "_propagate_attrs",
+        "_doc",
+    )
+
+    is_attribute = True
+
+    dispatch: dispatcher[QueryableAttribute[_T_co]]
+
+    class_: _ExternalEntityType[Any]
+    key: str
+    parententity: _InternalEntityType[Any]
+    impl: AttributeImpl
+    comparator: interfaces.PropComparator[_T_co]
+    _of_type: Optional[_InternalEntityType[Any]]
+    _extra_criteria: Tuple[ColumnElement[bool], ...]
+    _doc: Optional[str]
+
+    # PropComparator has a __visit_name__ to participate within
+    # traversals.   Disambiguate the attribute vs. a comparator.
+    __visit_name__ = "orm_instrumented_attribute"
+
+    def __init__(
+        self,
+        class_: _ExternalEntityType[_O],
+        key: str,
+        parententity: _InternalEntityType[_O],
+        comparator: interfaces.PropComparator[_T_co],
+        impl: Optional[AttributeImpl] = None,
+        of_type: Optional[_InternalEntityType[Any]] = None,
+        extra_criteria: Tuple[ColumnElement[bool], ...] = (),
+    ):
+        self.class_ = class_
+        self.key = key
+
+        self._parententity = self.parent = parententity
+
+        # this attribute is non-None after mappers are set up, however in the
+        # interim class manager setup, there's a check for None to see if it
+        # needs to be populated, so we assign None here leaving the attribute
+        # in a temporarily not-type-correct state
+        self.impl = impl  # type: ignore
+
+        assert comparator is not None
+        self.comparator = comparator
+        self._of_type = of_type
+        self._extra_criteria = extra_criteria
+        self._doc = None
+
+        manager = opt_manager_of_class(class_)
+        # manager is None in the case of AliasedClass
+        if manager:
+            # propagate existing event listeners from
+            # immediate superclass
+            for base in manager._bases:
+                if key in base:
+                    self.dispatch._update(base[key].dispatch)
+                    if base[key].dispatch._active_history:
+                        self.dispatch._active_history = True  # type: ignore
+
+    _cache_key_traversal = [
+        ("key", visitors.ExtendedInternalTraversal.dp_string),
+        ("_parententity", visitors.ExtendedInternalTraversal.dp_multi),
+        ("_of_type", visitors.ExtendedInternalTraversal.dp_multi),
+        ("_extra_criteria", visitors.InternalTraversal.dp_clauseelement_list),
+    ]
+
+    def __reduce__(self) -> Any:
+        # this method is only used in terms of the
+        # sqlalchemy.ext.serializer extension
+        return (
+            _queryable_attribute_unreduce,
+            (
+                self.key,
+                self._parententity.mapper.class_,
+                self._parententity,
+                self._parententity.entity,
+            ),
+        )
+
+    @property
+    def _impl_uses_objects(self) -> bool:
+        return self.impl.uses_objects
+
+    def get_history(
+        self, instance: Any, passive: PassiveFlag = PASSIVE_OFF
+    ) -> History:
+        return self.impl.get_history(
+            instance_state(instance), instance_dict(instance), passive
+        )
+
+    @property
+    def info(self) -> _InfoType:
+        """Return the 'info' dictionary for the underlying SQL element.
+
+        The behavior here is as follows:
+
+        * If the attribute is a column-mapped property, i.e.
+          :class:`.ColumnProperty`, which is mapped directly
+          to a schema-level :class:`_schema.Column` object, this attribute
+          will return the :attr:`.SchemaItem.info` dictionary associated
+          with the core-level :class:`_schema.Column` object.
+
+        * If the attribute is a :class:`.ColumnProperty` but is mapped to
+          any other kind of SQL expression other than a
+          :class:`_schema.Column`,
+          the attribute will refer to the :attr:`.MapperProperty.info`
+          dictionary associated directly with the :class:`.ColumnProperty`,
+          assuming the SQL expression itself does not have its own ``.info``
+          attribute (which should be the case, unless a user-defined SQL
+          construct has defined one).
+
+        * If the attribute refers to any other kind of
+          :class:`.MapperProperty`, including :class:`.Relationship`,
+          the attribute will refer to the :attr:`.MapperProperty.info`
+          dictionary associated with that :class:`.MapperProperty`.
+
+        * To access the :attr:`.MapperProperty.info` dictionary of the
+          :class:`.MapperProperty` unconditionally, including for a
+          :class:`.ColumnProperty` that's associated directly with a
+          :class:`_schema.Column`, the attribute can be accessed via the
+          :attr:`.QueryableAttribute.property` attribute, as
+          ``MyClass.someattribute.property.info``.
+
+        .. seealso::
+
+            :attr:`.SchemaItem.info`
+
+            :attr:`.MapperProperty.info`
+
+        """
+        return self.comparator.info
+
+    parent: _InternalEntityType[Any]
+    """Return an inspection instance representing the parent.
+
+    This will be either an instance of :class:`_orm.Mapper`
+    or :class:`.AliasedInsp`, depending upon the nature
+    of the parent entity which this attribute is associated
+    with.
+
+    """
+
+    expression: ColumnElement[_T_co]
+    """The SQL expression object represented by this
+    :class:`.QueryableAttribute`.
+
+    This will typically be an instance of a :class:`_sql.ColumnElement`
+    subclass representing a column expression.
+
+    """
+
+    def _memoized_attr_expression(self) -> ColumnElement[_T]:
+        annotations: _AnnotationDict
+
+        # applies only to Proxy() as used by hybrid.
+        # currently is an exception to typing rather than feeding through
+        # non-string keys.
+        # ideally Proxy() would have a separate set of methods to deal
+        # with this case.
+        entity_namespace = self._entity_namespace
+        assert isinstance(entity_namespace, HasCacheKey)
+
+        if self.key is _UNKNOWN_ATTR_KEY:
+            annotations = {"entity_namespace": entity_namespace}
+        else:
+            annotations = {
+                "proxy_key": self.key,
+                "proxy_owner": self._parententity,
+                "entity_namespace": entity_namespace,
+            }
+
+        ce = self.comparator.__clause_element__()
+        try:
+            if TYPE_CHECKING:
+                assert isinstance(ce, ColumnElement)
+            anno = ce._annotate
+        except AttributeError as ae:
+            raise exc.InvalidRequestError(
+                'When interpreting attribute "%s" as a SQL expression, '
+                "expected __clause_element__() to return "
+                "a ClauseElement object, got: %r" % (self, ce)
+            ) from ae
+        else:
+            return anno(annotations)
+
+    def _memoized_attr__propagate_attrs(self) -> _PropagateAttrsType:
+        # this suits the case in coercions where we don't actually
+        # call ``__clause_element__()`` but still need to get
+        # resolved._propagate_attrs.  See #6558.
+        return util.immutabledict(
+            {
+                "compile_state_plugin": "orm",
+                "plugin_subject": self._parentmapper,
+            }
+        )
+
+    @property
+    def _entity_namespace(self) -> _InternalEntityType[Any]:
+        return self._parententity
+
+    @property
+    def _annotations(self) -> _AnnotationDict:
+        return self.__clause_element__()._annotations
+
+    def __clause_element__(self) -> ColumnElement[_T_co]:
+        return self.expression
+
+    @property
+    def _from_objects(self) -> List[FromClause]:
+        return self.expression._from_objects
+
+    def _bulk_update_tuples(
+        self, value: Any
+    ) -> Sequence[Tuple[_DMLColumnArgument, Any]]:
+        """Return setter tuples for a bulk UPDATE."""
+
+        return self.comparator._bulk_update_tuples(value)
+
+    def adapt_to_entity(self, adapt_to_entity: AliasedInsp[Any]) -> Self:
+        assert not self._of_type
+        return self.__class__(
+            adapt_to_entity.entity,
+            self.key,
+            impl=self.impl,
+            comparator=self.comparator.adapt_to_entity(adapt_to_entity),
+            parententity=adapt_to_entity,
+        )
+
+    def of_type(self, entity: _EntityType[_T]) -> QueryableAttribute[_T]:
+        return QueryableAttribute(
+            self.class_,
+            self.key,
+            self._parententity,
+            impl=self.impl,
+            comparator=self.comparator.of_type(entity),
+            of_type=inspection.inspect(entity),
+            extra_criteria=self._extra_criteria,
+        )
+
+    def and_(
+        self, *clauses: _ColumnExpressionArgument[bool]
+    ) -> QueryableAttribute[bool]:
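+        # combines additional filtering criteria with this attribute,
+        # e.g. ``User.addresses.and_(Address.email_address != None)``;
+        # typically used within loader options and join ON clauses
+        # (the ``User`` / ``Address`` names are illustrative only)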
+        if TYPE_CHECKING:
+            assert isinstance(self.comparator, RelationshipProperty.Comparator)
+
+        exprs = tuple(
+            coercions.expect(roles.WhereHavingRole, clause)
+            for clause in util.coerce_generator_arg(clauses)
+        )
+
+        return QueryableAttribute(
+            self.class_,
+            self.key,
+            self._parententity,
+            impl=self.impl,
+            comparator=self.comparator.and_(*exprs),
+            of_type=self._of_type,
+            extra_criteria=self._extra_criteria + exprs,
+        )
+
+    def _clone(self, **kw: Any) -> QueryableAttribute[_T]:
+        return QueryableAttribute(
+            self.class_,
+            self.key,
+            self._parententity,
+            impl=self.impl,
+            comparator=self.comparator,
+            of_type=self._of_type,
+            extra_criteria=self._extra_criteria,
+        )
+
+    def label(self, name: Optional[str]) -> Label[_T_co]:
+        return self.__clause_element__().label(name)
+
+    def operate(
+        self, op: OperatorType, *other: Any, **kwargs: Any
+    ) -> ColumnElement[Any]:
+        return op(self.comparator, *other, **kwargs)  # type: ignore[no-any-return]  # noqa: E501
+
+    def reverse_operate(
+        self, op: OperatorType, other: Any, **kwargs: Any
+    ) -> ColumnElement[Any]:
+        return op(other, self.comparator, **kwargs)  # type: ignore[no-any-return]  # noqa: E501
+
+    def hasparent(
+        self, state: InstanceState[Any], optimistic: bool = False
+    ) -> bool:
+        return self.impl.hasparent(state, optimistic=optimistic) is not False
+
+    def __getattr__(self, key: str) -> Any:
+        try:
+            return util.MemoizedSlots.__getattr__(self, key)
+        except AttributeError:
+            pass
+
+        try:
+            return getattr(self.comparator, key)
+        except AttributeError as err:
+            raise AttributeError(
+                "Neither %r object nor %r object associated with %s "
+                "has an attribute %r"
+                % (
+                    type(self).__name__,
+                    type(self.comparator).__name__,
+                    self,
+                    key,
+                )
+            ) from err
+
+    def __str__(self) -> str:
+        return f"{self.class_.__name__}.{self.key}"
+
+    def _memoized_attr_property(self) -> Optional[MapperProperty[Any]]:
+        return self.comparator.property
+
+
+def _queryable_attribute_unreduce(
+    key: str,
+    mapped_class: Type[_O],
+    parententity: _InternalEntityType[_O],
+    entity: _ExternalEntityType[Any],
+) -> Any:
+    # this method is only used in terms of the
+    # sqlalchemy.ext.serializer extension
+    if insp_is_aliased_class(parententity):
+        return entity._get_from_serialized(key, mapped_class, parententity)
+    else:
+        return getattr(entity, key)
+
+
+class InstrumentedAttribute(QueryableAttribute[_T_co]):
+    """Class bound instrumented attribute which adds basic
+    :term:`descriptor` methods.
+
+    See :class:`.QueryableAttribute` for a description of most features.
+
+
+    """
+
+    __slots__ = ()
+
+    inherit_cache = True
+    """:meta private:"""
+
+    # hack to make __doc__ writeable on instances of
+    # InstrumentedAttribute, while still keeping classlevel
+    # __doc__ correct
+
+    @util.rw_hybridproperty
+    def __doc__(self) -> Optional[str]:
+        return self._doc
+
+    @__doc__.setter  # type: ignore
+    def __doc__(self, value: Optional[str]) -> None:
+        self._doc = value
+
+    @__doc__.classlevel  # type: ignore
+    def __doc__(cls) -> Optional[str]:
+        return super().__doc__
+
+    def __set__(self, instance: object, value: Any) -> None:
+        self.impl.set(
+            instance_state(instance), instance_dict(instance), value, None
+        )
+
+    def __delete__(self, instance: object) -> None:
+        self.impl.delete(instance_state(instance), instance_dict(instance))
+
+    @overload
+    def __get__(
+        self, instance: None, owner: Any
+    ) -> InstrumentedAttribute[_T_co]: ...
+
+    @overload
+    def __get__(self, instance: object, owner: Any) -> _T_co: ...
+
+    def __get__(
+        self, instance: Optional[object], owner: Any
+    ) -> Union[InstrumentedAttribute[_T_co], _T_co]:
+        if instance is None:
+            return self
+
+        dict_ = instance_dict(instance)
+        if self.impl.supports_population and self.key in dict_:
+            return dict_[self.key]  # type: ignore[no-any-return]
+        else:
+            try:
+                state = instance_state(instance)
+            except AttributeError as err:
+                raise orm_exc.UnmappedInstanceError(instance) from err
+            return self.impl.get(state, dict_)  # type: ignore[no-any-return]
+
+
+@dataclasses.dataclass(frozen=True)
+class AdHocHasEntityNamespace(HasCacheKey):
+    _traverse_internals: ClassVar[_TraverseInternalsType] = [
+        ("_entity_namespace", InternalTraversal.dp_has_cache_key),
+    ]
+
+    # py37 compat, no slots=True on dataclass
+    __slots__ = ("_entity_namespace",)
+    _entity_namespace: _InternalEntityType[Any]
+    is_mapper: ClassVar[bool] = False
+    is_aliased_class: ClassVar[bool] = False
+
+    @property
+    def entity_namespace(self):
+        return self._entity_namespace.entity_namespace
+
+
+def create_proxied_attribute(
+    descriptor: Any,
+) -> Callable[..., QueryableAttribute[Any]]:
+    """Create an QueryableAttribute / user descriptor hybrid.
+
+    Returns a new QueryableAttribute type that delegates descriptor
+    behavior and getattr() to the given descriptor.
+    """
+
+    # TODO: can move this to descriptor_props if the need for this
+    # function is removed from ext/hybrid.py
+
+    class Proxy(QueryableAttribute[Any]):
+        """Presents the :class:`.QueryableAttribute` interface as a
+        proxy on top of a Python descriptor / :class:`.PropComparator`
+        combination.
+
+        """
+
+        _extra_criteria = ()
+
+        # the AttributeError catches inside of __getattr__ lead to
+        # infinite recursion if __slots__ are added here as well
+        # __slots__ = ("descriptor", "original_property", "_comparator")
+
+        def __init__(
+            self,
+            class_,
+            key,
+            descriptor,
+            comparator,
+            adapt_to_entity=None,
+            doc=None,
+            original_property=None,
+        ):
+            self.class_ = class_
+            self.key = key
+            self.descriptor = descriptor
+            self.original_property = original_property
+            self._comparator = comparator
+            self._adapt_to_entity = adapt_to_entity
+            self._doc = self.__doc__ = doc
+
+        @property
+        def _parententity(self):
+            return inspection.inspect(self.class_, raiseerr=False)
+
+        @property
+        def parent(self):
+            return inspection.inspect(self.class_, raiseerr=False)
+
+        _is_internal_proxy = True
+
+        _cache_key_traversal = [
+            ("key", visitors.ExtendedInternalTraversal.dp_string),
+            ("_parententity", visitors.ExtendedInternalTraversal.dp_multi),
+        ]
+
+        @property
+        def _impl_uses_objects(self):
+            return (
+                self.original_property is not None
+                and getattr(self.class_, self.key).impl.uses_objects
+            )
+
+        @property
+        def _entity_namespace(self):
+            if hasattr(self._comparator, "_parententity"):
+                return self._comparator._parententity
+            else:
+                # used by hybrid attributes which try to remain
+                # agnostic of any ORM concepts like mappers
+                return AdHocHasEntityNamespace(self._parententity)
+
+        @property
+        def property(self):
+            return self.comparator.property
+
+        @util.memoized_property
+        def comparator(self):
+            if callable(self._comparator):
+                self._comparator = self._comparator()
+            if self._adapt_to_entity:
+                self._comparator = self._comparator.adapt_to_entity(
+                    self._adapt_to_entity
+                )
+            return self._comparator
+
+        def adapt_to_entity(self, adapt_to_entity):
+            return self.__class__(
+                adapt_to_entity.entity,
+                self.key,
+                self.descriptor,
+                self._comparator,
+                adapt_to_entity,
+            )
+
+        def _clone(self, **kw):
+            return self.__class__(
+                self.class_,
+                self.key,
+                self.descriptor,
+                self._comparator,
+                adapt_to_entity=self._adapt_to_entity,
+                original_property=self.original_property,
+            )
+
+        def __get__(self, instance, owner):
+            retval = self.descriptor.__get__(instance, owner)
+            # detect if this is a plain Python @property, which just returns
+            # itself for class level access.  If so, then return us.
+            # Otherwise, return the object returned by the descriptor.
+            if retval is self.descriptor and instance is None:
+                return self
+            else:
+                return retval
+
+        def __str__(self) -> str:
+            return f"{self.class_.__name__}.{self.key}"
+
+        def __getattr__(self, attribute):
+            """Delegate __getattr__ to the original descriptor and/or
+            comparator."""
+
+            # this is unfortunately very complicated, and is easily prone
+            # to recursion overflows when implementations of related
+            # __getattr__ schemes are changed
+
+            try:
+                return util.MemoizedSlots.__getattr__(self, attribute)
+            except AttributeError:
+                pass
+
+            try:
+                return getattr(descriptor, attribute)
+            except AttributeError as err:
+                if attribute == "comparator":
+                    raise AttributeError("comparator") from err
+                try:
+                    # comparator itself might be unreachable
+                    comparator = self.comparator
+                except AttributeError as err2:
+                    raise AttributeError(
+                        "Neither %r object nor unconfigured comparator "
+                        "object associated with %s has an attribute %r"
+                        % (type(descriptor).__name__, self, attribute)
+                    ) from err2
+                else:
+                    try:
+                        return getattr(comparator, attribute)
+                    except AttributeError as err3:
+                        raise AttributeError(
+                            "Neither %r object nor %r object "
+                            "associated with %s has an attribute %r"
+                            % (
+                                type(descriptor).__name__,
+                                type(comparator).__name__,
+                                self,
+                                attribute,
+                            )
+                        ) from err3
+
+    Proxy.__name__ = type(descriptor).__name__ + "Proxy"
+
+    util.monkeypatch_proxied_specials(
+        Proxy, type(descriptor), name="descriptor", from_instance=descriptor
+    )
+    return Proxy
+
+
+OP_REMOVE = util.symbol("REMOVE")
+OP_APPEND = util.symbol("APPEND")
+OP_REPLACE = util.symbol("REPLACE")
+OP_BULK_REPLACE = util.symbol("BULK_REPLACE")
+OP_MODIFIED = util.symbol("MODIFIED")
+
+
+class AttributeEventToken:
+    """A token propagated throughout the course of a chain of attribute
+    events.
+
+    Serves as an indicator of the source of the event and also provides
+    a means of controlling propagation across a chain of attribute
+    operations.
+
+    The :class:`.AttributeEventToken` object is sent as the
+    ``initiator`` argument when dealing with events such as
+    :meth:`.AttributeEvents.append`, :meth:`.AttributeEvents.set`,
+    and :meth:`.AttributeEvents.remove`.
+
+    The :class:`.AttributeEventToken` object is currently interpreted by
+    the backref event handlers, and is used to control the propagation
+    of operations across two mutually-dependent attributes.
+
+    .. versionchanged:: 2.0  Changed the name from ``AttributeEvent``
+       to ``AttributeEventToken``.
+
+    :attribute impl: The :class:`.AttributeImpl` which is the current event
+     initiator.
+
+    :attribute op: The symbol :attr:`.OP_APPEND`, :attr:`.OP_REMOVE`,
+     :attr:`.OP_REPLACE`, or :attr:`.OP_BULK_REPLACE`, indicating the
+     source operation.
+
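+    E.g., a sketch of an event listener that receives this token as its
+    ``initiator`` argument, assuming a mapped class ``User`` with a
+    ``name`` attribute::
+
+        from sqlalchemy import event
+        from sqlalchemy.orm.attributes import OP_REPLACE
+
+        @event.listens_for(User.name, "set")
+        def receive_set(target, value, oldvalue, initiator):
+            if initiator.op is OP_REPLACE:
+                ...  # an existing value is being replaced
+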
+    """
+
+    __slots__ = "impl", "op", "parent_token"
+
+    def __init__(self, attribute_impl: AttributeImpl, op: util.symbol):
+        self.impl = attribute_impl
+        self.op = op
+        self.parent_token = self.impl.parent_token
+
+    def __eq__(self, other):
+        return (
+            isinstance(other, AttributeEventToken)
+            and other.impl is self.impl
+            and other.op == self.op
+        )
+
+    @property
+    def key(self):
+        return self.impl.key
+
+    def hasparent(self, state):
+        return self.impl.hasparent(state)
+
+
+AttributeEvent = AttributeEventToken  # legacy
+Event = AttributeEventToken  # legacy
+
+
+class AttributeImpl:
+    """internal implementation for instrumented attributes."""
+
+    collection: bool
+    default_accepts_scalar_loader: bool
+    uses_objects: bool
+    supports_population: bool
+    dynamic: bool
+
+    _is_has_collection_adapter = False
+
+    _replace_token: AttributeEventToken
+    _remove_token: AttributeEventToken
+    _append_token: AttributeEventToken
+
+    def __init__(
+        self,
+        class_: _ExternalEntityType[_O],
+        key: str,
+        callable_: Optional[_LoaderCallable],
+        dispatch: _Dispatch[QueryableAttribute[Any]],
+        trackparent: bool = False,
+        compare_function: Optional[Callable[..., bool]] = None,
+        active_history: bool = False,
+        parent_token: Optional[AttributeEventToken] = None,
+        load_on_unexpire: bool = True,
+        send_modified_events: bool = True,
+        accepts_scalar_loader: Optional[bool] = None,
+        **kwargs: Any,
+    ):
+        r"""Construct an AttributeImpl.
+
+        :param \class_: associated class
+
+        :param key: string name of the attribute
+
+        :param \callable_:
+          optional function which generates a callable based on a parent
+          instance, which produces the "default" values for a scalar or
+          collection attribute when it's first accessed, if not present
+          already.
+
+        :param trackparent:
+          if True, attempt to track if an instance has a parent attached
+          to it via this attribute.
+
+        :param compare_function:
+          a function that compares two values which are normally
+          assignable to this attribute.
+
+        :param active_history:
+          indicates that get_history() should always return the "old" value,
+          even if it means executing a lazy callable upon attribute change.
+
+        :param parent_token:
+          Usually references the MapperProperty, used as a key for
+          the hasparent() function to identify an "owning" attribute.
+          Allows multiple AttributeImpls to all match a single
+          owner attribute.
+
+        :param load_on_unexpire:
+          if False, don't include this attribute in a load-on-expired
+          operation, i.e. the "expired_attribute_loader" process.
+          The attribute can still be in the "expired" list and be
+          considered to be "expired".   Previously, this flag was called
+          "expire_missing" and is only used by a deferred column
+          attribute.
+
+        :param send_modified_events:
+          if False, the InstanceState._modified_event method will have no
+          effect; this means the attribute will never show up as changed in a
+          history entry.
+
+        """
+        self.class_ = class_
+        self.key = key
+        self.callable_ = callable_
+        self.dispatch = dispatch
+        self.trackparent = trackparent
+        self.parent_token = parent_token or self
+        self.send_modified_events = send_modified_events
+        if compare_function is None:
+            self.is_equal = operator.eq
+        else:
+            self.is_equal = compare_function
+
+        if accepts_scalar_loader is not None:
+            self.accepts_scalar_loader = accepts_scalar_loader
+        else:
+            self.accepts_scalar_loader = self.default_accepts_scalar_loader
+
+        _deferred_history = kwargs.pop("_deferred_history", False)
+        self._deferred_history = _deferred_history
+
+        if active_history:
+            self.dispatch._active_history = True
+
+        self.load_on_unexpire = load_on_unexpire
+        self._modified_token = AttributeEventToken(self, OP_MODIFIED)
+
+    __slots__ = (
+        "class_",
+        "key",
+        "callable_",
+        "dispatch",
+        "trackparent",
+        "parent_token",
+        "send_modified_events",
+        "is_equal",
+        "load_on_unexpire",
+        "_modified_token",
+        "accepts_scalar_loader",
+        "_deferred_history",
+    )
+
+    def __str__(self) -> str:
+        return f"{self.class_.__name__}.{self.key}"
+
+    def _get_active_history(self):
+        """Backwards compat for impl.active_history"""
+
+        return self.dispatch._active_history
+
+    def _set_active_history(self, value):
+        self.dispatch._active_history = value
+
+    active_history = property(_get_active_history, _set_active_history)
+
+    def hasparent(
+        self, state: InstanceState[Any], optimistic: bool = False
+    ) -> bool:
+        """Return the boolean value of a `hasparent` flag attached to
+        the given state.
+
+        The `optimistic` flag determines what the default return value
+        should be if no `hasparent` flag can be located.
+
+        As this function is used to determine if an instance is an
+        *orphan*, instances that were loaded from storage should be
+        assumed to not be orphans, until a True/False value for this
+        flag is set.
+
+        An instance attribute that is loaded by a callable function
+        will also not have a `hasparent` flag.
+
+        """
+        msg = "This AttributeImpl is not configured to track parents."
+        assert self.trackparent, msg
+
+        return (
+            state.parents.get(id(self.parent_token), optimistic) is not False
+        )
+
+    def sethasparent(
+        self,
+        state: InstanceState[Any],
+        parent_state: InstanceState[Any],
+        value: bool,
+    ) -> None:
+        """Set a boolean flag on the given item corresponding to
+        whether or not it is attached to a parent object via the
+        attribute represented by this ``InstrumentedAttribute``.
+
+        """
+        msg = "This AttributeImpl is not configured to track parents."
+        assert self.trackparent, msg
+
+        id_ = id(self.parent_token)
+        if value:
+            state.parents[id_] = parent_state
+        else:
+            if id_ in state.parents:
+                last_parent = state.parents[id_]
+
+                if (
+                    last_parent is not False
+                    and last_parent.key != parent_state.key
+                ):
+                    if last_parent.obj() is None:
+                        raise orm_exc.StaleDataError(
+                            "Removing state %s from parent "
+                            "state %s along attribute '%s', "
+                            "but the parent record "
+                            "has gone stale, can't be sure this "
+                            "is the most recent parent."
+                            % (
+                                state_str(state),
+                                state_str(parent_state),
+                                self.key,
+                            )
+                        )
+
+                    return
+
+            state.parents[id_] = False
+
+    def get_history(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        passive: PassiveFlag = PASSIVE_OFF,
+    ) -> History:
+        raise NotImplementedError()
+
+    def get_all_pending(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        passive: PassiveFlag = PASSIVE_NO_INITIALIZE,
+    ) -> _AllPendingType:
+        """Return a list of tuples of (state, obj)
+        for all objects in this attribute's current state
+        + history.
+
+        Only applies to object-based attributes.
+
+        This is an inlining of existing functionality
+        which roughly corresponds to::
+
+            get_state_history(
+                state, key, passive=PASSIVE_NO_INITIALIZE
+            ).sum()
+
+        """
+        raise NotImplementedError()
+
+    def _default_value(
+        self, state: InstanceState[Any], dict_: _InstanceDict
+    ) -> Any:
+        """Produce an empty value for an uninitialized scalar attribute."""
+
+        assert self.key not in dict_, (
+            "_default_value should only be invoked for an "
+            "uninitialized or expired attribute"
+        )
+
+        value = None
+        for fn in self.dispatch.init_scalar:
+            ret = fn(state, value, dict_)
+            if ret is not ATTR_EMPTY:
+                value = ret
+
+        return value
+
+    def get(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        passive: PassiveFlag = PASSIVE_OFF,
+    ) -> Any:
+        """Retrieve a value from the given object.
+        If a callable is assembled on this object's attribute, and
+        passive is False, the callable will be executed and the
+        resulting value will be set as the new value for this attribute.
+        """
+        if self.key in dict_:
+            return dict_[self.key]
+        else:
+            # if history present, don't load
+            key = self.key
+            if (
+                key not in state.committed_state
+                or state.committed_state[key] is NO_VALUE
+            ):
+                if not passive & CALLABLES_OK:
+                    return PASSIVE_NO_RESULT
+
+                value = self._fire_loader_callables(state, key, passive)
+
+                if value is PASSIVE_NO_RESULT or value is NO_VALUE:
+                    return value
+                elif value is ATTR_WAS_SET:
+                    try:
+                        return dict_[key]
+                    except KeyError as err:
+                        # TODO: no test coverage here.
+                        raise KeyError(
+                            "Deferred loader for attribute "
+                            "%r failed to populate "
+                            "correctly" % key
+                        ) from err
+                elif value is not ATTR_EMPTY:
+                    return self.set_committed_value(state, dict_, value)
+
+            if not passive & INIT_OK:
+                return NO_VALUE
+            else:
+                return self._default_value(state, dict_)
+
+    def _fire_loader_callables(
+        self, state: InstanceState[Any], key: str, passive: PassiveFlag
+    ) -> Any:
+        if (
+            self.accepts_scalar_loader
+            and self.load_on_unexpire
+            and key in state.expired_attributes
+        ):
+            return state._load_expired(state, passive)
+        elif key in state.callables:
+            callable_ = state.callables[key]
+            return callable_(state, passive)
+        elif self.callable_:
+            return self.callable_(state, passive)
+        else:
+            return ATTR_EMPTY
+
+    def append(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken],
+        passive: PassiveFlag = PASSIVE_OFF,
+    ) -> None:
+        self.set(state, dict_, value, initiator, passive=passive)
+
+    def remove(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken],
+        passive: PassiveFlag = PASSIVE_OFF,
+    ) -> None:
+        self.set(
+            state, dict_, None, initiator, passive=passive, check_old=value
+        )
+
+    def pop(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken],
+        passive: PassiveFlag = PASSIVE_OFF,
+    ) -> None:
+        self.set(
+            state,
+            dict_,
+            None,
+            initiator,
+            passive=passive,
+            check_old=value,
+            pop=True,
+        )
+
+    def set(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken] = None,
+        passive: PassiveFlag = PASSIVE_OFF,
+        check_old: Any = None,
+        pop: bool = False,
+    ) -> None:
+        raise NotImplementedError()
+
+    def delete(self, state: InstanceState[Any], dict_: _InstanceDict) -> None:
+        raise NotImplementedError()
+
+    def get_committed_value(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        passive: PassiveFlag = PASSIVE_OFF,
+    ) -> Any:
+        """return the unchanged value of this attribute"""
+
+        if self.key in state.committed_state:
+            value = state.committed_state[self.key]
+            if value is NO_VALUE:
+                return None
+            else:
+                return value
+        else:
+            return self.get(state, dict_, passive=passive)
+
+    def set_committed_value(self, state, dict_, value):
+        """set an attribute value on the given instance and 'commit' it."""
+
+        dict_[self.key] = value
+        state._commit(dict_, [self.key])
+        return value
+
+
+class ScalarAttributeImpl(AttributeImpl):
+    """represents a scalar value-holding InstrumentedAttribute."""
+
+    default_accepts_scalar_loader = True
+    uses_objects = False
+    supports_population = True
+    collection = False
+    dynamic = False
+
+    __slots__ = "_replace_token", "_append_token", "_remove_token"
+
+    def __init__(self, *arg, **kw):
+        super().__init__(*arg, **kw)
+        self._replace_token = self._append_token = AttributeEventToken(
+            self, OP_REPLACE
+        )
+        self._remove_token = AttributeEventToken(self, OP_REMOVE)
+
+    def delete(self, state: InstanceState[Any], dict_: _InstanceDict) -> None:
+        if self.dispatch._active_history:
+            old = self.get(state, dict_, PASSIVE_RETURN_NO_VALUE)
+        else:
+            old = dict_.get(self.key, NO_VALUE)
+
+        if self.dispatch.remove:
+            self.fire_remove_event(state, dict_, old, self._remove_token)
+        state._modified_event(dict_, self, old)
+
+        existing = dict_.pop(self.key, NO_VALUE)
+        if (
+            existing is NO_VALUE
+            and old is NO_VALUE
+            and not state.expired
+            and self.key not in state.expired_attributes
+        ):
+            raise AttributeError("%s object does not have a value" % self)
+
+    def get_history(
+        self,
+        state: InstanceState[Any],
+        dict_: Dict[str, Any],
+        passive: PassiveFlag = PASSIVE_OFF,
+    ) -> History:
+        if self.key in dict_:
+            return History.from_scalar_attribute(self, state, dict_[self.key])
+        elif self.key in state.committed_state:
+            return History.from_scalar_attribute(self, state, NO_VALUE)
+        else:
+            if passive & INIT_OK:
+                passive ^= INIT_OK
+            current = self.get(state, dict_, passive=passive)
+            if current is PASSIVE_NO_RESULT:
+                return HISTORY_BLANK
+            else:
+                return History.from_scalar_attribute(self, state, current)
+
+    def set(
+        self,
+        state: InstanceState[Any],
+        dict_: Dict[str, Any],
+        value: Any,
+        initiator: Optional[AttributeEventToken] = None,
+        passive: PassiveFlag = PASSIVE_OFF,
+        check_old: Optional[object] = None,
+        pop: bool = False,
+    ) -> None:
+        if self.dispatch._active_history:
+            old = self.get(state, dict_, PASSIVE_RETURN_NO_VALUE)
+        else:
+            old = dict_.get(self.key, NO_VALUE)
+
+        if self.dispatch.set:
+            value = self.fire_replace_event(
+                state, dict_, value, old, initiator
+            )
+        state._modified_event(dict_, self, old)
+        dict_[self.key] = value
+
+    def fire_replace_event(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: _T,
+        previous: Any,
+        initiator: Optional[AttributeEventToken],
+    ) -> _T:
+        for fn in self.dispatch.set:
+            value = fn(
+                state, value, previous, initiator or self._replace_token
+            )
+        return value
+
+    def fire_remove_event(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken],
+    ) -> None:
+        for fn in self.dispatch.remove:
+            fn(state, value, initiator or self._remove_token)
+
+
+class ScalarObjectAttributeImpl(ScalarAttributeImpl):
+    """represents a scalar-holding InstrumentedAttribute,
+    where the target object is also instrumented.
+
+    Adds events to delete/set operations.
+
+    """
+
+    default_accepts_scalar_loader = False
+    uses_objects = True
+    supports_population = True
+    collection = False
+
+    __slots__ = ()
+
+    def delete(self, state: InstanceState[Any], dict_: _InstanceDict) -> None:
+        if self.dispatch._active_history:
+            old = self.get(
+                state,
+                dict_,
+                passive=PASSIVE_ONLY_PERSISTENT
+                | NO_AUTOFLUSH
+                | LOAD_AGAINST_COMMITTED,
+            )
+        else:
+            old = self.get(
+                state,
+                dict_,
+                passive=PASSIVE_NO_FETCH ^ INIT_OK
+                | LOAD_AGAINST_COMMITTED
+                | NO_RAISE,
+            )
+
+        self.fire_remove_event(state, dict_, old, self._remove_token)
+
+        existing = dict_.pop(self.key, NO_VALUE)
+
+        # if the attribute is expired, we currently have no way to tell
+        # that an object-attribute was expired vs. not loaded.   So
+        # for this test, we look to see if the object has a DB identity.
+        if (
+            existing is NO_VALUE
+            and old is not PASSIVE_NO_RESULT
+            and state.key is None
+        ):
+            raise AttributeError("%s object does not have a value" % self)
+
+    def get_history(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        passive: PassiveFlag = PASSIVE_OFF,
+    ) -> History:
+        if self.key in dict_:
+            current = dict_[self.key]
+        else:
+            if passive & INIT_OK:
+                passive ^= INIT_OK
+            current = self.get(state, dict_, passive=passive)
+            if current is PASSIVE_NO_RESULT:
+                return HISTORY_BLANK
+
+        if not self._deferred_history:
+            return History.from_object_attribute(self, state, current)
+        else:
+            original = state.committed_state.get(self.key, _NO_HISTORY)
+            if original is PASSIVE_NO_RESULT:
+                loader_passive = passive | (
+                    PASSIVE_ONLY_PERSISTENT
+                    | NO_AUTOFLUSH
+                    | LOAD_AGAINST_COMMITTED
+                    | NO_RAISE
+                    | DEFERRED_HISTORY_LOAD
+                )
+                original = self._fire_loader_callables(
+                    state, self.key, loader_passive
+                )
+            return History.from_object_attribute(
+                self, state, current, original=original
+            )
+
+    def get_all_pending(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        passive: PassiveFlag = PASSIVE_NO_INITIALIZE,
+    ) -> _AllPendingType:
+        if self.key in dict_:
+            current = dict_[self.key]
+        elif passive & CALLABLES_OK:
+            current = self.get(state, dict_, passive=passive)
+        else:
+            return []
+
+        ret: _AllPendingType
+
+        # can't use __hash__(), can't use __eq__() here
+        if (
+            current is not None
+            and current is not PASSIVE_NO_RESULT
+            and current is not NO_VALUE
+        ):
+            ret = [(instance_state(current), current)]
+        else:
+            ret = [(None, None)]
+
+        if self.key in state.committed_state:
+            original = state.committed_state[self.key]
+            if (
+                original is not None
+                and original is not PASSIVE_NO_RESULT
+                and original is not NO_VALUE
+                and original is not current
+            ):
+                ret.append((instance_state(original), original))
+        return ret
+
+    def set(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken] = None,
+        passive: PassiveFlag = PASSIVE_OFF,
+        check_old: Any = None,
+        pop: bool = False,
+    ) -> None:
+        """Set a value on the given InstanceState."""
+
+        if self.dispatch._active_history:
+            old = self.get(
+                state,
+                dict_,
+                passive=PASSIVE_ONLY_PERSISTENT
+                | NO_AUTOFLUSH
+                | LOAD_AGAINST_COMMITTED,
+            )
+        else:
+            old = self.get(
+                state,
+                dict_,
+                passive=PASSIVE_NO_FETCH ^ INIT_OK
+                | LOAD_AGAINST_COMMITTED
+                | NO_RAISE,
+            )
+
+        if (
+            check_old is not None
+            and old is not PASSIVE_NO_RESULT
+            and check_old is not old
+        ):
+            if pop:
+                return
+            else:
+                raise ValueError(
+                    "Object %s not associated with %s on attribute '%s'"
+                    % (instance_str(check_old), state_str(state), self.key)
+                )
+
+        value = self.fire_replace_event(state, dict_, value, old, initiator)
+        dict_[self.key] = value
+
+    def fire_remove_event(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken],
+    ) -> None:
+        if self.trackparent and value not in (
+            None,
+            PASSIVE_NO_RESULT,
+            NO_VALUE,
+        ):
+            self.sethasparent(instance_state(value), state, False)
+
+        for fn in self.dispatch.remove:
+            fn(state, value, initiator or self._remove_token)
+
+        state._modified_event(dict_, self, value)
+
+    def fire_replace_event(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: _T,
+        previous: Any,
+        initiator: Optional[AttributeEventToken],
+    ) -> _T:
+        if self.trackparent:
+            if previous is not value and previous not in (
+                None,
+                PASSIVE_NO_RESULT,
+                NO_VALUE,
+            ):
+                self.sethasparent(instance_state(previous), state, False)
+
+        for fn in self.dispatch.set:
+            value = fn(
+                state, value, previous, initiator or self._replace_token
+            )
+
+        state._modified_event(dict_, self, previous)
+
+        if self.trackparent:
+            if value is not None:
+                self.sethasparent(instance_state(value), state, True)
+
+        return value
+
+
+class HasCollectionAdapter:
+    __slots__ = ()
+
+    collection: bool
+    _is_has_collection_adapter = True
+
+    def _dispose_previous_collection(
+        self,
+        state: InstanceState[Any],
+        collection: _AdaptedCollectionProtocol,
+        adapter: CollectionAdapter,
+        fire_event: bool,
+    ) -> None:
+        raise NotImplementedError()
+
+    @overload
+    def get_collection(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        user_data: Literal[None] = ...,
+        passive: Literal[PassiveFlag.PASSIVE_OFF] = ...,
+    ) -> CollectionAdapter: ...
+
+    @overload
+    def get_collection(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        user_data: _AdaptedCollectionProtocol = ...,
+        passive: PassiveFlag = ...,
+    ) -> CollectionAdapter: ...
+
+    @overload
+    def get_collection(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        user_data: Optional[_AdaptedCollectionProtocol] = ...,
+        passive: PassiveFlag = ...,
+    ) -> Union[
+        Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter
+    ]: ...
+
+    def get_collection(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        user_data: Optional[_AdaptedCollectionProtocol] = None,
+        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
+    ) -> Union[
+        Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter
+    ]:
+        raise NotImplementedError()
+
+    def set(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken] = None,
+        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
+        check_old: Any = None,
+        pop: bool = False,
+        _adapt: bool = True,
+    ) -> None:
+        raise NotImplementedError()
+
+
+if TYPE_CHECKING:
+
+    def _is_collection_attribute_impl(
+        impl: AttributeImpl,
+    ) -> TypeGuard[CollectionAttributeImpl]: ...
+
+else:
+    _is_collection_attribute_impl = operator.attrgetter("collection")
+
+
+class CollectionAttributeImpl(HasCollectionAdapter, AttributeImpl):
+    """A collection-holding attribute that instruments changes in membership.
+
+    Only handles collections of instrumented objects.
+
+    CollectionAttributeImpl holds an arbitrary, user-specified
+    container object (defaulting to a list) and brokers access to the
+    CollectionAdapter, a "view" onto that object that presents consistent bag
+    semantics to the orm layer independent of the user data implementation.
+
+    """
+
+    uses_objects = True
+    collection = True
+    default_accepts_scalar_loader = False
+    supports_population = True
+    dynamic = False
+
+    _bulk_replace_token: AttributeEventToken
+
+    __slots__ = (
+        "copy",
+        "collection_factory",
+        "_append_token",
+        "_remove_token",
+        "_bulk_replace_token",
+        "_duck_typed_as",
+    )
+
+    def __init__(
+        self,
+        class_,
+        key,
+        callable_,
+        dispatch,
+        typecallable=None,
+        trackparent=False,
+        copy_function=None,
+        compare_function=None,
+        **kwargs,
+    ):
+        super().__init__(
+            class_,
+            key,
+            callable_,
+            dispatch,
+            trackparent=trackparent,
+            compare_function=compare_function,
+            **kwargs,
+        )
+
+        if copy_function is None:
+            copy_function = self.__copy
+        self.copy = copy_function
+        self.collection_factory = typecallable
+        self._append_token = AttributeEventToken(self, OP_APPEND)
+        self._remove_token = AttributeEventToken(self, OP_REMOVE)
+        self._bulk_replace_token = AttributeEventToken(self, OP_BULK_REPLACE)
+        self._duck_typed_as = util.duck_type_collection(
+            self.collection_factory()
+        )
+
+        if getattr(self.collection_factory, "_sa_linker", None):
+
+            @event.listens_for(self, "init_collection")
+            def link(target, collection, collection_adapter):
+                collection._sa_linker(collection_adapter)
+
+            @event.listens_for(self, "dispose_collection")
+            def unlink(target, collection, collection_adapter):
+                collection._sa_linker(None)
+
+    def __copy(self, item):
+        return [y for y in collections.collection_adapter(item)]
+
+    def get_history(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        passive: PassiveFlag = PASSIVE_OFF,
+    ) -> History:
+        current = self.get(state, dict_, passive=passive)
+
+        if current is PASSIVE_NO_RESULT:
+            if (
+                passive & PassiveFlag.INCLUDE_PENDING_MUTATIONS
+                and self.key in state._pending_mutations
+            ):
+                pending = state._pending_mutations[self.key]
+                return pending.merge_with_history(HISTORY_BLANK)
+            else:
+                return HISTORY_BLANK
+        else:
+            if passive & PassiveFlag.INCLUDE_PENDING_MUTATIONS:
+                # this collection is loaded / present; there should not
+                # be any pending mutations
+                assert self.key not in state._pending_mutations
+
+            return History.from_collection(self, state, current)
+
+    def get_all_pending(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        passive: PassiveFlag = PASSIVE_NO_INITIALIZE,
+    ) -> _AllPendingType:
+        # NOTE: passive is ignored here at the moment
+
+        if self.key not in dict_:
+            return []
+
+        current = dict_[self.key]
+        current = getattr(current, "_sa_adapter")
+
+        if self.key in state.committed_state:
+            original = state.committed_state[self.key]
+            if original is not NO_VALUE:
+                current_states = [
+                    ((c is not None) and instance_state(c) or None, c)
+                    for c in current
+                ]
+                original_states = [
+                    ((c is not None) and instance_state(c) or None, c)
+                    for c in original
+                ]
+
+                current_set = dict(current_states)
+                original_set = dict(original_states)
+
+                return (
+                    [
+                        (s, o)
+                        for s, o in current_states
+                        if s not in original_set
+                    ]
+                    + [(s, o) for s, o in current_states if s in original_set]
+                    + [
+                        (s, o)
+                        for s, o in original_states
+                        if s not in current_set
+                    ]
+                )
+
+        return [(instance_state(o), o) for o in current]
+
+    def fire_append_event(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: _T,
+        initiator: Optional[AttributeEventToken],
+        key: Optional[Any],
+    ) -> _T:
+        for fn in self.dispatch.append:
+            value = fn(state, value, initiator or self._append_token, key=key)
+
+        state._modified_event(dict_, self, NO_VALUE, True)
+
+        if self.trackparent and value is not None:
+            self.sethasparent(instance_state(value), state, True)
+
+        return value
+
+    def fire_append_wo_mutation_event(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: _T,
+        initiator: Optional[AttributeEventToken],
+        key: Optional[Any],
+    ) -> _T:
+        for fn in self.dispatch.append_wo_mutation:
+            value = fn(state, value, initiator or self._append_token, key=key)
+
+        return value
+
+    def fire_pre_remove_event(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        initiator: Optional[AttributeEventToken],
+        key: Optional[Any],
+    ) -> None:
+        """A special event used for pop() operations.
+
+        The "remove" event needs to have the item to be removed passed to
+        it, which in the case of pop from a set, we don't have a way to access
+        the item before the operation.   the event is used for all pop()
+        operations (even though set.pop is the one where it is really needed).
+
+        """
+        state._modified_event(dict_, self, NO_VALUE, True)
+
+    def fire_remove_event(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken],
+        key: Optional[Any],
+    ) -> None:
+        if self.trackparent and value is not None:
+            self.sethasparent(instance_state(value), state, False)
+
+        for fn in self.dispatch.remove:
+            fn(state, value, initiator or self._remove_token, key=key)
+
+        state._modified_event(dict_, self, NO_VALUE, True)
+
+    def delete(self, state: InstanceState[Any], dict_: _InstanceDict) -> None:
+        if self.key not in dict_:
+            return
+
+        state._modified_event(dict_, self, NO_VALUE, True)
+
+        collection = self.get_collection(state, state.dict)
+        collection.clear_with_event()
+
+        # the key is always present here because we checked above;
+        # i.e. delete() is a no-op if the collection is not present.
+        del dict_[self.key]
+
+    def _default_value(
+        self, state: InstanceState[Any], dict_: _InstanceDict
+    ) -> _AdaptedCollectionProtocol:
+        """Produce an empty collection for an un-initialized attribute"""
+
+        assert self.key not in dict_, (
+            "_default_value should only be invoked for an "
+            "uninitialized or expired attribute"
+        )
+
+        if self.key in state._empty_collections:
+            return state._empty_collections[self.key]
+
+        adapter, user_data = self._initialize_collection(state)
+        adapter._set_empty(user_data)
+        return user_data
+
+    def _initialize_collection(
+        self, state: InstanceState[Any]
+    ) -> Tuple[CollectionAdapter, _AdaptedCollectionProtocol]:
+        adapter, collection = state.manager.initialize_collection(
+            self.key, state, self.collection_factory
+        )
+
+        self.dispatch.init_collection(state, collection, adapter)
+
+        return adapter, collection
+
+    def append(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken],
+        passive: PassiveFlag = PASSIVE_OFF,
+    ) -> None:
+        collection = self.get_collection(
+            state, dict_, user_data=None, passive=passive
+        )
+        if collection is PASSIVE_NO_RESULT:
+            value = self.fire_append_event(
+                state, dict_, value, initiator, key=NO_KEY
+            )
+            assert (
+                self.key not in dict_
+            ), "Collection was loaded during event handling."
+            state._get_pending_mutation(self.key).append(value)
+        else:
+            if TYPE_CHECKING:
+                assert isinstance(collection, CollectionAdapter)
+            collection.append_with_event(value, initiator)
+
+    def remove(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken],
+        passive: PassiveFlag = PASSIVE_OFF,
+    ) -> None:
+        collection = self.get_collection(
+            state, state.dict, user_data=None, passive=passive
+        )
+        if collection is PASSIVE_NO_RESULT:
+            self.fire_remove_event(state, dict_, value, initiator, key=NO_KEY)
+            assert (
+                self.key not in dict_
+            ), "Collection was loaded during event handling."
+            state._get_pending_mutation(self.key).remove(value)
+        else:
+            if TYPE_CHECKING:
+                assert isinstance(collection, CollectionAdapter)
+            collection.remove_with_event(value, initiator)
+
+    def pop(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken],
+        passive: PassiveFlag = PASSIVE_OFF,
+    ) -> None:
+        try:
+            # TODO: better solution here would be to add
+            # a "popper" role to collections.py to complement
+            # "remover".
+            self.remove(state, dict_, value, initiator, passive=passive)
+        except (ValueError, KeyError, IndexError):
+            pass
+
+    def set(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken] = None,
+        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
+        check_old: Any = None,
+        pop: bool = False,
+        _adapt: bool = True,
+    ) -> None:
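+        # an illustrative sketch of what reaches this method (names are
+        # hypothetical): a bulk assignment such as
+        #
+        #     user.addresses = [a1, a2]
+        #
+        # arrives here with the new list as "value"; the previous
+        # collection is replaced wholesale via the "bulk_replace" event
+        # further below.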
+        iterable = orig_iterable = value
+        new_keys = None
+
+        # pulling a new collection first so that an adaptation exception does
+        # not trigger a lazy load of the old collection.
+        new_collection, user_data = self._initialize_collection(state)
+        if _adapt:
+            if new_collection._converter is not None:
+                iterable = new_collection._converter(iterable)
+            else:
+                setting_type = util.duck_type_collection(iterable)
+                receiving_type = self._duck_typed_as
+
+                if setting_type is not receiving_type:
+                    given = (
+                        "None"
+                        if iterable is None
+                        else iterable.__class__.__name__
+                    )
+                    wanted = self._duck_typed_as.__name__
+                    raise TypeError(
+                        "Incompatible collection type: %s is not %s-like"
+                        % (given, wanted)
+                    )
+
+                # If the object is an adapted collection, iterate via its
+                # (iterable) adapter.
+                if hasattr(iterable, "_sa_iterator"):
+                    iterable = iterable._sa_iterator()
+                elif setting_type is dict:
+                    new_keys = list(iterable)
+                    iterable = iterable.values()
+                else:
+                    iterable = iter(iterable)
+        elif util.duck_type_collection(iterable) is dict:
+            new_keys = list(value)
+
+        new_values = list(iterable)
+
+        evt = self._bulk_replace_token
+
+        self.dispatch.bulk_replace(state, new_values, evt, keys=new_keys)
+
+        # propagate NO_RAISE in passive through to the get() for the
+        # existing object (ticket #8862)
+        old = self.get(
+            state,
+            dict_,
+            passive=PASSIVE_ONLY_PERSISTENT ^ (passive & PassiveFlag.NO_RAISE),
+        )
+        if old is PASSIVE_NO_RESULT:
+            old = self._default_value(state, dict_)
+        elif old is orig_iterable:
+            # ignore re-assignment of the current collection, as happens
+            # implicitly with in-place operators (foo.collection |= other)
+            return
+
+        # place a copy of "old" in state.committed_state
+        state._modified_event(dict_, self, old, True)
+
+        old_collection = old._sa_adapter
+
+        dict_[self.key] = user_data
+
+        collections.bulk_replace(
+            new_values, old_collection, new_collection, initiator=evt
+        )
+
+        self._dispose_previous_collection(state, old, old_collection, True)
+
+    def _dispose_previous_collection(
+        self,
+        state: InstanceState[Any],
+        collection: _AdaptedCollectionProtocol,
+        adapter: CollectionAdapter,
+        fire_event: bool,
+    ) -> None:
+        del collection._sa_adapter
+
+        # when discarding the old collection, make sure it is not
+        # referenced in _empty_collections.
+        state._empty_collections.pop(self.key, None)
+        if fire_event:
+            self.dispatch.dispose_collection(state, collection, adapter)
+
+    def _invalidate_collection(
+        self, collection: _AdaptedCollectionProtocol
+    ) -> None:
+        adapter = getattr(collection, "_sa_adapter")
+        adapter.invalidated = True
+
+    def set_committed_value(
+        self, state: InstanceState[Any], dict_: _InstanceDict, value: Any
+    ) -> _AdaptedCollectionProtocol:
+        """Set an attribute value on the given instance and 'commit' it."""
+
+        collection, user_data = self._initialize_collection(state)
+
+        if value:
+            collection.append_multiple_without_event(value)
+
+        state.dict[self.key] = user_data
+
+        state._commit(dict_, [self.key])
+
+        if self.key in state._pending_mutations:
+            # pending items exist.  issue a modified event,
+            # add/remove new items.
+            state._modified_event(dict_, self, user_data, True)
+
+            pending = state._pending_mutations.pop(self.key)
+            added = pending.added_items
+            removed = pending.deleted_items
+            for item in added:
+                collection.append_without_event(item)
+            for item in removed:
+                collection.remove_without_event(item)
+
+        return user_data
+
+    @overload
+    def get_collection(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        user_data: Literal[None] = ...,
+        passive: Literal[PassiveFlag.PASSIVE_OFF] = ...,
+    ) -> CollectionAdapter: ...
+
+    @overload
+    def get_collection(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        user_data: _AdaptedCollectionProtocol = ...,
+        passive: PassiveFlag = ...,
+    ) -> CollectionAdapter: ...
+
+    @overload
+    def get_collection(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        user_data: Optional[_AdaptedCollectionProtocol] = ...,
+        passive: PassiveFlag = PASSIVE_OFF,
+    ) -> Union[
+        Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter
+    ]: ...
+
+    def get_collection(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        user_data: Optional[_AdaptedCollectionProtocol] = None,
+        passive: PassiveFlag = PASSIVE_OFF,
+    ) -> Union[
+        Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter
+    ]:
+        """Retrieve the CollectionAdapter associated with the given state.
+
+        If user_data is None, retrieves it from the state using normal
+        "get()" rules, which will fire lazy callables or return the "empty"
+        collection value.
+
+        """
+        if user_data is None:
+            fetch_user_data = self.get(state, dict_, passive=passive)
+            if fetch_user_data is LoaderCallableStatus.PASSIVE_NO_RESULT:
+                return fetch_user_data
+            else:
+                user_data = cast("_AdaptedCollectionProtocol", fetch_user_data)
+
+        return user_data._sa_adapter
+
+
+def backref_listeners(
+    attribute: QueryableAttribute[Any], key: str, uselist: bool
+) -> None:
+    """Apply listeners to synchronize a two-way relationship."""
+
+    # use easily recognizable names for stack traces.
+
+    # in the sections marked "tokens to test for a recursive loop",
+    # this is somewhat brittle and very performance-sensitive logic
+    # that is specific to how we might arrive at each event.  a marker
+    # that can target us directly to arguments being invoked against
+    # the impl might be simpler, but could interfere with other systems.
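+    #
+    # an illustrative sketch of the effect (User/Address are hypothetical
+    # mapped classes with User.addresses <-> Address.user via backref):
+    #
+    #     user.addresses.append(a)   # fires the "append" listener below,
+    #                                # which performs the equivalent of
+    #                                # "a.user = user"; the token checks
+    #                                # keep that second assignment from
+    #                                # re-firing back onto the collection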
+
+    parent_token = attribute.impl.parent_token
+    parent_impl = attribute.impl
+
+    def _acceptable_key_err(child_state, initiator, child_impl):
+        raise ValueError(
+            "Bidirectional attribute conflict detected: "
+            'Passing object %s to attribute "%s" '
+            'triggers a modify event on attribute "%s" '
+            'via the backref "%s".'
+            % (
+                state_str(child_state),
+                initiator.parent_token,
+                child_impl.parent_token,
+                attribute.impl.parent_token,
+            )
+        )
+
+    def emit_backref_from_scalar_set_event(
+        state, child, oldchild, initiator, **kw
+    ):
+        if oldchild is child:
+            return child
+        if (
+            oldchild is not None
+            and oldchild is not PASSIVE_NO_RESULT
+            and oldchild is not NO_VALUE
+        ):
+            # With lazy=None, there's no guarantee that the full collection is
+            # present when updating via a backref.
+            old_state, old_dict = (
+                instance_state(oldchild),
+                instance_dict(oldchild),
+            )
+            impl = old_state.manager[key].impl
+
+            # tokens to test for a recursive loop.
+            if not impl.collection and not impl.dynamic:
+                check_recursive_token = impl._replace_token
+            else:
+                check_recursive_token = impl._remove_token
+
+            if initiator is not check_recursive_token:
+                impl.pop(
+                    old_state,
+                    old_dict,
+                    state.obj(),
+                    parent_impl._append_token,
+                    passive=PASSIVE_NO_FETCH,
+                )
+
+        if child is not None:
+            child_state, child_dict = (
+                instance_state(child),
+                instance_dict(child),
+            )
+            child_impl = child_state.manager[key].impl
+
+            if (
+                initiator.parent_token is not parent_token
+                and initiator.parent_token is not child_impl.parent_token
+            ):
+                _acceptable_key_err(state, initiator, child_impl)
+
+            # tokens to test for a recursive loop.
+            check_append_token = child_impl._append_token
+            check_bulk_replace_token = (
+                child_impl._bulk_replace_token
+                if _is_collection_attribute_impl(child_impl)
+                else None
+            )
+
+            if (
+                initiator is not check_append_token
+                and initiator is not check_bulk_replace_token
+            ):
+                child_impl.append(
+                    child_state,
+                    child_dict,
+                    state.obj(),
+                    initiator,
+                    passive=PASSIVE_NO_FETCH,
+                )
+        return child
+
+    def emit_backref_from_collection_append_event(
+        state, child, initiator, **kw
+    ):
+        if child is None:
+            return
+
+        child_state, child_dict = instance_state(child), instance_dict(child)
+        child_impl = child_state.manager[key].impl
+
+        if (
+            initiator.parent_token is not parent_token
+            and initiator.parent_token is not child_impl.parent_token
+        ):
+            _acceptable_key_err(state, initiator, child_impl)
+
+        # tokens to test for a recursive loop.
+        check_append_token = child_impl._append_token
+        check_bulk_replace_token = (
+            child_impl._bulk_replace_token
+            if _is_collection_attribute_impl(child_impl)
+            else None
+        )
+
+        if (
+            initiator is not check_append_token
+            and initiator is not check_bulk_replace_token
+        ):
+            child_impl.append(
+                child_state,
+                child_dict,
+                state.obj(),
+                initiator,
+                passive=PASSIVE_NO_FETCH,
+            )
+        return child
+
+    def emit_backref_from_collection_remove_event(
+        state, child, initiator, **kw
+    ):
+        if (
+            child is not None
+            and child is not PASSIVE_NO_RESULT
+            and child is not NO_VALUE
+        ):
+            child_state, child_dict = (
+                instance_state(child),
+                instance_dict(child),
+            )
+            child_impl = child_state.manager[key].impl
+
+            check_replace_token: Optional[AttributeEventToken]
+
+            # tokens to test for a recursive loop.
+            if not child_impl.collection and not child_impl.dynamic:
+                check_remove_token = child_impl._remove_token
+                check_replace_token = child_impl._replace_token
+                check_for_dupes_on_remove = uselist and not parent_impl.dynamic
+            else:
+                check_remove_token = child_impl._remove_token
+                check_replace_token = (
+                    child_impl._bulk_replace_token
+                    if _is_collection_attribute_impl(child_impl)
+                    else None
+                )
+                check_for_dupes_on_remove = False
+
+            if (
+                initiator is not check_remove_token
+                and initiator is not check_replace_token
+            ):
+                if not check_for_dupes_on_remove or not util.has_dupes(
+                    # when this event is called, the item is usually
+                    # present in the list, except for a pop() operation.
+                    state.dict[parent_impl.key],
+                    child,
+                ):
+                    child_impl.pop(
+                        child_state,
+                        child_dict,
+                        state.obj(),
+                        initiator,
+                        passive=PASSIVE_NO_FETCH,
+                    )
+
+    if uselist:
+        event.listen(
+            attribute,
+            "append",
+            emit_backref_from_collection_append_event,
+            retval=True,
+            raw=True,
+            include_key=True,
+        )
+    else:
+        event.listen(
+            attribute,
+            "set",
+            emit_backref_from_scalar_set_event,
+            retval=True,
+            raw=True,
+            include_key=True,
+        )
+    # TODO: need coverage in test/orm/ of remove event
+    event.listen(
+        attribute,
+        "remove",
+        emit_backref_from_collection_remove_event,
+        retval=True,
+        raw=True,
+        include_key=True,
+    )
+
+
+_NO_HISTORY = util.symbol("NO_HISTORY")
+_NO_STATE_SYMBOLS = frozenset([id(PASSIVE_NO_RESULT), id(NO_VALUE)])
+
+
+class History(NamedTuple):
+    """A 3-tuple of added, unchanged and deleted values,
+    representing the changes which have occurred on an instrumented
+    attribute.
+
+    The easiest way to get a :class:`.History` object for a particular
+    attribute on an object is to use the :func:`_sa.inspect` function::
+
+        from sqlalchemy import inspect
+
+        hist = inspect(myobject).attrs.myattribute.history
+
+    Each tuple member is an iterable sequence:
+
+    * ``added`` - the collection of items added to the attribute (the first
+      tuple element).
+
+    * ``unchanged`` - the collection of items that have not changed on the
+      attribute (the second tuple element).
+
+    * ``deleted`` - the collection of items that have been removed from the
+      attribute (the third tuple element).
+
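+    For example, a brief illustrative sketch continuing from the above
+    (``hist`` as retrieved via :func:`_sa.inspect`)::
+
+        if hist.has_changes():
+            print("added:", hist.added)
+            print("deleted:", hist.deleted)
+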
+    """
+
+    added: Union[Tuple[()], List[Any]]
+    unchanged: Union[Tuple[()], List[Any]]
+    deleted: Union[Tuple[()], List[Any]]
+
+    def __bool__(self) -> bool:
+        return self != HISTORY_BLANK
+
+    def empty(self) -> bool:
+        """Return True if this :class:`.History` has no changes
+        and no existing, unchanged state.
+
+        """
+
+        return not bool((self.added or self.deleted) or self.unchanged)
+
+    def sum(self) -> Sequence[Any]:
+        """Return a collection of added + unchanged + deleted."""
+
+        return (
+            (self.added or []) + (self.unchanged or []) + (self.deleted or [])
+        )
+
+    def non_deleted(self) -> Sequence[Any]:
+        """Return a collection of added + unchanged."""
+
+        return (self.added or []) + (self.unchanged or [])
+
+    def non_added(self) -> Sequence[Any]:
+        """Return a collection of unchanged + deleted."""
+
+        return (self.unchanged or []) + (self.deleted or [])
+
+    def has_changes(self) -> bool:
+        """Return True if this :class:`.History` has changes."""
+
+        return bool(self.added or self.deleted)
+
+    def _merge(self, added: Iterable[Any], deleted: Iterable[Any]) -> History:
+        return History(
+            list(self.added) + list(added),
+            self.unchanged,
+            list(self.deleted) + list(deleted),
+        )
+
+    def as_state(self) -> History:
+        return History(
+            [
+                (c is not None) and instance_state(c) or None
+                for c in self.added
+            ],
+            [
+                (c is not None) and instance_state(c) or None
+                for c in self.unchanged
+            ],
+            [
+                (c is not None) and instance_state(c) or None
+                for c in self.deleted
+            ],
+        )
+
+    @classmethod
+    def from_scalar_attribute(
+        cls,
+        attribute: ScalarAttributeImpl,
+        state: InstanceState[Any],
+        current: Any,
+    ) -> History:
+        original = state.committed_state.get(attribute.key, _NO_HISTORY)
+
+        deleted: Union[Tuple[()], List[Any]]
+
+        if original is _NO_HISTORY:
+            if current is NO_VALUE:
+                return cls((), (), ())
+            else:
+                return cls((), [current], ())
+        # don't let ClauseElement expressions here trip things up
+        elif (
+            current is not NO_VALUE
+            and attribute.is_equal(current, original) is True
+        ):
+            return cls((), [current], ())
+        else:
+            # current convention on native scalars is to not
+            # include information
+            # about missing previous value in "deleted", but
+            # we do include None, which helps in some primary
+            # key situations
+            if id(original) in _NO_STATE_SYMBOLS:
+                deleted = ()
+                # indicate a "del" operation occurred when we don't have
+                # the previous value as: ([None], (), ())
+                if id(current) in _NO_STATE_SYMBOLS:
+                    current = None
+            else:
+                deleted = [original]
+            if current is NO_VALUE:
+                return cls((), (), deleted)
+            else:
+                return cls([current], (), deleted)
+
+    @classmethod
+    def from_object_attribute(
+        cls,
+        attribute: ScalarObjectAttributeImpl,
+        state: InstanceState[Any],
+        current: Any,
+        original: Any = _NO_HISTORY,
+    ) -> History:
+        deleted: Union[Tuple[()], List[Any]]
+
+        if original is _NO_HISTORY:
+            original = state.committed_state.get(attribute.key, _NO_HISTORY)
+
+        if original is _NO_HISTORY:
+            if current is NO_VALUE:
+                return cls((), (), ())
+            else:
+                return cls((), [current], ())
+        elif current is original and current is not NO_VALUE:
+            return cls((), [current], ())
+        else:
+            # current convention on related objects is to not
+            # include information
+            # about missing previous value in "deleted", and
+            # to also not include None - the dependency.py rules
+            # ignore the None in any case.
+            if id(original) in _NO_STATE_SYMBOLS or original is None:
+                deleted = ()
+                # indicate a "del" operation occurred when we don't have
+                # the previous value as: ([None], (), ())
+                if id(current) in _NO_STATE_SYMBOLS:
+                    current = None
+            else:
+                deleted = [original]
+            if current is NO_VALUE:
+                return cls((), (), deleted)
+            else:
+                return cls([current], (), deleted)
+
+    @classmethod
+    def from_collection(
+        cls,
+        attribute: CollectionAttributeImpl,
+        state: InstanceState[Any],
+        current: Any,
+    ) -> History:
+        original = state.committed_state.get(attribute.key, _NO_HISTORY)
+        if current is NO_VALUE:
+            return cls((), (), ())
+
+        current = getattr(current, "_sa_adapter")
+        if original is NO_VALUE:
+            return cls(list(current), (), ())
+        elif original is _NO_HISTORY:
+            return cls((), list(current), ())
+        else:
+            current_states = [
+                ((c is not None) and instance_state(c) or None, c)
+                for c in current
+            ]
+            original_states = [
+                ((c is not None) and instance_state(c) or None, c)
+                for c in original
+            ]
+
+            current_set = dict(current_states)
+            original_set = dict(original_states)
+
+            return cls(
+                [o for s, o in current_states if s not in original_set],
+                [o for s, o in current_states if s in original_set],
+                [o for s, o in original_states if s not in current_set],
+            )
+
+
+HISTORY_BLANK = History((), (), ())
+
+
+def get_history(
+    obj: object, key: str, passive: PassiveFlag = PASSIVE_OFF
+) -> History:
+    """Return a :class:`.History` record for the given object
+    and attribute key.
+
+    This is the **pre-flush** history for a given attribute, which is
+    reset each time the :class:`.Session` flushes changes to the
+    current database transaction.
+
+    .. note::
+
+        Prefer to use the :attr:`.AttributeState.history` and
+        :meth:`.AttributeState.load_history` accessors to retrieve the
+        :class:`.History` for instance attributes.
+
+
+    :param obj: an object whose class is instrumented by the
+      attributes package.
+
+    :param key: string attribute name.
+
+    :param passive: indicates loading behavior for the attribute
+       if the value is not already present.   This is a
+       bitflag attribute, which defaults to the symbol
+       :attr:`.PASSIVE_OFF` indicating all necessary SQL
+       should be emitted.
+
+    .. seealso::
+
+        :attr:`.AttributeState.history`
+
+        :meth:`.AttributeState.load_history` - retrieve history
+        using loader callables if the value is not locally present.
+
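+    E.g., a minimal illustrative sketch (``user`` is assumed to be a
+    mapped instance with a loaded ``name`` attribute)::
+
+        from sqlalchemy.orm.attributes import get_history
+
+        user.name = "newname"
+        hist = get_history(user, "name")
+        # hist.added == ["newname"]; hist.deleted holds the prior value
+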
+    """
+
+    return get_state_history(instance_state(obj), key, passive)
+
+
+def get_state_history(
+    state: InstanceState[Any], key: str, passive: PassiveFlag = PASSIVE_OFF
+) -> History:
+    return state.get_history(key, passive)
+
+
+def has_parent(
+    cls: Type[_O], obj: _O, key: str, optimistic: bool = False
+) -> bool:
+    """TODO"""
+    manager = manager_of_class(cls)
+    state = instance_state(obj)
+    return manager.has_parent(state, key, optimistic)
+
+
+def register_attribute(
+    class_: Type[_O],
+    key: str,
+    *,
+    comparator: interfaces.PropComparator[_T],
+    parententity: _InternalEntityType[_O],
+    doc: Optional[str] = None,
+    **kw: Any,
+) -> InstrumentedAttribute[_T]:
+    desc = register_descriptor(
+        class_, key, comparator=comparator, parententity=parententity, doc=doc
+    )
+    register_attribute_impl(class_, key, **kw)
+    return desc
+
+
+def register_attribute_impl(
+    class_: Type[_O],
+    key: str,
+    uselist: bool = False,
+    callable_: Optional[_LoaderCallable] = None,
+    useobject: bool = False,
+    impl_class: Optional[Type[AttributeImpl]] = None,
+    backref: Optional[str] = None,
+    **kw: Any,
+) -> QueryableAttribute[Any]:
+    manager = manager_of_class(class_)
+    if uselist:
+        factory = kw.pop("typecallable", None)
+        typecallable = manager.instrument_collection_class(
+            key, factory or list
+        )
+    else:
+        typecallable = kw.pop("typecallable", None)
+
+    dispatch = cast(
+        "_Dispatch[QueryableAttribute[Any]]", manager[key].dispatch
+    )  # noqa: E501
+
+    impl: AttributeImpl
+
+    if impl_class:
+        # TODO: this appears to be the WriteOnlyAttributeImpl /
+        # DynamicAttributeImpl constructor which is hardcoded
+        impl = cast("Type[WriteOnlyAttributeImpl]", impl_class)(
+            class_, key, dispatch, **kw
+        )
+    elif uselist:
+        impl = CollectionAttributeImpl(
+            class_, key, callable_, dispatch, typecallable=typecallable, **kw
+        )
+    elif useobject:
+        impl = ScalarObjectAttributeImpl(
+            class_, key, callable_, dispatch, **kw
+        )
+    else:
+        impl = ScalarAttributeImpl(class_, key, callable_, dispatch, **kw)
+
+    manager[key].impl = impl
+
+    if backref:
+        backref_listeners(manager[key], backref, uselist)
+
+    manager.post_configure_attribute(key)
+    return manager[key]
+
+
+def register_descriptor(
+    class_: Type[Any],
+    key: str,
+    *,
+    comparator: interfaces.PropComparator[_T],
+    parententity: _InternalEntityType[Any],
+    doc: Optional[str] = None,
+) -> InstrumentedAttribute[_T]:
+    manager = manager_of_class(class_)
+
+    descriptor = InstrumentedAttribute(
+        class_, key, comparator=comparator, parententity=parententity
+    )
+
+    descriptor.__doc__ = doc  # type: ignore
+
+    manager.instrument_attribute(key, descriptor)
+    return descriptor
+
+
+def unregister_attribute(class_: Type[Any], key: str) -> None:
+    manager_of_class(class_).uninstrument_attribute(key)
+
+
+def init_collection(obj: object, key: str) -> CollectionAdapter:
+    """Initialize a collection attribute and return the collection adapter.
+
+    This function is used to provide direct access to collection internals
+    for a previously unloaded attribute.  e.g.::
+
+        collection_adapter = init_collection(someobject, "elements")
+        for elem in values:
+            collection_adapter.append_without_event(elem)
+
+    For an easier way to do the above, see
+    :func:`~sqlalchemy.orm.attributes.set_committed_value`.
+
+    :param obj: a mapped object
+
+    :param key: string attribute name where the collection is located.
+
+    """
+    state = instance_state(obj)
+    dict_ = state.dict
+    return init_state_collection(state, dict_, key)
+
+
+def init_state_collection(
+    state: InstanceState[Any], dict_: _InstanceDict, key: str
+) -> CollectionAdapter:
+    """Initialize a collection attribute and return the collection adapter.
+
+    Discards any existing collection which may be there.
+
+    """
+    attr = state.manager[key].impl
+
+    if TYPE_CHECKING:
+        assert isinstance(attr, HasCollectionAdapter)
+
+    old = dict_.pop(key, None)  # discard old collection
+    if old is not None:
+        old_collection = old._sa_adapter
+        attr._dispose_previous_collection(state, old, old_collection, False)
+
+    user_data = attr._default_value(state, dict_)
+    adapter: CollectionAdapter = attr.get_collection(
+        state, dict_, user_data, passive=PassiveFlag.PASSIVE_NO_FETCH
+    )
+    adapter._reset_empty()
+
+    return adapter
+
+
+def set_committed_value(instance, key, value):
+    """Set the value of an attribute with no history events.
+
+    Cancels any previous history present.  The value should be
+    a scalar value for scalar-holding attributes, or
+    an iterable for any collection-holding attribute.
+
+    This is the same underlying method used when a lazy loader
+    fires off and loads additional data from the database.
+    In particular, this method can be used by application code
+    which has loaded additional attributes or collections through
+    separate queries, which can then be attached to an instance
+    as though it were part of its original loaded state.
+
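+    E.g., an illustrative sketch (``User.addresses``, ``Address`` and
+    the query are hypothetical)::
+
+        addresses = session.scalars(
+            select(Address).where(Address.user_id == user.id)
+        ).all()
+        set_committed_value(user, "addresses", addresses)
+        # user.addresses is now populated, with no pending history
+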
+    """
+    state, dict_ = instance_state(instance), instance_dict(instance)
+    state.manager[key].impl.set_committed_value(state, dict_, value)
+
+
+def set_attribute(
+    instance: object,
+    key: str,
+    value: Any,
+    initiator: Optional[AttributeEventToken] = None,
+) -> None:
+    """Set the value of an attribute, firing history events.
+
+    This function may be used regardless of instrumentation
+    applied directly to the class, i.e. no descriptors are required.
+    Custom attribute management schemes will need to use this
+    method to establish attribute state as understood
+    by SQLAlchemy.
+
+    :param instance: the object that will be modified
+
+    :param key: string name of the attribute
+
+    :param value: value to assign
+
+    :param initiator: an instance of :class:`.Event` that would have
+     been propagated from a previous event listener.  This argument
+     is used when the :func:`.set_attribute` function is being used within
+     an existing event listening function where an :class:`.Event` object
+     is being supplied; the object may be used to track the origin of the
+     chain of events.
+
+     .. versionadded:: 1.2.3
+
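+    E.g., an illustrative sketch (``user`` is a hypothetical mapped
+    instance)::
+
+        from sqlalchemy.orm.attributes import set_attribute
+
+        set_attribute(user, "name", "newname")
+        # equivalent to ``user.name = "newname"``, including history events
+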
+    """
+    state, dict_ = instance_state(instance), instance_dict(instance)
+    state.manager[key].impl.set(state, dict_, value, initiator)
+
+
+def get_attribute(instance: object, key: str) -> Any:
+    """Get the value of an attribute, firing any callables required.
+
+    This function may be used regardless of instrumentation
+    applied directly to the class, i.e. no descriptors are required.
+    Custom attribute management schemes will need to use this
+    method to access attribute state as understood
+    by SQLAlchemy.
+
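+    E.g., an illustrative sketch (``user`` is a hypothetical mapped
+    instance)::
+
+        from sqlalchemy.orm.attributes import get_attribute
+
+        name = get_attribute(user, "name")  # fires lazy loaders if needed
+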
+    """
+    state, dict_ = instance_state(instance), instance_dict(instance)
+    return state.manager[key].impl.get(state, dict_)
+
+
+def del_attribute(instance: object, key: str) -> None:
+    """Delete the value of an attribute, firing history events.
+
+    This function may be used regardless of instrumentation
+    applied directly to the class, i.e. no descriptors are required.
+    Custom attribute management schemes will need to use this
+    method to establish attribute state as understood
+    by SQLAlchemy.
+
+    """
+    state, dict_ = instance_state(instance), instance_dict(instance)
+    state.manager[key].impl.delete(state, dict_)
+
+
+def flag_modified(instance: object, key: str) -> None:
+    """Mark an attribute on an instance as 'modified'.
+
+    This sets the 'modified' flag on the instance and
+    establishes an unconditional change event for the given attribute.
+    The attribute must have a value present, else an
+    :class:`.InvalidRequestError` is raised.
+
+    To mark an object "dirty" without referring to any specific attribute
+    so that it is considered within a flush, use the
+    :func:`.attributes.flag_dirty` call.
+
+    .. seealso::
+
+        :func:`.attributes.flag_dirty`
+
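+    E.g., an illustrative sketch (``user.data`` is a hypothetical
+    attribute referring to a mutable structure)::
+
+        user.data["flag"] = True  # in-place mutation, not tracked
+        flag_modified(user, "data")  # ensures the change is flushed
+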
+    """
+    state, dict_ = instance_state(instance), instance_dict(instance)
+    impl = state.manager[key].impl
+    impl.dispatch.modified(state, impl._modified_token)
+    state._modified_event(dict_, impl, NO_VALUE, is_userland=True)
+
+
+def flag_dirty(instance: object) -> None:
+    """Mark an instance as 'dirty' without any specific attribute mentioned.
+
+    This is a special operation that will allow the object to travel through
+    the flush process for interception by events such as
+    :meth:`.SessionEvents.before_flush`.   Note that no SQL will be emitted in
+    the flush process for an object that has no changes, even if marked dirty
+    via this method.  However, a :meth:`.SessionEvents.before_flush` handler
+    will be able to see the object in the :attr:`.Session.dirty` collection and
+    may establish changes on it, which will then be included in the SQL
+    emitted.
+
+    .. versionadded:: 1.2
+
+    .. seealso::
+
+        :func:`.attributes.flag_modified`
+
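+    E.g., an illustrative sketch (``user`` is a hypothetical mapped
+    instance with no attribute changes)::
+
+        flag_dirty(user)
+        assert user in session.dirty  # visible to before_flush handlers
+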
+    """
+
+    state, dict_ = instance_state(instance), instance_dict(instance)
+    state._modified_event(dict_, None, NO_VALUE, is_userland=True)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/base.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/base.py
new file mode 100644
index 00000000..ae0ba102
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/base.py
@@ -0,0 +1,973 @@
+# orm/base.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Constants and rudimental functions used throughout the ORM.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+import operator
+import typing
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Generic
+from typing import no_type_check
+from typing import Optional
+from typing import overload
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import exc
+from ._typing import insp_is_mapper
+from .. import exc as sa_exc
+from .. import inspection
+from .. import util
+from ..sql import roles
+from ..sql.elements import SQLColumnExpression
+from ..sql.elements import SQLCoreOperations
+from ..util import FastIntFlag
+from ..util.langhelpers import TypingOnly
+from ..util.typing import Literal
+
+if typing.TYPE_CHECKING:
+    from ._typing import _EntityType
+    from ._typing import _ExternalEntityType
+    from ._typing import _InternalEntityType
+    from .attributes import InstrumentedAttribute
+    from .dynamic import AppenderQuery
+    from .instrumentation import ClassManager
+    from .interfaces import PropComparator
+    from .mapper import Mapper
+    from .state import InstanceState
+    from .util import AliasedClass
+    from .writeonly import WriteOnlyCollection
+    from ..sql._typing import _ColumnExpressionArgument
+    from ..sql._typing import _InfoType
+    from ..sql.elements import ColumnElement
+    from ..sql.operators import OperatorType
+
+_T = TypeVar("_T", bound=Any)
+_T_co = TypeVar("_T_co", bound=Any, covariant=True)
+
+_O = TypeVar("_O", bound=object)
+
+
+class LoaderCallableStatus(Enum):
+    PASSIVE_NO_RESULT = 0
+    """Symbol returned by a loader callable or other attribute/history
+    retrieval operation when a value could not be determined, based
+    on loader callable flags.
+    """
+
+    PASSIVE_CLASS_MISMATCH = 1
+    """Symbol indicating that an object is locally present for a given
+    primary key identity but it is not of the requested class.  The
+    return value is therefore None and no SQL should be emitted."""
+
+    ATTR_WAS_SET = 2
+    """Symbol returned by a loader callable to indicate the
+    retrieved value, or values, were assigned to their attributes
+    on the target object.
+    """
+
+    ATTR_EMPTY = 3
+    """Symbol used internally to indicate an attribute had no callable."""
+
+    NO_VALUE = 4
+    """Symbol which may be placed as the 'previous' value of an attribute,
+    indicating no value was loaded for an attribute when it was modified,
+    and flags indicated we were not to load it.
+    """
+
+    NEVER_SET = NO_VALUE
+    """
+    Synonymous with NO_VALUE
+
+    .. versionchanged:: 1.4   NEVER_SET was merged with NO_VALUE
+
+    """
+
+
+(
+    PASSIVE_NO_RESULT,
+    PASSIVE_CLASS_MISMATCH,
+    ATTR_WAS_SET,
+    ATTR_EMPTY,
+    NO_VALUE,
+) = tuple(LoaderCallableStatus)
+
+NEVER_SET = NO_VALUE
+
+
+class PassiveFlag(FastIntFlag):
+    """Bitflag interface that passes options onto loader callables"""
+
+    NO_CHANGE = 0
+    """No callables or SQL should be emitted on attribute access
+    and no state should change
+    """
+
+    CALLABLES_OK = 1
+    """Loader callables can be fired off if a value
+    is not present.
+    """
+
+    SQL_OK = 2
+    """Loader callables can emit SQL at least on scalar value attributes."""
+
+    RELATED_OBJECT_OK = 4
+    """Callables can use SQL to load related objects as well
+    as scalar value attributes.
+    """
+
+    INIT_OK = 8
+    """Attributes should be initialized with a blank
+    value (None or an empty collection) upon get, if no other
+    value can be obtained.
+    """
+
+    NON_PERSISTENT_OK = 16
+    """Callables can be emitted if the parent is not persistent."""
+
+    LOAD_AGAINST_COMMITTED = 32
+    """Callables should use committed values as primary/foreign keys during a
+    load.
+    """
+
+    NO_AUTOFLUSH = 64
+    """Loader callables should disable autoflush."""
+
+    NO_RAISE = 128
+    """Loader callables should not raise any assertions"""
+
+    DEFERRED_HISTORY_LOAD = 256
+    """indicates special load of the previous value of an attribute"""
+
+    INCLUDE_PENDING_MUTATIONS = 512
+
+    # pre-packaged sets of flags used as inputs
+    PASSIVE_OFF = (
+        RELATED_OBJECT_OK | NON_PERSISTENT_OK | INIT_OK | CALLABLES_OK | SQL_OK
+    )
+    "Callables can be emitted in all cases."
+
+    PASSIVE_RETURN_NO_VALUE = PASSIVE_OFF ^ INIT_OK
+    """PASSIVE_OFF ^ INIT_OK"""
+
+    PASSIVE_NO_INITIALIZE = PASSIVE_RETURN_NO_VALUE ^ CALLABLES_OK
+    "PASSIVE_RETURN_NO_VALUE ^ CALLABLES_OK"
+
+    PASSIVE_NO_FETCH = PASSIVE_OFF ^ SQL_OK
+    "PASSIVE_OFF ^ SQL_OK"
+
+    PASSIVE_NO_FETCH_RELATED = PASSIVE_OFF ^ RELATED_OBJECT_OK
+    "PASSIVE_OFF ^ RELATED_OBJECT_OK"
+
+    PASSIVE_ONLY_PERSISTENT = PASSIVE_OFF ^ NON_PERSISTENT_OK
+    "PASSIVE_OFF ^ NON_PERSISTENT_OK"
+
+    PASSIVE_MERGE = PASSIVE_OFF | NO_RAISE
+    """PASSIVE_OFF | NO_RAISE
+
+    Symbol used specifically for session.merge() and similar cases
+
+    """
+
+
+(
+    NO_CHANGE,
+    CALLABLES_OK,
+    SQL_OK,
+    RELATED_OBJECT_OK,
+    INIT_OK,
+    NON_PERSISTENT_OK,
+    LOAD_AGAINST_COMMITTED,
+    NO_AUTOFLUSH,
+    NO_RAISE,
+    DEFERRED_HISTORY_LOAD,
+    INCLUDE_PENDING_MUTATIONS,
+    PASSIVE_OFF,
+    PASSIVE_RETURN_NO_VALUE,
+    PASSIVE_NO_INITIALIZE,
+    PASSIVE_NO_FETCH,
+    PASSIVE_NO_FETCH_RELATED,
+    PASSIVE_ONLY_PERSISTENT,
+    PASSIVE_MERGE,
+) = PassiveFlag.__members__.values()
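+
+# an illustrative sketch (not part of the module): the composed flag
+# symbols above are plain bit operations, e.g.
+#
+#     passive = PassiveFlag.PASSIVE_NO_FETCH  # == PASSIVE_OFF ^ SQL_OK
+#     assert not (passive & PassiveFlag.SQL_OK)     # SQL disabled
+#     assert passive & PassiveFlag.CALLABLES_OK     # callables still fire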
+
+DEFAULT_MANAGER_ATTR = "_sa_class_manager"
+DEFAULT_STATE_ATTR = "_sa_instance_state"
+
+
+class EventConstants(Enum):
+    EXT_CONTINUE = 1
+    EXT_STOP = 2
+    EXT_SKIP = 3
+    NO_KEY = 4
+    """indicates an :class:`.AttributeEvent` event that did not have any
+    key argument.
+
+    .. versionadded:: 2.0
+
+    """
+
+
+EXT_CONTINUE, EXT_STOP, EXT_SKIP, NO_KEY = tuple(EventConstants)
+
+
+class RelationshipDirection(Enum):
+    """enumeration which indicates the 'direction' of a
+    :class:`_orm.RelationshipProperty`.
+
+    :class:`.RelationshipDirection` is accessible from the
+    :attr:`_orm.Relationship.direction` attribute of
+    :class:`_orm.RelationshipProperty`.
+
+    """
+
+    ONETOMANY = 1
+    """Indicates the one-to-many direction for a :func:`_orm.relationship`.
+
+    This symbol is typically used by the internals but may be exposed within
+    certain API features.
+
+    """
+
+    MANYTOONE = 2
+    """Indicates the many-to-one direction for a :func:`_orm.relationship`.
+
+    This symbol is typically used by the internals but may be exposed within
+    certain API features.
+
+    """
+
+    MANYTOMANY = 3
+    """Indicates the many-to-many direction for a :func:`_orm.relationship`.
+
+    This symbol is typically used by the internals but may be exposed within
+    certain API features.
+
+    """
+
+
+ONETOMANY, MANYTOONE, MANYTOMANY = tuple(RelationshipDirection)
+
+
+class InspectionAttrExtensionType(Enum):
+    """Symbols indicating the type of extension that a
+    :class:`.InspectionAttr` is part of."""
+
+
+class NotExtension(InspectionAttrExtensionType):
+    NOT_EXTENSION = "not_extension"
+    """Symbol indicating an :class:`InspectionAttr` that's
+    not part of sqlalchemy.ext.
+
+    Is assigned to the :attr:`.InspectionAttr.extension_type`
+    attribute.
+
+    """
+
+
+_never_set = frozenset([NEVER_SET])
+
+_none_set = frozenset([None, NEVER_SET, PASSIVE_NO_RESULT])
+
+_none_only_set = frozenset([None])
+
+_SET_DEFERRED_EXPIRED = util.symbol("SET_DEFERRED_EXPIRED")
+
+_DEFER_FOR_STATE = util.symbol("DEFER_FOR_STATE")
+
+_RAISE_FOR_STATE = util.symbol("RAISE_FOR_STATE")
+
+
+_F = TypeVar("_F", bound=Callable[..., Any])
+_Self = TypeVar("_Self")
+
+
+def _assertions(
+    *assertions: Any,
+) -> Callable[[_F], _F]:
+    @util.decorator
+    def generate(fn: _F, self: _Self, *args: Any, **kw: Any) -> _Self:
+        for assertion in assertions:
+            assertion(self, fn.__name__)
+        fn(self, *args, **kw)
+        return self
+
+    return generate
+
+
+if TYPE_CHECKING:
+
+    def manager_of_class(cls: Type[_O]) -> ClassManager[_O]: ...
+
+    @overload
+    def opt_manager_of_class(cls: AliasedClass[Any]) -> None: ...
+
+    @overload
+    def opt_manager_of_class(
+        cls: _ExternalEntityType[_O],
+    ) -> Optional[ClassManager[_O]]: ...
+
+    def opt_manager_of_class(
+        cls: _ExternalEntityType[_O],
+    ) -> Optional[ClassManager[_O]]: ...
+
+    def instance_state(instance: _O) -> InstanceState[_O]: ...
+
+    def instance_dict(instance: object) -> Dict[str, Any]: ...
+
+else:
+    # these can be replaced by sqlalchemy.ext.instrumentation
+    # if augmented class instrumentation is enabled.
+
+    def manager_of_class(cls):
+        try:
+            return cls.__dict__[DEFAULT_MANAGER_ATTR]
+        except KeyError as ke:
+            raise exc.UnmappedClassError(
+                cls, f"Can't locate an instrumentation manager for class {cls}"
+            ) from ke
+
+    def opt_manager_of_class(cls):
+        return cls.__dict__.get(DEFAULT_MANAGER_ATTR)
+
+    instance_state = operator.attrgetter(DEFAULT_STATE_ATTR)
+
+    instance_dict = operator.attrgetter("__dict__")
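+
+# Illustrative sketch: under default instrumentation, these getters are plain
+# attribute lookups against the instance / class, e.g. for a mapped
+# instance ``obj``:
+#
+#     state = instance_state(obj)            # reads obj._sa_instance_state
+#     manager = manager_of_class(type(obj))  # reads cls._sa_class_manager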
+
+
+def instance_str(instance: object) -> str:
+    """Return a string describing an instance."""
+
+    return state_str(instance_state(instance))
+
+
+def state_str(state: InstanceState[Any]) -> str:
+    """Return a string describing an instance via its InstanceState."""
+
+    if state is None:
+        return "None"
+    else:
+        return "<%s at 0x%x>" % (state.class_.__name__, id(state.obj()))
+
+
+def state_class_str(state: InstanceState[Any]) -> str:
+    """Return a string describing an instance's class via its
+    InstanceState.
+    """
+
+    if state is None:
+        return "None"
+    else:
+        return "<%s>" % (state.class_.__name__,)
+
+
+def attribute_str(instance: object, attribute: str) -> str:
+    return instance_str(instance) + "." + attribute
+
+
+def state_attribute_str(state: InstanceState[Any], attribute: str) -> str:
+    return state_str(state) + "." + attribute
+
+
+def object_mapper(instance: _T) -> Mapper[_T]:
+    """Given an object, return the primary Mapper associated with the object
+    instance.
+
+    Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
+    if no mapping is configured.
+
+    This function is available via the inspection system as::
+
+        inspect(instance).mapper
+
+    Using the inspection system will raise
+    :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
+    not part of a mapping.
+
+    """
+    return object_state(instance).mapper
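+
+# Hedged usage sketch (``some_user`` assumed to be an instance of a mapped
+# class):
+#
+#     mapper = object_mapper(some_user)
+#     # equivalent, via the inspection system:
+#     mapper = inspect(some_user).mapper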
+
+
+def object_state(instance: _T) -> InstanceState[_T]:
+    """Given an object, return the :class:`.InstanceState`
+    associated with the object.
+
+    Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
+    if no mapping is configured.
+
+    Equivalent functionality is available via the :func:`_sa.inspect`
+    function as::
+
+        inspect(instance)
+
+    Using the inspection system will raise
+    :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
+    not part of a mapping.
+
+    """
+    state = _inspect_mapped_object(instance)
+    if state is None:
+        raise exc.UnmappedInstanceError(instance)
+    else:
+        return state
+
+
+@inspection._inspects(object)
+def _inspect_mapped_object(instance: _T) -> Optional[InstanceState[_T]]:
+    try:
+        return instance_state(instance)
+    except (exc.UnmappedClassError,) + exc.NO_STATE:
+        return None
+
+
+def _class_to_mapper(
+    class_or_mapper: Union[Mapper[_T], Type[_T]]
+) -> Mapper[_T]:
+    # can't get mypy to see an overload for this
+    insp = inspection.inspect(class_or_mapper, False)
+    if insp is not None:
+        return insp.mapper  # type: ignore
+    else:
+        assert isinstance(class_or_mapper, type)
+        raise exc.UnmappedClassError(class_or_mapper)
+
+
+def _mapper_or_none(
+    entity: Union[Type[_T], _InternalEntityType[_T]]
+) -> Optional[Mapper[_T]]:
+    """Return the :class:`_orm.Mapper` for the given class or None if the
+    class is not mapped.
+    """
+
+    # can't get mypy to see an overload for this
+    insp = inspection.inspect(entity, False)
+    if insp is not None:
+        return insp.mapper  # type: ignore
+    else:
+        return None
+
+
+def _is_mapped_class(entity: Any) -> bool:
+    """Return True if the given object is a mapped class,
+    :class:`_orm.Mapper`, or :class:`.AliasedClass`.
+    """
+
+    insp = inspection.inspect(entity, False)
+    return (
+        insp is not None
+        and not insp.is_clause_element
+        and (insp.is_mapper or insp.is_aliased_class)
+    )
+
+
+def _is_aliased_class(entity: Any) -> bool:
+    insp = inspection.inspect(entity, False)
+    return insp is not None and getattr(insp, "is_aliased_class", False)
+
+
+@no_type_check
+def _entity_descriptor(entity: _EntityType[Any], key: str) -> Any:
+    """Return a class attribute given an entity and string name.
+
+    May return :class:`.InstrumentedAttribute` or user-defined
+    attribute.
+
+    """
+    insp = inspection.inspect(entity)
+    if insp.is_selectable:
+        description = entity
+        entity = insp.c
+    elif insp.is_aliased_class:
+        entity = insp.entity
+        description = entity
+    elif hasattr(insp, "mapper"):
+        description = entity = insp.mapper.class_
+    else:
+        description = entity
+
+    try:
+        return getattr(entity, key)
+    except AttributeError as err:
+        raise sa_exc.InvalidRequestError(
+            "Entity '%s' has no property '%s'" % (description, key)
+        ) from err
+
+
+if TYPE_CHECKING:
+
+    def _state_mapper(state: InstanceState[_O]) -> Mapper[_O]: ...
+
+else:
+    _state_mapper = util.dottedgetter("manager.mapper")
+
+
+def _inspect_mapped_class(
+    class_: Type[_O], configure: bool = False
+) -> Optional[Mapper[_O]]:
+    try:
+        class_manager = opt_manager_of_class(class_)
+        if class_manager is None or not class_manager.is_mapped:
+            return None
+        mapper = class_manager.mapper
+    except exc.NO_STATE:
+        return None
+    else:
+        if configure:
+            mapper._check_configure()
+        return mapper
+
+
+def _parse_mapper_argument(arg: Union[Mapper[_O], Type[_O]]) -> Mapper[_O]:
+    insp = inspection.inspect(arg, raiseerr=False)
+    if insp_is_mapper(insp):
+        return insp
+
+    raise sa_exc.ArgumentError(f"Mapper or mapped class expected, got {arg!r}")
+
+
+def class_mapper(class_: Type[_O], configure: bool = True) -> Mapper[_O]:
+    """Given a class, return the primary :class:`_orm.Mapper` associated
+    with the key.
+
+    Raises :exc:`.UnmappedClassError` if no mapping is configured
+    on the given class, or :exc:`.ArgumentError` if a non-class
+    object is passed.
+
+    Equivalent functionality is available via the :func:`_sa.inspect`
+    function as::
+
+        inspect(some_mapped_class)
+
+    Using the inspection system will raise
+    :class:`sqlalchemy.exc.NoInspectionAvailable` if the class is not mapped.
+
+    """
+    mapper = _inspect_mapped_class(class_, configure=configure)
+    if mapper is None:
+        if not isinstance(class_, type):
+            raise sa_exc.ArgumentError(
+                "Class object expected, got '%r'." % (class_,)
+            )
+        raise exc.UnmappedClassError(class_)
+    else:
+        return mapper
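+
+# Hedged usage sketch (``User`` assumed to be a mapped class):
+#
+#     mapper = class_mapper(User)
+#     assert mapper is inspect(User)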
+
+
+class InspectionAttr:
+    """A base class applied to all ORM objects and attributes that are
+    related to things that can be returned by the :func:`_sa.inspect` function.
+
+    The attributes defined here allow the usage of simple boolean
+    checks to test basic facts about the object returned.
+
+    While the boolean checks here are basically the same as using
+    the Python isinstance() function, the flags here can be used without
+    the need to import all of these classes, and also such that
+    the SQLAlchemy class system can change while leaving the flags
+    here intact for forwards-compatibility.
+
+    """
+
+    __slots__: Tuple[str, ...] = ()
+
+    is_selectable = False
+    """Return True if this object is an instance of
+    :class:`_expression.Selectable`."""
+
+    is_aliased_class = False
+    """True if this object is an instance of :class:`.AliasedClass`."""
+
+    is_instance = False
+    """True if this object is an instance of :class:`.InstanceState`."""
+
+    is_mapper = False
+    """True if this object is an instance of :class:`_orm.Mapper`."""
+
+    is_bundle = False
+    """True if this object is an instance of :class:`.Bundle`."""
+
+    is_property = False
+    """True if this object is an instance of :class:`.MapperProperty`."""
+
+    is_attribute = False
+    """True if this object is a Python :term:`descriptor`.
+
+    This can refer to one of many types.   Usually a
+    :class:`.QueryableAttribute` which handles attribute events on behalf
+    of a :class:`.MapperProperty`.   It can also be an extension type
+    such as :class:`.AssociationProxy` or :class:`.hybrid_property`.
+    The :attr:`.InspectionAttr.extension_type` will refer to a constant
+    identifying the specific subtype.
+
+    .. seealso::
+
+        :attr:`_orm.Mapper.all_orm_descriptors`
+
+    """
+
+    _is_internal_proxy = False
+    """True if this object is an internal proxy object.
+
+    .. versionadded:: 1.2.12
+
+    """
+
+    is_clause_element = False
+    """True if this object is an instance of
+    :class:`_expression.ClauseElement`."""
+
+    extension_type: InspectionAttrExtensionType = NotExtension.NOT_EXTENSION
+    """The extension type, if any.
+    Defaults to :attr:`.interfaces.NotExtension.NOT_EXTENSION`
+
+    .. seealso::
+
+        :class:`.HybridExtensionType`
+
+        :class:`.AssociationProxyExtensionType`
+
+    """
+
+
+class InspectionAttrInfo(InspectionAttr):
+    """Adds the ``.info`` attribute to :class:`.InspectionAttr`.
+
+    The rationale for :class:`.InspectionAttr` vs. :class:`.InspectionAttrInfo`
+    is that the former is compatible as a mixin for classes that specify
+    ``__slots__``; this is essentially an implementation artifact.
+
+    """
+
+    __slots__ = ()
+
+    @util.ro_memoized_property
+    def info(self) -> _InfoType:
+        """Info dictionary associated with the object, allowing user-defined
+        data to be associated with this :class:`.InspectionAttr`.
+
+        The dictionary is generated when first accessed.  Alternatively,
+        it can be specified as a constructor argument to the
+        :func:`.column_property`, :func:`_orm.relationship`, or
+        :func:`.composite`
+        functions.
+
+        .. seealso::
+
+            :attr:`.QueryableAttribute.info`
+
+            :attr:`.SchemaItem.info`
+
+        """
+        return {}
+
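+# Hedged usage sketch (hypothetical User / Address mapping): the .info
+# dictionary carries arbitrary user data on inspectable attributes, e.g.:
+#
+#     addresses = relationship(Address, info={"audit": True})
+#     # later, via inspection:
+#     assert inspect(User).attrs["addresses"].info["audit"] is True
+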
+
+class SQLORMOperations(SQLCoreOperations[_T_co], TypingOnly):
+    __slots__ = ()
+
+    if typing.TYPE_CHECKING:
+
+        def of_type(
+            self, class_: _EntityType[Any]
+        ) -> PropComparator[_T_co]: ...
+
+        def and_(
+            self, *criteria: _ColumnExpressionArgument[bool]
+        ) -> PropComparator[bool]: ...
+
+        def any(  # noqa: A001
+            self,
+            criterion: Optional[_ColumnExpressionArgument[bool]] = None,
+            **kwargs: Any,
+        ) -> ColumnElement[bool]: ...
+
+        def has(
+            self,
+            criterion: Optional[_ColumnExpressionArgument[bool]] = None,
+            **kwargs: Any,
+        ) -> ColumnElement[bool]: ...
+
+
+class ORMDescriptor(Generic[_T_co], TypingOnly):
+    """Represent any Python descriptor that provides a SQL expression
+    construct at the class level."""
+
+    __slots__ = ()
+
+    if typing.TYPE_CHECKING:
+
+        @overload
+        def __get__(
+            self, instance: Any, owner: Literal[None]
+        ) -> ORMDescriptor[_T_co]: ...
+
+        @overload
+        def __get__(
+            self, instance: Literal[None], owner: Any
+        ) -> SQLCoreOperations[_T_co]: ...
+
+        @overload
+        def __get__(self, instance: object, owner: Any) -> _T_co: ...
+
+        def __get__(
+            self, instance: object, owner: Any
+        ) -> Union[ORMDescriptor[_T_co], SQLCoreOperations[_T_co], _T_co]: ...
+
+
+class _MappedAnnotationBase(Generic[_T_co], TypingOnly):
+    """common class for Mapped and similar ORM container classes.
+
+    these are classes that can appear on the left side of an ORM declarative
+    mapping, containing a mapped class or in some cases a collection
+    surrounding a mapped class.
+
+    """
+
+    __slots__ = ()
+
+
+class SQLORMExpression(
+    SQLORMOperations[_T_co], SQLColumnExpression[_T_co], TypingOnly
+):
+    """A type that may be used to indicate any ORM-level attribute or
+    object that acts in place of one, in the context of SQL expression
+    construction.
+
+    :class:`.SQLORMExpression` extends from the Core
+    :class:`.SQLColumnExpression` to add additional SQL methods that are ORM
+    specific, such as :meth:`.PropComparator.of_type`, and is part of the bases
+    for :class:`.InstrumentedAttribute`. It may be used in :pep:`484` typing to
+    indicate arguments or return values that should behave as ORM-level
+    attribute expressions.
+
+    .. versionadded:: 2.0.0b4
+
+
+    """
+
+    __slots__ = ()
+
+
+class Mapped(
+    SQLORMExpression[_T_co],
+    ORMDescriptor[_T_co],
+    _MappedAnnotationBase[_T_co],
+    roles.DDLConstraintColumnRole,
+):
+    """Represent an ORM mapped attribute on a mapped class.
+
+    This class represents the complete descriptor interface for any class
+    attribute that will have been :term:`instrumented` by the ORM
+    :class:`_orm.Mapper` class.   Provides appropriate information to type
+    checkers such as pylance and mypy so that ORM-mapped attributes
+    are correctly typed.
+
+    The most prominent use of :class:`_orm.Mapped` is in
+    the :ref:`Declarative Mapping <orm_explicit_declarative_base>` form
+    of :class:`_orm.Mapper` configuration, where used explicitly it drives
+    the configuration of ORM attributes such as :func:`_orm.mapped_column`
+    and :func:`_orm.relationship`.
+
+    .. seealso::
+
+        :ref:`orm_explicit_declarative_base`
+
+        :ref:`orm_declarative_table`
+
+    .. tip::
+
+        The :class:`_orm.Mapped` class represents attributes that are handled
+        directly by the :class:`_orm.Mapper` class. It does not include other
+        Python descriptor classes that are provided as extensions, including
+        :ref:`hybrids_toplevel` and the :ref:`associationproxy_toplevel`.
+        While these systems still make use of ORM-specific superclasses
+        and structures, they are not :term:`instrumented` by the
+        :class:`_orm.Mapper` and instead provide their own functionality
+        when they are accessed on a class.
+
+    .. versionadded:: 1.4
+
+
+    """
+
+    __slots__ = ()
+
+    if typing.TYPE_CHECKING:
+
+        @overload
+        def __get__(
+            self, instance: None, owner: Any
+        ) -> InstrumentedAttribute[_T_co]: ...
+
+        @overload
+        def __get__(self, instance: object, owner: Any) -> _T_co: ...
+
+        def __get__(
+            self, instance: Optional[object], owner: Any
+        ) -> Union[InstrumentedAttribute[_T_co], _T_co]: ...
+
+        @classmethod
+        def _empty_constructor(cls, arg1: Any) -> Mapped[_T_co]: ...
+
+        def __set__(
+            self, instance: Any, value: Union[SQLCoreOperations[_T_co], _T_co]
+        ) -> None: ...
+
+        def __delete__(self, instance: Any) -> None: ...
+
+
+class _MappedAttribute(Generic[_T_co], TypingOnly):
+    """Mixin for attributes which should be replaced by mapper-assigned
+    attributes.
+
+    """
+
+    __slots__ = ()
+
+
+class _DeclarativeMapped(Mapped[_T_co], _MappedAttribute[_T_co]):
+    """Mixin for :class:`.MapperProperty` subclasses that allows them to
+    be compatible with ORM-annotated declarative mappings.
+
+    """
+
+    __slots__ = ()
+
+    # MappedSQLExpression, Relationship, Composite etc. don't actually do
+    # SQL expression behavior.  Yet there is code that compares them with
+    # __eq__(), __ne__(), etc.   Since #8847 made Mapped even more
+    # full-featured, including ColumnOperators, we need to have those
+    # methods be no-ops for these objects, so return NotImplemented to
+    # fall back to normal comparison behavior.
+    def operate(self, op: OperatorType, *other: Any, **kwargs: Any) -> Any:
+        return NotImplemented
+
+    __sa_operate__ = operate
+
+    def reverse_operate(
+        self, op: OperatorType, other: Any, **kwargs: Any
+    ) -> Any:
+        return NotImplemented
+
+
+class DynamicMapped(_MappedAnnotationBase[_T_co]):
+    """Represent the ORM mapped attribute type for a "dynamic" relationship.
+
+    The :class:`_orm.DynamicMapped` type annotation may be used in an
+    :ref:`Annotated Declarative Table <orm_declarative_mapped_column>` mapping
+    to indicate that the ``lazy="dynamic"`` loader strategy should be used
+    for a particular :func:`_orm.relationship`.
+
+    .. legacy::  The "dynamic" lazy loader strategy is the legacy form of what
+       is now the "write_only" strategy described in the section
+       :ref:`write_only_relationship`.
+
+    E.g.::
+
+        class User(Base):
+            __tablename__ = "user"
+            id: Mapped[int] = mapped_column(primary_key=True)
+            addresses: DynamicMapped[Address] = relationship(
+                cascade="all,delete-orphan"
+            )
+
+    See the section :ref:`dynamic_relationship` for background.
+
+    .. versionadded:: 2.0
+
+    .. seealso::
+
+        :ref:`dynamic_relationship` - complete background
+
+        :class:`.WriteOnlyMapped` - fully 2.0 style version
+
+    """
+
+    __slots__ = ()
+
+    if TYPE_CHECKING:
+
+        @overload
+        def __get__(
+            self, instance: None, owner: Any
+        ) -> InstrumentedAttribute[_T_co]: ...
+
+        @overload
+        def __get__(
+            self, instance: object, owner: Any
+        ) -> AppenderQuery[_T_co]: ...
+
+        def __get__(
+            self, instance: Optional[object], owner: Any
+        ) -> Union[InstrumentedAttribute[_T_co], AppenderQuery[_T_co]]: ...
+
+        def __set__(
+            self, instance: Any, value: typing.Collection[_T_co]
+        ) -> None: ...
+
+
+class WriteOnlyMapped(_MappedAnnotationBase[_T_co]):
+    """Represent the ORM mapped attribute type for a "write only" relationship.
+
+    The :class:`_orm.WriteOnlyMapped` type annotation may be used in an
+    :ref:`Annotated Declarative Table <orm_declarative_mapped_column>` mapping
+    to indicate that the ``lazy="write_only"`` loader strategy should be used
+    for a particular :func:`_orm.relationship`.
+
+    E.g.::
+
+        class User(Base):
+            __tablename__ = "user"
+            id: Mapped[int] = mapped_column(primary_key=True)
+            addresses: WriteOnlyMapped[Address] = relationship(
+                cascade="all,delete-orphan"
+            )
+
+    See the section :ref:`write_only_relationship` for background.
+
+    .. versionadded:: 2.0
+
+    .. seealso::
+
+        :ref:`write_only_relationship` - complete background
+
+        :class:`.DynamicMapped` - includes legacy :class:`_orm.Query` support
+
+    """
+
+    __slots__ = ()
+
+    if TYPE_CHECKING:
+
+        @overload
+        def __get__(
+            self, instance: None, owner: Any
+        ) -> InstrumentedAttribute[_T_co]: ...
+
+        @overload
+        def __get__(
+            self, instance: object, owner: Any
+        ) -> WriteOnlyCollection[_T_co]: ...
+
+        def __get__(
+            self, instance: Optional[object], owner: Any
+        ) -> Union[
+            InstrumentedAttribute[_T_co], WriteOnlyCollection[_T_co]
+        ]: ...
+
+        def __set__(
+            self, instance: Any, value: typing.Collection[_T_co]
+        ) -> None: ...
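+
+# Hedged usage sketch: a WriteOnlyMapped attribute yields a
+# WriteOnlyCollection which never loads implicitly; reads go through an
+# explicit SELECT (``some_user`` / ``Address`` are hypothetical):
+#
+#     some_user.addresses.add(Address(email="new@x.org"))
+#     stmt = some_user.addresses.select().where(Address.id > 10)
+#     for address in session.scalars(stmt):
+#         ...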
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/bulk_persistence.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/bulk_persistence.py
new file mode 100644
index 00000000..402d7bed
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/bulk_persistence.py
@@ -0,0 +1,2123 @@
+# orm/bulk_persistence.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+"""additional ORM persistence classes related to "bulk" operations,
+specifically outside of the flush() process.
+
+"""
+
+from __future__ import annotations
+
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import Iterable
+from typing import Optional
+from typing import overload
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import attributes
+from . import context
+from . import evaluator
+from . import exc as orm_exc
+from . import loading
+from . import persistence
+from .base import NO_VALUE
+from .context import AbstractORMCompileState
+from .context import FromStatement
+from .context import ORMFromStatementCompileState
+from .context import QueryContext
+from .. import exc as sa_exc
+from .. import util
+from ..engine import Dialect
+from ..engine import result as _result
+from ..sql import coercions
+from ..sql import dml
+from ..sql import expression
+from ..sql import roles
+from ..sql import select
+from ..sql import sqltypes
+from ..sql.base import _entity_namespace_key
+from ..sql.base import CompileState
+from ..sql.base import Options
+from ..sql.dml import DeleteDMLState
+from ..sql.dml import InsertDMLState
+from ..sql.dml import UpdateDMLState
+from ..util import EMPTY_DICT
+from ..util.typing import Literal
+
+if TYPE_CHECKING:
+    from ._typing import DMLStrategyArgument
+    from ._typing import OrmExecuteOptionsParameter
+    from ._typing import SynchronizeSessionArgument
+    from .mapper import Mapper
+    from .session import _BindArguments
+    from .session import ORMExecuteState
+    from .session import Session
+    from .session import SessionTransaction
+    from .state import InstanceState
+    from ..engine import Connection
+    from ..engine import cursor
+    from ..engine.interfaces import _CoreAnyExecuteParams
+
+_O = TypeVar("_O", bound=object)
+
+
+@overload
+def _bulk_insert(
+    mapper: Mapper[_O],
+    mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]],
+    session_transaction: SessionTransaction,
+    *,
+    isstates: bool,
+    return_defaults: bool,
+    render_nulls: bool,
+    use_orm_insert_stmt: Literal[None] = ...,
+    execution_options: Optional[OrmExecuteOptionsParameter] = ...,
+) -> None: ...
+
+
+@overload
+def _bulk_insert(
+    mapper: Mapper[_O],
+    mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]],
+    session_transaction: SessionTransaction,
+    *,
+    isstates: bool,
+    return_defaults: bool,
+    render_nulls: bool,
+    use_orm_insert_stmt: Optional[dml.Insert] = ...,
+    execution_options: Optional[OrmExecuteOptionsParameter] = ...,
+) -> cursor.CursorResult[Any]: ...
+
+
+def _bulk_insert(
+    mapper: Mapper[_O],
+    mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]],
+    session_transaction: SessionTransaction,
+    *,
+    isstates: bool,
+    return_defaults: bool,
+    render_nulls: bool,
+    use_orm_insert_stmt: Optional[dml.Insert] = None,
+    execution_options: Optional[OrmExecuteOptionsParameter] = None,
+) -> Optional[cursor.CursorResult[Any]]:
+    base_mapper = mapper.base_mapper
+
+    if session_transaction.session.connection_callable:
+        raise NotImplementedError(
+            "connection_callable / per-instance sharding "
+            "not supported in bulk_insert()"
+        )
+
+    if isstates:
+        if TYPE_CHECKING:
+            mappings = cast(Iterable[InstanceState[_O]], mappings)
+
+        if return_defaults:
+            # list of states allows us to attach .key for return_defaults case
+            states = [(state, state.dict) for state in mappings]
+            mappings = [dict_ for (state, dict_) in states]
+        else:
+            mappings = [state.dict for state in mappings]
+    else:
+        if TYPE_CHECKING:
+            mappings = cast(Iterable[Dict[str, Any]], mappings)
+
+        if return_defaults:
+            # use dictionaries given, so that newly populated defaults
+            # can be delivered back to the caller (see #11661). This is **not**
+            # compatible with other use cases such as a session-executed
+            # insert() construct, as this will confuse the case of
+            # insert-per-subclass for joined inheritance cases (see
+            # test_bulk_statements.py::BulkDMLReturningJoinedInhTest).
+            #
+            # So in this conditional, we have **only** called
+            # session.bulk_insert_mappings() which does not have this
+            # requirement
+            mappings = list(mappings)
+        else:
+            # for all other cases we need to establish a local dictionary
+            # so that the incoming dictionaries aren't mutated
+            mappings = [dict(m) for m in mappings]
+        _expand_composites(mapper, mappings)
+
+    connection = session_transaction.connection(base_mapper)
+
+    return_result: Optional[cursor.CursorResult[Any]] = None
+
+    mappers_to_run = [
+        (table, mp)
+        for table, mp in base_mapper._sorted_tables.items()
+        if table in mapper._pks_by_table
+    ]
+
+    if return_defaults:
+        # not used by new-style bulk inserts, only used for legacy
+        bookkeeping = True
+    elif len(mappers_to_run) > 1:
+        # if we have more than one (table, mapper) pair to run, where we
+        # will be either horizontally splicing or copying values between
+        # tables, we need the "bookkeeping" / deterministic RETURNING order
+        bookkeeping = True
+    else:
+        bookkeeping = False
+
+    for table, super_mapper in mappers_to_run:
+        # find bindparams in the statement. For bulk, we don't really know if
+        # a key in the params applies to a different table since we are
+        # potentially inserting for multiple tables here; looking at the
+        # bindparam() is a lot more direct.   in most cases this will
+        # use _generate_cache_key() which is memoized, although in practice
+        # the ultimate statement that's executed is probably not the same
+        # object so that memoization might not matter much.
+        extra_bp_names = (
+            [
+                b.key
+                for b in use_orm_insert_stmt._get_embedded_bindparams()
+                if b.key in mappings[0]
+            ]
+            if use_orm_insert_stmt is not None
+            else ()
+        )
+
+        records = (
+            (
+                None,
+                state_dict,
+                params,
+                mapper,
+                connection,
+                value_params,
+                has_all_pks,
+                has_all_defaults,
+            )
+            for (
+                state,
+                state_dict,
+                params,
+                mp,
+                conn,
+                value_params,
+                has_all_pks,
+                has_all_defaults,
+            ) in persistence._collect_insert_commands(
+                table,
+                ((None, mapping, mapper, connection) for mapping in mappings),
+                bulk=True,
+                return_defaults=bookkeeping,
+                render_nulls=render_nulls,
+                include_bulk_keys=extra_bp_names,
+            )
+        )
+
+        result = persistence._emit_insert_statements(
+            base_mapper,
+            None,
+            super_mapper,
+            table,
+            records,
+            bookkeeping=bookkeeping,
+            use_orm_insert_stmt=use_orm_insert_stmt,
+            execution_options=execution_options,
+        )
+        if use_orm_insert_stmt is not None:
+            if not use_orm_insert_stmt._returning or return_result is None:
+                return_result = result
+            elif result.returns_rows:
+                assert bookkeeping
+                return_result = return_result.splice_horizontally(result)
+
+    if return_defaults and isstates:
+        identity_cls = mapper._identity_class
+        identity_props = [p.key for p in mapper._identity_key_props]
+        for state, dict_ in states:
+            state.key = (
+                identity_cls,
+                tuple([dict_[key] for key in identity_props]),
+                None,
+            )
+
+    if use_orm_insert_stmt is not None:
+        assert return_result is not None
+        return return_result
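+
+# Hedged usage sketch: the legacy Session.bulk_insert_mappings() API funnels
+# into _bulk_insert() with isstates=False (``User`` here is hypothetical):
+#
+#     session.bulk_insert_mappings(
+#         User, [{"name": "a"}, {"name": "b"}], return_defaults=True
+#     )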
+
+
+@overload
+def _bulk_update(
+    mapper: Mapper[Any],
+    mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]],
+    session_transaction: SessionTransaction,
+    *,
+    isstates: bool,
+    update_changed_only: bool,
+    use_orm_update_stmt: Literal[None] = ...,
+    enable_check_rowcount: bool = True,
+) -> None: ...
+
+
+@overload
+def _bulk_update(
+    mapper: Mapper[Any],
+    mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]],
+    session_transaction: SessionTransaction,
+    *,
+    isstates: bool,
+    update_changed_only: bool,
+    use_orm_update_stmt: Optional[dml.Update] = ...,
+    enable_check_rowcount: bool = True,
+) -> _result.Result[Any]: ...
+
+
+def _bulk_update(
+    mapper: Mapper[Any],
+    mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]],
+    session_transaction: SessionTransaction,
+    *,
+    isstates: bool,
+    update_changed_only: bool,
+    use_orm_update_stmt: Optional[dml.Update] = None,
+    enable_check_rowcount: bool = True,
+) -> Optional[_result.Result[Any]]:
+    base_mapper = mapper.base_mapper
+
+    search_keys = mapper._primary_key_propkeys
+    if mapper._version_id_prop:
+        search_keys = {mapper._version_id_prop.key}.union(search_keys)
+
+    def _changed_dict(mapper, state):
+        return {
+            k: v
+            for k, v in state.dict.items()
+            if k in state.committed_state or k in search_keys
+        }
+
+    if isstates:
+        if update_changed_only:
+            mappings = [_changed_dict(mapper, state) for state in mappings]
+        else:
+            mappings = [state.dict for state in mappings]
+    else:
+        mappings = [dict(m) for m in mappings]
+        _expand_composites(mapper, mappings)
+
+    if session_transaction.session.connection_callable:
+        raise NotImplementedError(
+            "connection_callable / per-instance sharding "
+            "not supported in bulk_update()"
+        )
+
+    connection = session_transaction.connection(base_mapper)
+
+    # find bindparams in the statement. see _bulk_insert for similar
+    # notes for the insert case
+    extra_bp_names = (
+        [
+            b.key
+            for b in use_orm_update_stmt._get_embedded_bindparams()
+            if b.key in mappings[0]
+        ]
+        if use_orm_update_stmt is not None
+        else ()
+    )
+
+    for table, super_mapper in base_mapper._sorted_tables.items():
+        if not mapper.isa(super_mapper) or table not in mapper._pks_by_table:
+            continue
+
+        records = persistence._collect_update_commands(
+            None,
+            table,
+            (
+                (
+                    None,
+                    mapping,
+                    mapper,
+                    connection,
+                    (
+                        mapping[mapper._version_id_prop.key]
+                        if mapper._version_id_prop
+                        else None
+                    ),
+                )
+                for mapping in mappings
+            ),
+            bulk=True,
+            use_orm_update_stmt=use_orm_update_stmt,
+            include_bulk_keys=extra_bp_names,
+        )
+        persistence._emit_update_statements(
+            base_mapper,
+            None,
+            super_mapper,
+            table,
+            records,
+            bookkeeping=False,
+            use_orm_update_stmt=use_orm_update_stmt,
+            enable_check_rowcount=enable_check_rowcount,
+        )
+
+    if use_orm_update_stmt is not None:
+        return _result.null_result()
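+
+# Hedged usage sketch: the legacy Session.bulk_update_mappings() API funnels
+# into _bulk_update(); each mapping must include the primary key (``User``
+# is hypothetical):
+#
+#     session.bulk_update_mappings(
+#         User, [{"id": 1, "name": "x"}, {"id": 2, "name": "y"}]
+#     )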
+
+
+def _expand_composites(mapper, mappings):
+    composite_attrs = mapper.composites
+    if not composite_attrs:
+        return
+
+    composite_keys = set(composite_attrs.keys())
+    populators = {
+        key: composite_attrs[key]._populate_composite_bulk_save_mappings_fn()
+        for key in composite_keys
+    }
+    for mapping in mappings:
+        for key in composite_keys.intersection(mapping):
+            populators[key](mapping)
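+
+# Hedged illustration (hypothetical Vertex mapping with a composite ``start``
+# over columns ``x1`` / ``y1``): each mapping is rewritten in place, e.g.
+# {"start": Point(1, 2)} becomes {"x1": 1, "y1": 2}.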
+
+
+class ORMDMLState(AbstractORMCompileState):
+    is_dml_returning = True
+    from_statement_ctx: Optional[ORMFromStatementCompileState] = None
+
+    @classmethod
+    def _get_orm_crud_kv_pairs(
+        cls, mapper, statement, kv_iterator, needs_to_be_cacheable
+    ):
+        core_get_crud_kv_pairs = UpdateDMLState._get_crud_kv_pairs
+
+        for k, v in kv_iterator:
+            k = coercions.expect(roles.DMLColumnRole, k)
+
+            if isinstance(k, str):
+                desc = _entity_namespace_key(mapper, k, default=NO_VALUE)
+                if desc is NO_VALUE:
+                    yield (
+                        coercions.expect(roles.DMLColumnRole, k),
+                        (
+                            coercions.expect(
+                                roles.ExpressionElementRole,
+                                v,
+                                type_=sqltypes.NullType(),
+                                is_crud=True,
+                            )
+                            if needs_to_be_cacheable
+                            else v
+                        ),
+                    )
+                else:
+                    yield from core_get_crud_kv_pairs(
+                        statement,
+                        desc._bulk_update_tuples(v),
+                        needs_to_be_cacheable,
+                    )
+            elif "entity_namespace" in k._annotations:
+                k_anno = k._annotations
+                attr = _entity_namespace_key(
+                    k_anno["entity_namespace"], k_anno["proxy_key"]
+                )
+                yield from core_get_crud_kv_pairs(
+                    statement,
+                    attr._bulk_update_tuples(v),
+                    needs_to_be_cacheable,
+                )
+            else:
+                yield (
+                    k,
+                    (
+                        v
+                        if not needs_to_be_cacheable
+                        else coercions.expect(
+                            roles.ExpressionElementRole,
+                            v,
+                            type_=sqltypes.NullType(),
+                            is_crud=True,
+                        )
+                    ),
+                )
+
+    @classmethod
+    def _get_multi_crud_kv_pairs(cls, statement, kv_iterator):
+        plugin_subject = statement._propagate_attrs["plugin_subject"]
+
+        if not plugin_subject or not plugin_subject.mapper:
+            return UpdateDMLState._get_multi_crud_kv_pairs(
+                statement, kv_iterator
+            )
+
+        return [
+            dict(
+                cls._get_orm_crud_kv_pairs(
+                    plugin_subject.mapper, statement, value_dict.items(), False
+                )
+            )
+            for value_dict in kv_iterator
+        ]
+
+    @classmethod
+    def _get_crud_kv_pairs(cls, statement, kv_iterator, needs_to_be_cacheable):
+        assert (
+            needs_to_be_cacheable
+        ), "no test coverage for needs_to_be_cacheable=False"
+
+        plugin_subject = statement._propagate_attrs["plugin_subject"]
+
+        if not plugin_subject or not plugin_subject.mapper:
+            return UpdateDMLState._get_crud_kv_pairs(
+                statement, kv_iterator, needs_to_be_cacheable
+            )
+
+        return list(
+            cls._get_orm_crud_kv_pairs(
+                plugin_subject.mapper,
+                statement,
+                kv_iterator,
+                needs_to_be_cacheable,
+            )
+        )
+
+    @classmethod
+    def get_entity_description(cls, statement):
+        ext_info = statement.table._annotations["parententity"]
+        mapper = ext_info.mapper
+        if ext_info.is_aliased_class:
+            _label_name = ext_info.name
+        else:
+            _label_name = mapper.class_.__name__
+
+        return {
+            "name": _label_name,
+            "type": mapper.class_,
+            "expr": ext_info.entity,
+            "entity": ext_info.entity,
+            "table": mapper.local_table,
+        }
+
+    @classmethod
+    def get_returning_column_descriptions(cls, statement):
+        def _ent_for_col(c):
+            return c._annotations.get("parententity", None)
+
+        def _attr_for_col(c, ent):
+            if ent is None:
+                return c
+            proxy_key = c._annotations.get("proxy_key", None)
+            if not proxy_key:
+                return c
+            else:
+                return getattr(ent.entity, proxy_key, c)
+
+        return [
+            {
+                "name": c.key,
+                "type": c.type,
+                "expr": _attr_for_col(c, ent),
+                "aliased": ent.is_aliased_class,
+                "entity": ent.entity,
+            }
+            for c, ent in [
+                (c, _ent_for_col(c)) for c in statement._all_selected_columns
+            ]
+        ]
+
+    def _setup_orm_returning(
+        self,
+        compiler,
+        orm_level_statement,
+        dml_level_statement,
+        dml_mapper,
+        *,
+        use_supplemental_cols=True,
+    ):
+        """establish ORM column handlers for an INSERT, UPDATE, or DELETE
+        which uses explicit returning().
+
+        Called within the compile-level create_for_statement().
+
+        The _return_orm_returning() method then receives the Result
+        after the statement was executed, and applies ORM loading to the
+        state that we first established here.
+
+        """
+
+        if orm_level_statement._returning:
+            fs = FromStatement(
+                orm_level_statement._returning,
+                dml_level_statement,
+                _adapt_on_names=False,
+            )
+            fs = fs.execution_options(**orm_level_statement._execution_options)
+            fs = fs.options(*orm_level_statement._with_options)
+            self.select_statement = fs
+            self.from_statement_ctx = fsc = (
+                ORMFromStatementCompileState.create_for_statement(fs, compiler)
+            )
+            fsc.setup_dml_returning_compile_state(dml_mapper)
+
+            dml_level_statement = dml_level_statement._generate()
+            dml_level_statement._returning = ()
+
+            cols_to_return = [c for c in fsc.primary_columns if c is not None]
+
+            # since we are splicing result sets together, make sure there
+            # are columns of some kind returned in each result set
+            if not cols_to_return:
+                cols_to_return.extend(dml_mapper.primary_key)
+
+            if use_supplemental_cols:
+                dml_level_statement = dml_level_statement.return_defaults(
+                    # this is a little weird looking, but by passing
+                    # primary key as the main list of cols, this tells
+                    # return_defaults to omit server-default cols (and
+                    # actually all cols, due to some weird thing we should
+                    # clean up in crud.py).
+                    # Since we have cols_to_return, just return what we asked
+                    # for (plus primary key, which ORM persistence needs since
+                    # we likely set bookkeeping=True here, which is another
+                    # whole thing...).   We don't want to clutter the
+                    # statement up with lots of other cols the user didn't
+                    # ask for.  see #9685
+                    *dml_mapper.primary_key,
+                    supplemental_cols=cols_to_return,
+                )
+            else:
+                dml_level_statement = dml_level_statement.returning(
+                    *cols_to_return
+                )
+
+        return dml_level_statement
+
+    @classmethod
+    def _return_orm_returning(
+        cls,
+        session,
+        statement,
+        params,
+        execution_options,
+        bind_arguments,
+        result,
+    ):
+        execution_context = result.context
+        compile_state = execution_context.compiled.compile_state
+
+        if (
+            compile_state.from_statement_ctx
+            and not compile_state.from_statement_ctx.compile_options._is_star
+        ):
+            load_options = execution_options.get(
+                "_sa_orm_load_options", QueryContext.default_load_options
+            )
+
+            querycontext = QueryContext(
+                compile_state.from_statement_ctx,
+                compile_state.select_statement,
+                statement,
+                params,
+                session,
+                load_options,
+                execution_options,
+                bind_arguments,
+            )
+            return loading.instances(result, querycontext)
+        else:
+            return result
+
+
+class BulkUDCompileState(ORMDMLState):
+    class default_update_options(Options):
+        _dml_strategy: DMLStrategyArgument = "auto"
+        _synchronize_session: SynchronizeSessionArgument = "auto"
+        _can_use_returning: bool = False
+        _is_delete_using: bool = False
+        _is_update_from: bool = False
+        _autoflush: bool = True
+        _subject_mapper: Optional[Mapper[Any]] = None
+        _resolved_values = EMPTY_DICT
+        _eval_condition = None
+        _matched_rows = None
+        _identity_token = None
+        _populate_existing: bool = False
+
+    @classmethod
+    def can_use_returning(
+        cls,
+        dialect: Dialect,
+        mapper: Mapper[Any],
+        *,
+        is_multitable: bool = False,
+        is_update_from: bool = False,
+        is_delete_using: bool = False,
+        is_executemany: bool = False,
+    ) -> bool:
+        raise NotImplementedError()
+
+    @classmethod
+    def orm_pre_session_exec(
+        cls,
+        session,
+        statement,
+        params,
+        execution_options,
+        bind_arguments,
+        is_pre_event,
+    ):
+        (
+            update_options,
+            execution_options,
+        ) = BulkUDCompileState.default_update_options.from_execution_options(
+            "_sa_orm_update_options",
+            {
+                "synchronize_session",
+                "autoflush",
+                "populate_existing",
+                "identity_token",
+                "is_delete_using",
+                "is_update_from",
+                "dml_strategy",
+            },
+            execution_options,
+            statement._execution_options,
+        )
+        bind_arguments["clause"] = statement
+        try:
+            plugin_subject = statement._propagate_attrs["plugin_subject"]
+        except KeyError:
+            assert False, "statement had 'orm' plugin but no plugin_subject"
+        else:
+            if plugin_subject:
+                bind_arguments["mapper"] = plugin_subject.mapper
+                update_options += {"_subject_mapper": plugin_subject.mapper}
+
+        if "parententity" not in statement.table._annotations:
+            update_options += {"_dml_strategy": "core_only"}
+        elif not isinstance(params, list):
+            if update_options._dml_strategy == "auto":
+                update_options += {"_dml_strategy": "orm"}
+            elif update_options._dml_strategy == "bulk":
+                raise sa_exc.InvalidRequestError(
+                    'Can\'t use "bulk" ORM insert strategy without '
+                    "passing separate parameters"
+                )
+        else:
+            if update_options._dml_strategy == "auto":
+                update_options += {"_dml_strategy": "bulk"}
+
+        sync = update_options._synchronize_session
+        if sync is not None:
+            if sync not in ("auto", "evaluate", "fetch", False):
+                raise sa_exc.ArgumentError(
+                    "Valid strategies for session synchronization "
+                    "are 'auto', 'evaluate', 'fetch', False"
+                )
+            if update_options._dml_strategy == "bulk" and sync == "fetch":
+                raise sa_exc.InvalidRequestError(
+                    "The 'fetch' synchronization strategy is not available "
+                    "for 'bulk' ORM updates (i.e. multiple parameter sets)"
+                )
+
+        if not is_pre_event:
+            if update_options._autoflush:
+                session._autoflush()
+
+            if update_options._dml_strategy == "orm":
+                if update_options._synchronize_session == "auto":
+                    update_options = cls._do_pre_synchronize_auto(
+                        session,
+                        statement,
+                        params,
+                        execution_options,
+                        bind_arguments,
+                        update_options,
+                    )
+                elif update_options._synchronize_session == "evaluate":
+                    update_options = cls._do_pre_synchronize_evaluate(
+                        session,
+                        statement,
+                        params,
+                        execution_options,
+                        bind_arguments,
+                        update_options,
+                    )
+                elif update_options._synchronize_session == "fetch":
+                    update_options = cls._do_pre_synchronize_fetch(
+                        session,
+                        statement,
+                        params,
+                        execution_options,
+                        bind_arguments,
+                        update_options,
+                    )
+            elif update_options._dml_strategy == "bulk":
+                if update_options._synchronize_session == "auto":
+                    update_options += {"_synchronize_session": "evaluate"}
+
+            # indicators from the "pre exec" step that are then
+            # added to the DML statement, which will also be part of the cache
+            # key.  The compile level create_for_statement() method will then
+            # consume these at compiler time.
+            statement = statement._annotate(
+                {
+                    "synchronize_session": update_options._synchronize_session,
+                    "is_delete_using": update_options._is_delete_using,
+                    "is_update_from": update_options._is_update_from,
+                    "dml_strategy": update_options._dml_strategy,
+                    "can_use_returning": update_options._can_use_returning,
+                }
+            )
+
+        return (
+            statement,
+            util.immutabledict(execution_options).union(
+                {"_sa_orm_update_options": update_options}
+            ),
+        )
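+
+    # Hedged usage sketch: the options consumed above originate from
+    # execution options on an ORM-enabled statement (``User`` is
+    # hypothetical):
+    #
+    #     session.execute(
+    #         update(User).where(User.name == "x").values(status="y"),
+    #         execution_options={"synchronize_session": "fetch"},
+    #     )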
+
+    @classmethod
+    def orm_setup_cursor_result(
+        cls,
+        session,
+        statement,
+        params,
+        execution_options,
+        bind_arguments,
+        result,
+    ):
+        # this stage of the execution is called after the
+        # do_orm_execute event hook.  meaning for an extension like
+        # horizontal sharding, this step happens *within* the horizontal
+        # sharding event handler which calls session.execute() re-entrantly
+        # and will occur for each backend individually.
+        # the sharding extension then returns its own merged result from the
+        # individual ones we return here.
+
+        update_options = execution_options["_sa_orm_update_options"]
+        if update_options._dml_strategy == "orm":
+            if update_options._synchronize_session == "evaluate":
+                cls._do_post_synchronize_evaluate(
+                    session, statement, result, update_options
+                )
+            elif update_options._synchronize_session == "fetch":
+                cls._do_post_synchronize_fetch(
+                    session, statement, result, update_options
+                )
+        elif update_options._dml_strategy == "bulk":
+            if update_options._synchronize_session == "evaluate":
+                cls._do_post_synchronize_bulk_evaluate(
+                    session, params, result, update_options
+                )
+            return result
+
+        return cls._return_orm_returning(
+            session,
+            statement,
+            params,
+            execution_options,
+            bind_arguments,
+            result,
+        )
+
+    @classmethod
+    def _adjust_for_extra_criteria(cls, global_attributes, ext_info):
+        """Apply extra criteria filtering.
+
+        For all distinct single-table-inheritance mappers represented in the
+        table being updated or deleted, produce additional WHERE criteria such
+        that only the appropriate subtypes are selected from the total results.
+
+        Additionally, add WHERE criteria originating from LoaderCriteriaOptions
+        collected from the statement.
+
+        """
+
+        return_crit = ()
+
+        adapter = ext_info._adapter if ext_info.is_aliased_class else None
+
+        if (
+            "additional_entity_criteria",
+            ext_info.mapper,
+        ) in global_attributes:
+            return_crit += tuple(
+                ae._resolve_where_criteria(ext_info)
+                for ae in global_attributes[
+                    ("additional_entity_criteria", ext_info.mapper)
+                ]
+                if ae.include_aliases or ae.entity is ext_info
+            )
+
+        if ext_info.mapper._single_table_criterion is not None:
+            return_crit += (ext_info.mapper._single_table_criterion,)
+
+        if adapter:
+            return_crit = tuple(adapter.traverse(crit) for crit in return_crit)
+
+        return return_crit
+
+    @classmethod
+    def _interpret_returning_rows(cls, result, mapper, rows):
+        """return rows that indicate PK cols in mapper.primary_key position
+        for RETURNING rows.
+
+        Prior to 2.0.36, this method seemed to be written for some kind of
+        inheritance scenario but the scenario was unused for actual joined
+        inheritance, and the function instead seemed to perform some kind of
+        partial translation that would remove non-PK cols if the PK cols
+        happened to be first in the row, but not otherwise.  The joined
+        inheritance walk feature here seems to have never been used as it was
+        always skipped by the "local_table" check.
+
+        As of 2.0.36 the function strips away non-PK cols and provides the
+        PK cols for the table in mapper PK order.
+
+        """
+
+        try:
+            if mapper.local_table is not mapper.base_mapper.local_table:
+                # TODO: dive more into how a local table PK is used for fetch
+                # sync, not clear if this is correct as it depends on the
+                # downstream routine to fetch rows using
+                # local_table.primary_key order
+                pk_keys = result._tuple_getter(mapper.local_table.primary_key)
+            else:
+                pk_keys = result._tuple_getter(mapper.primary_key)
+        except KeyError:
+            # can't use these rows, they don't have PK cols in them
+            # this is an unusual case where the user would have used
+            # .return_defaults()
+            return []
+
+        return [pk_keys(row) for row in rows]
+
+    @classmethod
+    def _get_matched_objects_on_criteria(cls, update_options, states):
+        mapper = update_options._subject_mapper
+        eval_condition = update_options._eval_condition
+
+        raw_data = [
+            (state.obj(), state, state.dict)
+            for state in states
+            if state.mapper.isa(mapper) and not state.expired
+        ]
+
+        identity_token = update_options._identity_token
+        if identity_token is not None:
+            raw_data = [
+                (obj, state, dict_)
+                for obj, state, dict_ in raw_data
+                if state.identity_token == identity_token
+            ]
+
+        result = []
+        for obj, state, dict_ in raw_data:
+            evaled_condition = eval_condition(obj)
+
+            # caution: don't use "in ()" or == here, _EXPIRED_OBJECT
+            # evaluates as True for all comparisons
+            if (
+                evaled_condition is True
+                or evaled_condition is evaluator._EXPIRED_OBJECT
+            ):
+                result.append(
+                    (
+                        obj,
+                        state,
+                        dict_,
+                        evaled_condition is evaluator._EXPIRED_OBJECT,
+                    )
+                )
+        return result
+
+    @classmethod
+    def _eval_condition_from_statement(cls, update_options, statement):
+        mapper = update_options._subject_mapper
+        target_cls = mapper.class_
+
+        evaluator_compiler = evaluator._EvaluatorCompiler(target_cls)
+        crit = ()
+        if statement._where_criteria:
+            crit += statement._where_criteria
+
+        global_attributes = {}
+        for opt in statement._with_options:
+            if opt._is_criteria_option:
+                opt.get_global_criteria(global_attributes)
+
+        if global_attributes:
+            crit += cls._adjust_for_extra_criteria(global_attributes, mapper)
+
+        if crit:
+            eval_condition = evaluator_compiler.process(*crit)
+        else:
+            # workaround for mypy https://github.com/python/mypy/issues/14027
+            def _eval_condition(obj):
+                return True
+
+            eval_condition = _eval_condition
+
+        return eval_condition
+
+    @classmethod
+    def _do_pre_synchronize_auto(
+        cls,
+        session,
+        statement,
+        params,
+        execution_options,
+        bind_arguments,
+        update_options,
+    ):
+        """setup auto sync strategy
+
+
+        "auto" checks if we can use "evaluate" first, then falls back
+        to "fetch"
+
+        evaluate is vastly more efficient for the common case
+        where session is empty, only has a few objects, and the UPDATE
+        statement can potentially match thousands/millions of rows.
+
+        OTOH, more complex criteria that fail to work with "evaluate"
+        will, we hope, usually correlate with fewer net rows.
+
+        """
+
+        try:
+            eval_condition = cls._eval_condition_from_statement(
+                update_options, statement
+            )
+
+        except evaluator.UnevaluatableError:
+            pass
+        else:
+            return update_options + {
+                "_eval_condition": eval_condition,
+                "_synchronize_session": "evaluate",
+            }
+
+        update_options += {"_synchronize_session": "fetch"}
+        return cls._do_pre_synchronize_fetch(
+            session,
+            statement,
+            params,
+            execution_options,
+            bind_arguments,
+            update_options,
+        )
+
+    @classmethod
+    def _do_pre_synchronize_evaluate(
+        cls,
+        session,
+        statement,
+        params,
+        execution_options,
+        bind_arguments,
+        update_options,
+    ):
+        try:
+            eval_condition = cls._eval_condition_from_statement(
+                update_options, statement
+            )
+
+        except evaluator.UnevaluatableError as err:
+            raise sa_exc.InvalidRequestError(
+                'Could not evaluate current criteria in Python: "%s". '
+                "Specify 'fetch' or False for the "
+                "synchronize_session execution option." % err
+            ) from err
+
+        return update_options + {
+            "_eval_condition": eval_condition,
+        }
+
+    @classmethod
+    def _get_resolved_values(cls, mapper, statement):
+        if statement._multi_values:
+            return []
+        elif statement._ordered_values:
+            return list(statement._ordered_values)
+        elif statement._values:
+            return list(statement._values.items())
+        else:
+            return []
+
+    @classmethod
+    def _resolved_keys_as_propnames(cls, mapper, resolved_values):
+        values = []
+        for k, v in resolved_values:
+            if mapper and isinstance(k, expression.ColumnElement):
+                try:
+                    attr = mapper._columntoproperty[k]
+                except orm_exc.UnmappedColumnError:
+                    pass
+                else:
+                    values.append((attr.key, v))
+            else:
+                raise sa_exc.InvalidRequestError(
+                    "Attribute name not found, can't be "
+                    "synchronized back to objects: %r" % k
+                )
+        return values
+
+    @classmethod
+    def _do_pre_synchronize_fetch(
+        cls,
+        session,
+        statement,
+        params,
+        execution_options,
+        bind_arguments,
+        update_options,
+    ):
+        mapper = update_options._subject_mapper
+
+        select_stmt = (
+            select(*(mapper.primary_key + (mapper.select_identity_token,)))
+            .select_from(mapper)
+            .options(*statement._with_options)
+        )
+        select_stmt._where_criteria = statement._where_criteria
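+        # the pre-fetch statement is therefore roughly of the form
+        # (an illustrative sketch assuming a single-column primary key):
+        #     SELECT tbl.pk, <identity_token> FROM tbl WHERE <criteria>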
+
+        # conditionally run the SELECT statement for pre-fetch, testing the
+        # "bind" for if we can use RETURNING or not using the do_orm_execute
+        # event.  If RETURNING is available, the do_orm_execute event
+        # will cancel the SELECT from being actually run.
+        #
+        # The way this is organized may seem strange: why don't we just
+        # call can_use_returning() before invoking the statement to get the
+        # answer, instead of going through the whole execute phase using an
+        # event?  Answer: because we are integrating with extensions such
+        # as the horizontal sharding extension that "multiplex" an
+        # individual statement run across multiple engines, and they use
+        # do_orm_execute() to do that.
+
+        can_use_returning = None
+
+        def skip_for_returning(orm_context: ORMExecuteState) -> Any:
+            bind = orm_context.session.get_bind(**orm_context.bind_arguments)
+            nonlocal can_use_returning
+
+            per_bind_result = cls.can_use_returning(
+                bind.dialect,
+                mapper,
+                is_update_from=update_options._is_update_from,
+                is_delete_using=update_options._is_delete_using,
+                is_executemany=orm_context.is_executemany,
+            )
+
+            if can_use_returning is not None:
+                if can_use_returning != per_bind_result:
+                    raise sa_exc.InvalidRequestError(
+                        "For synchronize_session='fetch', can't mix multiple "
+                        "backends where some support RETURNING and others "
+                        "don't"
+                    )
+            elif orm_context.is_executemany and not per_bind_result:
+                raise sa_exc.InvalidRequestError(
+                    "For synchronize_session='fetch', can't use multiple "
+                    "parameter sets in ORM mode, which this backend does not "
+                    "support with RETURNING"
+                )
+            else:
+                can_use_returning = per_bind_result
+
+            if per_bind_result:
+                return _result.null_result()
+            else:
+                return None
+
+        result = session.execute(
+            select_stmt,
+            params,
+            execution_options=execution_options,
+            bind_arguments=bind_arguments,
+            _add_event=skip_for_returning,
+        )
+        matched_rows = result.fetchall()
+
+        return update_options + {
+            "_matched_rows": matched_rows,
+            "_can_use_returning": can_use_returning,
+        }
+
+
+@CompileState.plugin_for("orm", "insert")
+class BulkORMInsert(ORMDMLState, InsertDMLState):
+    class default_insert_options(Options):
+        _dml_strategy: DMLStrategyArgument = "auto"
+        _render_nulls: bool = False
+        _return_defaults: bool = False
+        _subject_mapper: Optional[Mapper[Any]] = None
+        _autoflush: bool = True
+        _populate_existing: bool = False
+
+    select_statement: Optional[FromStatement] = None
+
+    @classmethod
+    def orm_pre_session_exec(
+        cls,
+        session,
+        statement,
+        params,
+        execution_options,
+        bind_arguments,
+        is_pre_event,
+    ):
+        (
+            insert_options,
+            execution_options,
+        ) = BulkORMInsert.default_insert_options.from_execution_options(
+            "_sa_orm_insert_options",
+            {"dml_strategy", "autoflush", "populate_existing", "render_nulls"},
+            execution_options,
+            statement._execution_options,
+        )
+        bind_arguments["clause"] = statement
+        try:
+            plugin_subject = statement._propagate_attrs["plugin_subject"]
+        except KeyError:
+            assert False, "statement had 'orm' plugin but no plugin_subject"
+        else:
+            if plugin_subject:
+                bind_arguments["mapper"] = plugin_subject.mapper
+                insert_options += {"_subject_mapper": plugin_subject.mapper}
+
+        if not params:
+            if insert_options._dml_strategy == "auto":
+                insert_options += {"_dml_strategy": "orm"}
+            elif insert_options._dml_strategy == "bulk":
+                raise sa_exc.InvalidRequestError(
+                    'Can\'t use "bulk" ORM insert strategy without '
+                    "passing separate parameters"
+                )
+        else:
+            if insert_options._dml_strategy == "auto":
+                insert_options += {"_dml_strategy": "bulk"}
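+            # e.g. (illustrative; "User" is a hypothetical mapped class):
+            #     session.execute(insert(User), [{"x": 1}, {"x": 2}])
+            # supplies a list of parameter dicts, so "auto" selects "bulk";
+            # with no separate parameters, "auto" selects "orm" above.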
+
+        if insert_options._dml_strategy != "raw":
+            # for ORM object loading, like ORMContext, we have to disable
+            # result set adapt_to_context, because we will be generating a
+            # new statement with specific columns that's cached inside of
+            # an ORMFromStatementCompileState, which we will re-use for
+            # each result.
+            if not execution_options:
+                execution_options = context._orm_load_exec_options
+            else:
+                execution_options = execution_options.union(
+                    context._orm_load_exec_options
+                )
+
+        if not is_pre_event and insert_options._autoflush:
+            session._autoflush()
+
+        statement = statement._annotate(
+            {"dml_strategy": insert_options._dml_strategy}
+        )
+
+        return (
+            statement,
+            util.immutabledict(execution_options).union(
+                {"_sa_orm_insert_options": insert_options}
+            ),
+        )
+
+    @classmethod
+    def orm_execute_statement(
+        cls,
+        session: Session,
+        statement: dml.Insert,
+        params: _CoreAnyExecuteParams,
+        execution_options: OrmExecuteOptionsParameter,
+        bind_arguments: _BindArguments,
+        conn: Connection,
+    ) -> _result.Result:
+        insert_options = execution_options.get(
+            "_sa_orm_insert_options", cls.default_insert_options
+        )
+
+        if insert_options._dml_strategy not in (
+            "raw",
+            "bulk",
+            "orm",
+            "auto",
+        ):
+            raise sa_exc.ArgumentError(
+                "Valid strategies for ORM insert strategy "
+                "are 'raw', 'orm', 'bulk', 'auto"
+            )
+
+        result: _result.Result[Any]
+
+        if insert_options._dml_strategy == "raw":
+            result = conn.execute(
+                statement, params or {}, execution_options=execution_options
+            )
+            return result
+
+        if insert_options._dml_strategy == "bulk":
+            mapper = insert_options._subject_mapper
+
+            if (
+                statement._post_values_clause is not None
+                and mapper._multiple_persistence_tables
+            ):
+                raise sa_exc.InvalidRequestError(
+                    "bulk INSERT with a 'post values' clause "
+                    "(typically upsert) not supported for multi-table "
+                    f"mapper {mapper}"
+                )
+
+            assert mapper is not None
+            assert session._transaction is not None
+            result = _bulk_insert(
+                mapper,
+                cast(
+                    "Iterable[Dict[str, Any]]",
+                    [params] if isinstance(params, dict) else params,
+                ),
+                session._transaction,
+                isstates=False,
+                return_defaults=insert_options._return_defaults,
+                render_nulls=insert_options._render_nulls,
+                use_orm_insert_stmt=statement,
+                execution_options=execution_options,
+            )
+        elif insert_options._dml_strategy == "orm":
+            result = conn.execute(
+                statement, params or {}, execution_options=execution_options
+            )
+        else:
+            raise AssertionError()
+
+        if not bool(statement._returning):
+            return result
+
+        if insert_options._populate_existing:
+            load_options = execution_options.get(
+                "_sa_orm_load_options", QueryContext.default_load_options
+            )
+            load_options += {"_populate_existing": True}
+            execution_options = execution_options.union(
+                {"_sa_orm_load_options": load_options}
+            )
+
+        return cls._return_orm_returning(
+            session,
+            statement,
+            params,
+            execution_options,
+            bind_arguments,
+            result,
+        )
+
+    @classmethod
+    def create_for_statement(cls, statement, compiler, **kw) -> BulkORMInsert:
+        self = cast(
+            BulkORMInsert,
+            super().create_for_statement(statement, compiler, **kw),
+        )
+
+        if compiler is not None:
+            toplevel = not compiler.stack
+        else:
+            toplevel = True
+        if not toplevel:
+            return self
+
+        mapper = statement._propagate_attrs["plugin_subject"]
+        dml_strategy = statement._annotations.get("dml_strategy", "raw")
+        if dml_strategy == "bulk":
+            self._setup_for_bulk_insert(compiler)
+        elif dml_strategy == "orm":
+            self._setup_for_orm_insert(compiler, mapper)
+
+        return self
+
+    @classmethod
+    def _resolved_keys_as_col_keys(cls, mapper, resolved_value_dict):
+        return {
+            col.key if col is not None else k: v
+            for col, k, v in (
+                (mapper.c.get(k), k, v) for k, v in resolved_value_dict.items()
+            )
+        }
+
+    def _setup_for_orm_insert(self, compiler, mapper):
+        statement = orm_level_statement = cast(dml.Insert, self.statement)
+
+        statement = self._setup_orm_returning(
+            compiler,
+            orm_level_statement,
+            statement,
+            dml_mapper=mapper,
+            use_supplemental_cols=False,
+        )
+        self.statement = statement
+
+    def _setup_for_bulk_insert(self, compiler):
+        """establish an INSERT statement within the context of
+        bulk insert.
+
+        This method runs within the "conn.execute()" call that is invoked
+        by persistence._emit_insert_statement().
+
+        """
+        statement = orm_level_statement = cast(dml.Insert, self.statement)
+        an = statement._annotations
+
+        emit_insert_table, emit_insert_mapper = (
+            an["_emit_insert_table"],
+            an["_emit_insert_mapper"],
+        )
+
+        statement = statement._clone()
+
+        statement.table = emit_insert_table
+        if self._dict_parameters:
+            self._dict_parameters = {
+                col: val
+                for col, val in self._dict_parameters.items()
+                if col.table is emit_insert_table
+            }
+
+        statement = self._setup_orm_returning(
+            compiler,
+            orm_level_statement,
+            statement,
+            dml_mapper=emit_insert_mapper,
+            use_supplemental_cols=True,
+        )
+
+        if (
+            self.from_statement_ctx is not None
+            and self.from_statement_ctx.compile_options._is_star
+        ):
+            raise sa_exc.CompileError(
+                "Can't use RETURNING * with bulk ORM INSERT.  "
+                "Please use a different INSERT form, such as INSERT..VALUES "
+                "or INSERT with a Core Connection"
+            )
+
+        self.statement = statement
+
+
+@CompileState.plugin_for("orm", "update")
+class BulkORMUpdate(BulkUDCompileState, UpdateDMLState):
+    @classmethod
+    def create_for_statement(cls, statement, compiler, **kw):
+        self = cls.__new__(cls)
+
+        dml_strategy = statement._annotations.get(
+            "dml_strategy", "unspecified"
+        )
+
+        toplevel = not compiler.stack
+
+        if toplevel and dml_strategy == "bulk":
+            self._setup_for_bulk_update(statement, compiler)
+        elif (
+            dml_strategy == "core_only"
+            or dml_strategy == "unspecified"
+            and "parententity" not in statement.table._annotations
+        ):
+            UpdateDMLState.__init__(self, statement, compiler, **kw)
+        elif not toplevel or dml_strategy in ("orm", "unspecified"):
+            self._setup_for_orm_update(statement, compiler)
+
+        return self
+
+    def _setup_for_orm_update(self, statement, compiler, **kw):
+        orm_level_statement = statement
+
+        toplevel = not compiler.stack
+
+        ext_info = statement.table._annotations["parententity"]
+
+        self.mapper = mapper = ext_info.mapper
+
+        self._resolved_values = self._get_resolved_values(mapper, statement)
+
+        self._init_global_attributes(
+            statement,
+            compiler,
+            toplevel=toplevel,
+            process_criteria_for_toplevel=toplevel,
+        )
+
+        if statement._values:
+            self._resolved_values = dict(self._resolved_values)
+
+        new_stmt = statement._clone()
+
+        if new_stmt.table._annotations["parententity"] is mapper:
+            new_stmt.table = mapper.local_table
+
+        # note if the statement has _multi_values, these
+        # are passed through to the new statement, which will then raise
+        # InvalidRequestError because UPDATE doesn't support multi_values
+        # right now.
+        if statement._ordered_values:
+            new_stmt._ordered_values = self._resolved_values
+        elif statement._values:
+            new_stmt._values = self._resolved_values
+
+        new_crit = self._adjust_for_extra_criteria(
+            self.global_attributes, mapper
+        )
+        if new_crit:
+            new_stmt = new_stmt.where(*new_crit)
+
+        # if we are against a lambda statement we might not be the
+        # topmost object that received per-execute annotations
+
+        # do this first as we need to determine if there is
+        # UPDATE..FROM
+
+        UpdateDMLState.__init__(self, new_stmt, compiler, **kw)
+
+        use_supplemental_cols = False
+
+        if not toplevel:
+            synchronize_session = None
+        else:
+            synchronize_session = compiler._annotations.get(
+                "synchronize_session", None
+            )
+        can_use_returning = compiler._annotations.get(
+            "can_use_returning", None
+        )
+        if can_use_returning is not False:
+            # even though pre_exec has determined basic
+            # can_use_returning for the dialect, if we are to use
+            # RETURNING we need to run can_use_returning() at this level
+            # unconditionally because is_delete_using was not known
+            # at the pre_exec level
+            can_use_returning = (
+                synchronize_session == "fetch"
+                and self.can_use_returning(
+                    compiler.dialect, mapper, is_multitable=self.is_multitable
+                )
+            )
+
+        if synchronize_session == "fetch" and can_use_returning:
+            use_supplemental_cols = True
+
+            # NOTE: we might want to RETURNING the actual columns to be
+            # synchronized also.  however this is complicated and difficult
+            # to align against the behavior of "evaluate".  Additionally,
+            # in a large number (if not the majority) of cases, we have the
+            # "evaluate" answer, usually a fixed value, in memory already and
+            # there's no need to re-fetch the same value
+            # over and over again.   so perhaps if it could be RETURNING just
+            # the elements that were based on a SQL expression and not
+            # a constant.   For now it doesn't quite seem worth it
+            new_stmt = new_stmt.return_defaults(*new_stmt.table.primary_key)
+
+        if toplevel:
+            new_stmt = self._setup_orm_returning(
+                compiler,
+                orm_level_statement,
+                new_stmt,
+                dml_mapper=mapper,
+                use_supplemental_cols=use_supplemental_cols,
+            )
+
+        self.statement = new_stmt
+
+    def _setup_for_bulk_update(self, statement, compiler, **kw):
+        """establish an UPDATE statement within the context of
+        bulk insert.
+
+        This method will be within the "conn.execute()" call that is invoked
+        by persistence._emit_update_statement().
+
+        """
+        statement = cast(dml.Update, statement)
+        an = statement._annotations
+
+        emit_update_table, _ = (
+            an["_emit_update_table"],
+            an["_emit_update_mapper"],
+        )
+
+        statement = statement._clone()
+        statement.table = emit_update_table
+
+        UpdateDMLState.__init__(self, statement, compiler, **kw)
+
+        if self._ordered_values:
+            raise sa_exc.InvalidRequestError(
+                "bulk ORM UPDATE does not support ordered_values() for "
+                "custom UPDATE statements with bulk parameter sets.  Use a "
+                "non-bulk UPDATE statement or use values()."
+            )
+
+        if self._dict_parameters:
+            self._dict_parameters = {
+                col: val
+                for col, val in self._dict_parameters.items()
+                if col.table is emit_update_table
+            }
+        self.statement = statement
+
+    @classmethod
+    def orm_execute_statement(
+        cls,
+        session: Session,
+        statement: dml.Update,
+        params: _CoreAnyExecuteParams,
+        execution_options: OrmExecuteOptionsParameter,
+        bind_arguments: _BindArguments,
+        conn: Connection,
+    ) -> _result.Result:
+
+        update_options = execution_options.get(
+            "_sa_orm_update_options", cls.default_update_options
+        )
+
+        if update_options._populate_existing:
+            load_options = execution_options.get(
+                "_sa_orm_load_options", QueryContext.default_load_options
+            )
+            load_options += {"_populate_existing": True}
+            execution_options = execution_options.union(
+                {"_sa_orm_load_options": load_options}
+            )
+
+        if update_options._dml_strategy not in (
+            "orm",
+            "auto",
+            "bulk",
+            "core_only",
+        ):
+            raise sa_exc.ArgumentError(
+                "Valid strategies for ORM UPDATE strategy "
+                "are 'orm', 'auto', 'bulk', 'core_only'"
+            )
+
+        result: _result.Result[Any]
+
+        if update_options._dml_strategy == "bulk":
+            enable_check_rowcount = not statement._where_criteria
+
+            assert update_options._synchronize_session != "fetch"
+
+            if (
+                statement._where_criteria
+                and update_options._synchronize_session == "evaluate"
+            ):
+                raise sa_exc.InvalidRequestError(
+                    "bulk synchronize of persistent objects not supported "
+                    "when using bulk update with additional WHERE "
+                    "criteria right now.  add synchronize_session=None "
+                    "execution option to bypass synchronize of persistent "
+                    "objects."
+                )
+            mapper = update_options._subject_mapper
+            assert mapper is not None
+            assert session._transaction is not None
+            result = _bulk_update(
+                mapper,
+                cast(
+                    "Iterable[Dict[str, Any]]",
+                    [params] if isinstance(params, dict) else params,
+                ),
+                session._transaction,
+                isstates=False,
+                update_changed_only=False,
+                use_orm_update_stmt=statement,
+                enable_check_rowcount=enable_check_rowcount,
+            )
+            return cls.orm_setup_cursor_result(
+                session,
+                statement,
+                params,
+                execution_options,
+                bind_arguments,
+                result,
+            )
+        else:
+            return super().orm_execute_statement(
+                session,
+                statement,
+                params,
+                execution_options,
+                bind_arguments,
+                conn,
+            )
+
+    @classmethod
+    def can_use_returning(
+        cls,
+        dialect: Dialect,
+        mapper: Mapper[Any],
+        *,
+        is_multitable: bool = False,
+        is_update_from: bool = False,
+        is_delete_using: bool = False,
+        is_executemany: bool = False,
+    ) -> bool:
+        # normal answer for "should we use RETURNING" at all.
+        normal_answer = (
+            dialect.update_returning and mapper.local_table.implicit_returning
+        )
+        if not normal_answer:
+            return False
+
+        if is_executemany:
+            return dialect.update_executemany_returning
+
+        # these workarounds are currently hypothetical for UPDATE,
+        # unlike DELETE where they impact MariaDB
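+        #
+        # the hint would be supplied as an execution option, e.g.
+        # (illustrative; "User" / "Address" are hypothetical mapped classes):
+        #
+        #     session.execute(
+        #         update(User)
+        #         .where(User.id == Address.user_id)
+        #         .values(status="archived"),
+        #         execution_options={
+        #             "synchronize_session": "fetch",
+        #             "is_update_from": True,
+        #         },
+        #     )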
+        if is_update_from:
+            return dialect.update_returning_multifrom
+
+        elif is_multitable and not dialect.update_returning_multifrom:
+            raise sa_exc.CompileError(
+                f'Dialect "{dialect.name}" does not support RETURNING '
+                "with UPDATE..FROM; for synchronize_session='fetch', "
+                "please add the additional execution option "
+                "'is_update_from=True' to the statement to indicate that "
+                "a separate SELECT should be used for this backend."
+            )
+
+        return True
+
+    @classmethod
+    def _do_post_synchronize_bulk_evaluate(
+        cls, session, params, result, update_options
+    ):
+        if not params:
+            return
+
+        mapper = update_options._subject_mapper
+        pk_keys = [prop.key for prop in mapper._identity_key_props]
+
+        identity_map = session.identity_map
+
+        for param in params:
+            identity_key = mapper.identity_key_from_primary_key(
+                (param[key] for key in pk_keys),
+                update_options._identity_token,
+            )
+            state = identity_map.fast_get_state(identity_key)
+            if not state:
+                continue
+
+            evaluated_keys = set(param).difference(pk_keys)
+
+            dict_ = state.dict
+            # only evaluate unmodified attributes
+            to_evaluate = state.unmodified.intersection(evaluated_keys)
+            for key in to_evaluate:
+                if key in dict_:
+                    dict_[key] = param[key]
+
+            state.manager.dispatch.refresh(state, None, to_evaluate)
+
+            state._commit(dict_, list(to_evaluate))
+
+            # attributes that were formerly modified instead get expired.
+            # this only gets hit if the session had pending changes
+            # and autoflush was set to False.
+            to_expire = evaluated_keys.intersection(dict_).difference(
+                to_evaluate
+            )
+            if to_expire:
+                state._expire_attributes(dict_, to_expire)
+
+    @classmethod
+    def _do_post_synchronize_evaluate(
+        cls, session, statement, result, update_options
+    ):
+        matched_objects = cls._get_matched_objects_on_criteria(
+            update_options,
+            session.identity_map.all_states(),
+        )
+
+        cls._apply_update_set_values_to_objects(
+            session,
+            update_options,
+            statement,
+            result.context.compiled_parameters[0],
+            [(obj, state, dict_) for obj, state, dict_, _ in matched_objects],
+            result.prefetch_cols(),
+            result.postfetch_cols(),
+        )
+
+    @classmethod
+    def _do_post_synchronize_fetch(
+        cls, session, statement, result, update_options
+    ):
+        target_mapper = update_options._subject_mapper
+
+        returned_defaults_rows = result.returned_defaults_rows
+        if returned_defaults_rows:
+            pk_rows = cls._interpret_returning_rows(
+                result, target_mapper, returned_defaults_rows
+            )
+            matched_rows = [
+                tuple(row) + (update_options._identity_token,)
+                for row in pk_rows
+            ]
+        else:
+            matched_rows = update_options._matched_rows
+
+        objs = [
+            session.identity_map[identity_key]
+            for identity_key in [
+                target_mapper.identity_key_from_primary_key(
+                    list(primary_key),
+                    identity_token=identity_token,
+                )
+                for primary_key, identity_token in [
+                    (row[0:-1], row[-1]) for row in matched_rows
+                ]
+                if update_options._identity_token is None
+                or identity_token == update_options._identity_token
+            ]
+            if identity_key in session.identity_map
+        ]
+
+        if not objs:
+            return
+
+        cls._apply_update_set_values_to_objects(
+            session,
+            update_options,
+            statement,
+            result.context.compiled_parameters[0],
+            [
+                (
+                    obj,
+                    attributes.instance_state(obj),
+                    attributes.instance_dict(obj),
+                )
+                for obj in objs
+            ],
+            result.prefetch_cols(),
+            result.postfetch_cols(),
+        )
+
+    @classmethod
+    def _apply_update_set_values_to_objects(
+        cls,
+        session,
+        update_options,
+        statement,
+        effective_params,
+        matched_objects,
+        prefetch_cols,
+        postfetch_cols,
+    ):
+        """apply values to objects derived from an update statement, e.g.
+        UPDATE..SET <values>
+
+        """
+
+        mapper = update_options._subject_mapper
+        target_cls = mapper.class_
+        evaluator_compiler = evaluator._EvaluatorCompiler(target_cls)
+        resolved_values = cls._get_resolved_values(mapper, statement)
+        resolved_keys_as_propnames = cls._resolved_keys_as_propnames(
+            mapper, resolved_values
+        )
+        value_evaluators = {}
+        for key, value in resolved_keys_as_propnames:
+            try:
+                _evaluator = evaluator_compiler.process(
+                    coercions.expect(roles.ExpressionElementRole, value)
+                )
+            except evaluator.UnevaluatableError:
+                pass
+            else:
+                value_evaluators[key] = _evaluator
+
+        evaluated_keys = list(value_evaluators.keys())
+        attrib = {k for k, v in resolved_keys_as_propnames}
+
+        states = set()
+
+        to_prefetch = {
+            c
+            for c in prefetch_cols
+            if c.key in effective_params
+            and c in mapper._columntoproperty
+            and c.key not in evaluated_keys
+        }
+        to_expire = {
+            mapper._columntoproperty[c].key
+            for c in postfetch_cols
+            if c in mapper._columntoproperty
+        }.difference(evaluated_keys)
+
+        prefetch_transfer = [
+            (mapper._columntoproperty[c].key, c.key) for c in to_prefetch
+        ]
+
+        for obj, state, dict_ in matched_objects:
+
+            dict_.update(
+                {
+                    col_to_prop: effective_params[c_key]
+                    for col_to_prop, c_key in prefetch_transfer
+                }
+            )
+
+            state._expire_attributes(state.dict, to_expire)
+
+            to_evaluate = state.unmodified.intersection(evaluated_keys)
+
+            for key in to_evaluate:
+                if key in dict_:
+                    # only run eval for attributes that are present.
+                    dict_[key] = value_evaluators[key](obj)
+
+            state.manager.dispatch.refresh(state, None, to_evaluate)
+
+            state._commit(dict_, list(to_evaluate))
+
+            # attributes that were formerly modified instead get expired.
+            # this only gets hit if the session had pending changes
+            # and autoflush was set to False.
+            to_expire = attrib.intersection(dict_).difference(to_evaluate)
+            if to_expire:
+                state._expire_attributes(dict_, to_expire)
+
+            states.add(state)
+        session._register_altered(states)
+
+
+@CompileState.plugin_for("orm", "delete")
+class BulkORMDelete(BulkUDCompileState, DeleteDMLState):
+    @classmethod
+    def create_for_statement(cls, statement, compiler, **kw):
+        self = cls.__new__(cls)
+
+        dml_strategy = statement._annotations.get(
+            "dml_strategy", "unspecified"
+        )
+
+        if (
+            dml_strategy == "core_only"
+            or dml_strategy == "unspecified"
+            and "parententity" not in statement.table._annotations
+        ):
+            DeleteDMLState.__init__(self, statement, compiler, **kw)
+            return self
+
+        toplevel = not compiler.stack
+
+        orm_level_statement = statement
+
+        ext_info = statement.table._annotations["parententity"]
+        self.mapper = mapper = ext_info.mapper
+
+        self._init_global_attributes(
+            statement,
+            compiler,
+            toplevel=toplevel,
+            process_criteria_for_toplevel=toplevel,
+        )
+
+        new_stmt = statement._clone()
+
+        if new_stmt.table._annotations["parententity"] is mapper:
+            new_stmt.table = mapper.local_table
+
+        new_crit = cls._adjust_for_extra_criteria(
+            self.global_attributes, mapper
+        )
+        if new_crit:
+            new_stmt = new_stmt.where(*new_crit)
+
+        # do this first as we need to determine if there is
+        # DELETE..FROM
+        DeleteDMLState.__init__(self, new_stmt, compiler, **kw)
+
+        use_supplemental_cols = False
+
+        if not toplevel:
+            synchronize_session = None
+        else:
+            synchronize_session = compiler._annotations.get(
+                "synchronize_session", None
+            )
+        can_use_returning = compiler._annotations.get(
+            "can_use_returning", None
+        )
+        if can_use_returning is not False:
+            # even though pre_exec has determined basic
+            # can_use_returning for the dialect, if we are to use
+            # RETURNING we need to run can_use_returning() at this level
+            # unconditionally because is_delete_using was not known
+            # at the pre_exec level
+            can_use_returning = (
+                synchronize_session == "fetch"
+                and self.can_use_returning(
+                    compiler.dialect,
+                    mapper,
+                    is_multitable=self.is_multitable,
+                    is_delete_using=compiler._annotations.get(
+                        "is_delete_using", False
+                    ),
+                )
+            )
+
+        if can_use_returning:
+            use_supplemental_cols = True
+
+            new_stmt = new_stmt.return_defaults(*new_stmt.table.primary_key)
+
+        if toplevel:
+            new_stmt = self._setup_orm_returning(
+                compiler,
+                orm_level_statement,
+                new_stmt,
+                dml_mapper=mapper,
+                use_supplemental_cols=use_supplemental_cols,
+            )
+
+        self.statement = new_stmt
+
+        return self
+
+    @classmethod
+    def orm_execute_statement(
+        cls,
+        session: Session,
+        statement: dml.Delete,
+        params: _CoreAnyExecuteParams,
+        execution_options: OrmExecuteOptionsParameter,
+        bind_arguments: _BindArguments,
+        conn: Connection,
+    ) -> _result.Result:
+        update_options = execution_options.get(
+            "_sa_orm_update_options", cls.default_update_options
+        )
+
+        if update_options._dml_strategy == "bulk":
+            raise sa_exc.InvalidRequestError(
+                "Bulk ORM DELETE not supported right now. "
+                "Statement may be invoked at the "
+                "Core level using "
+                "session.connection().execute(stmt, parameters)"
+            )
+
+        if update_options._dml_strategy not in ("orm", "auto", "core_only"):
+            raise sa_exc.ArgumentError(
+                "Valid strategies for ORM DELETE strategy are 'orm', 'auto', "
+                "'core_only'"
+            )
+
+        return super().orm_execute_statement(
+            session, statement, params, execution_options, bind_arguments, conn
+        )
+
+    @classmethod
+    def can_use_returning(
+        cls,
+        dialect: Dialect,
+        mapper: Mapper[Any],
+        *,
+        is_multitable: bool = False,
+        is_update_from: bool = False,
+        is_delete_using: bool = False,
+        is_executemany: bool = False,
+    ) -> bool:
+        # normal answer for "should we use RETURNING" at all.
+        normal_answer = (
+            dialect.delete_returning and mapper.local_table.implicit_returning
+        )
+        if not normal_answer:
+            return False
+
+        # now get into special workarounds because MariaDB supports
+        # DELETE...RETURNING but not DELETE...USING...RETURNING.
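+        #
+        # the hint is passed as an execution option on the statement, e.g.
+        # (illustrative; "User" / "Address" are hypothetical mapped classes):
+        #
+        #     session.execute(
+        #         delete(User).where(User.id == Address.user_id),
+        #         execution_options={
+        #             "synchronize_session": "fetch",
+        #             "is_delete_using": True,
+        #         },
+        #     )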
+        if is_delete_using:
+            # is_delete_using hint was passed.   use
+            # additional dialect feature (True for PG, False for MariaDB)
+            return dialect.delete_returning_multifrom
+
+        elif is_multitable and not dialect.delete_returning_multifrom:
+            # is_delete_using hint was not passed, but we determined
+            # at compile time that this is in fact a DELETE..USING.
+            # it's too late to continue since we did not pre-SELECT.
+            # raise that we need that hint up front.
+
+            raise sa_exc.CompileError(
+                f'Dialect "{dialect.name}" does not support RETURNING '
+                "with DELETE..USING; for synchronize_session='fetch', "
+                "please add the additional execution option "
+                "'is_delete_using=True' to the statement to indicate that "
+                "a separate SELECT should be used for this backend."
+            )
+
+        return True
+
+    @classmethod
+    def _do_post_synchronize_evaluate(
+        cls, session, statement, result, update_options
+    ):
+        matched_objects = cls._get_matched_objects_on_criteria(
+            update_options,
+            session.identity_map.all_states(),
+        )
+
+        to_delete = []
+
+        for _, state, dict_, is_partially_expired in matched_objects:
+            if is_partially_expired:
+                state._expire(dict_, session.identity_map._modified)
+            else:
+                to_delete.append(state)
+
+        if to_delete:
+            session._remove_newly_deleted(to_delete)
+
+    @classmethod
+    def _do_post_synchronize_fetch(
+        cls, session, statement, result, update_options
+    ):
+        target_mapper = update_options._subject_mapper
+
+        returned_defaults_rows = result.returned_defaults_rows
+
+        if returned_defaults_rows:
+            pk_rows = cls._interpret_returning_rows(
+                result, target_mapper, returned_defaults_rows
+            )
+
+            matched_rows = [
+                tuple(row) + (update_options._identity_token,)
+                for row in pk_rows
+            ]
+        else:
+            matched_rows = update_options._matched_rows
+
+        for row in matched_rows:
+            primary_key = row[0:-1]
+            identity_token = row[-1]
+
+            # TODO: inline this and call remove_newly_deleted
+            # once
+            identity_key = target_mapper.identity_key_from_primary_key(
+                list(primary_key),
+                identity_token=identity_token,
+            )
+            if identity_key in session.identity_map:
+                session._remove_newly_deleted(
+                    [
+                        attributes.instance_state(
+                            session.identity_map[identity_key]
+                        )
+                    ]
+                )
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/clsregistry.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/clsregistry.py
new file mode 100644
index 00000000..70307ec7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/clsregistry.py
@@ -0,0 +1,571 @@
+# orm/clsregistry.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Routines to handle the string class registry used by declarative.
+
+This system allows specification of classes and expressions used in
+:func:`_orm.relationship` using strings.
+
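+E.g., a target class may be named as a string and resolved against this
+registry at mapper configuration time (an illustrative sketch; "User" and
+"Address" are hypothetical mapped classes)::
+
+    class User(Base):
+        __tablename__ = "user_account"
+
+        id = mapped_column(Integer, primary_key=True)
+        addresses = relationship("Address", back_populates="user")
+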
+"""
+
+from __future__ import annotations
+
+import re
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import Generator
+from typing import Iterable
+from typing import List
+from typing import Mapping
+from typing import MutableMapping
+from typing import NoReturn
+from typing import Optional
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+import weakref
+
+from . import attributes
+from . import interfaces
+from .descriptor_props import SynonymProperty
+from .properties import ColumnProperty
+from .util import class_mapper
+from .. import exc
+from .. import inspection
+from .. import util
+from ..sql.schema import _get_table_key
+from ..util.typing import CallableReference
+
+if TYPE_CHECKING:
+    from .relationships import RelationshipProperty
+    from ..sql.schema import MetaData
+    from ..sql.schema import Table
+
+_T = TypeVar("_T", bound=Any)
+
+_ClsRegistryType = MutableMapping[str, Union[type, "ClsRegistryToken"]]
+
+# strong references to registries which we place in
+# the _decl_class_registry, which is usually weak referencing.
+# the internal registries here link to classes with weakrefs and remove
+# themselves when all references to contained classes are removed.
+_registries: Set[ClsRegistryToken] = set()
+
+
+def add_class(
+    classname: str, cls: Type[_T], decl_class_registry: _ClsRegistryType
+) -> None:
+    """Add a class to the _decl_class_registry associated with the
+    given declarative class.
+
+    """
+    if classname in decl_class_registry:
+        # class already exists.
+        existing = decl_class_registry[classname]
+        if not isinstance(existing, _MultipleClassMarker):
+            existing = decl_class_registry[classname] = _MultipleClassMarker(
+                [cls, cast("Type[Any]", existing)]
+            )
+    else:
+        decl_class_registry[classname] = cls
+
+    try:
+        root_module = cast(
+            _ModuleMarker, decl_class_registry["_sa_module_registry"]
+        )
+    except KeyError:
+        decl_class_registry["_sa_module_registry"] = root_module = (
+            _ModuleMarker("_sa_module_registry", None)
+        )
+
+    tokens = cls.__module__.split(".")
+
+    # build up a tree like this:
+    # modulename:  myapp.snacks.nuts
+    #
+    # myapp->snack->nuts->(classes)
+    # snack->nuts->(classes)
+    # nuts->(classes)
+    #
+    # this allows partial token paths to be used.
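+    #
+    # e.g. relationship("nuts.Almond"), relationship("snacks.nuts.Almond")
+    # and relationship("myapp.snacks.nuts.Almond") (names illustrative)
+    # would all resolve to the same class.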
+    while tokens:
+        token = tokens.pop(0)
+        module = root_module.get_module(token)
+        for token in tokens:
+            module = module.get_module(token)
+
+        try:
+            module.add_class(classname, cls)
+        except AttributeError as ae:
+            if not isinstance(module, _ModuleMarker):
+                raise exc.InvalidRequestError(
+                    f'name "{classname}" matches both a '
+                    "class name and a module name"
+                ) from ae
+            else:
+                raise
+
+
+def remove_class(
+    classname: str, cls: Type[Any], decl_class_registry: _ClsRegistryType
+) -> None:
+    if classname in decl_class_registry:
+        existing = decl_class_registry[classname]
+        if isinstance(existing, _MultipleClassMarker):
+            existing.remove_item(cls)
+        else:
+            del decl_class_registry[classname]
+
+    try:
+        root_module = cast(
+            _ModuleMarker, decl_class_registry["_sa_module_registry"]
+        )
+    except KeyError:
+        return
+
+    tokens = cls.__module__.split(".")
+
+    while tokens:
+        token = tokens.pop(0)
+        module = root_module.get_module(token)
+        for token in tokens:
+            module = module.get_module(token)
+        try:
+            module.remove_class(classname, cls)
+        except AttributeError:
+            if not isinstance(module, _ModuleMarker):
+                pass
+            else:
+                raise
+
+
+def _key_is_empty(
+    key: str,
+    decl_class_registry: _ClsRegistryType,
+    test: Callable[[Any], bool],
+) -> bool:
+    """test if a key is empty of a certain object.
+
+    used for unit tests against the registry to see if garbage collection
+    is working.
+
+    "test" is a callable that will be passed an object should return True
+    if the given object is the one we were looking for.
+
+    We can't pass the actual object itself b.c. this is for testing garbage
+    collection; the caller will have to have removed references to the
+    object itself.
+
+    """
+    if key not in decl_class_registry:
+        return True
+
+    thing = decl_class_registry[key]
+    if isinstance(thing, _MultipleClassMarker):
+        for sub_thing in thing.contents:
+            if test(sub_thing):
+                return False
+        else:
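+            # for/else: reached only if the loop ran to completion without
+            # finding the object, an unexpected state for the tests using this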
+            raise NotImplementedError("unknown codepath")
+    else:
+        return not test(thing)
+
+
+class ClsRegistryToken:
+    """an object that can be in the registry._class_registry as a value."""
+
+    __slots__ = ()
+
+
+class _MultipleClassMarker(ClsRegistryToken):
+    """refers to multiple classes of the same name
+    within _decl_class_registry.
+
+    """
+
+    __slots__ = "on_remove", "contents", "__weakref__"
+
+    contents: Set[weakref.ref[Type[Any]]]
+    on_remove: CallableReference[Optional[Callable[[], None]]]
+
+    def __init__(
+        self,
+        classes: Iterable[Type[Any]],
+        on_remove: Optional[Callable[[], None]] = None,
+    ):
+        self.on_remove = on_remove
+        self.contents = {
+            weakref.ref(item, self._remove_item) for item in classes
+        }
+        _registries.add(self)
+
+    def remove_item(self, cls: Type[Any]) -> None:
+        self._remove_item(weakref.ref(cls))
+
+    def __iter__(self) -> Generator[Optional[Type[Any]], None, None]:
+        return (ref() for ref in self.contents)
+
+    def attempt_get(self, path: List[str], key: str) -> Type[Any]:
+        if len(self.contents) > 1:
+            raise exc.InvalidRequestError(
+                'Multiple classes found for path "%s" '
+                "in the registry of this declarative "
+                "base. Please use a fully module-qualified path."
+                % (".".join(path + [key]))
+            )
+        else:
+            ref = list(self.contents)[0]
+            cls = ref()
+            if cls is None:
+                raise NameError(key)
+            return cls
+
+    def _remove_item(self, ref: weakref.ref[Type[Any]]) -> None:
+        self.contents.discard(ref)
+        if not self.contents:
+            _registries.discard(self)
+            if self.on_remove:
+                self.on_remove()
+
+    def add_item(self, item: Type[Any]) -> None:
+        # protect against class registration race condition against
+        # asynchronous garbage collection calling _remove_item,
+        # [ticket:3208] and [ticket:10782]
+        modules = {
+            cls.__module__
+            for cls in [ref() for ref in list(self.contents)]
+            if cls is not None
+        }
+        if item.__module__ in modules:
+            util.warn(
+                "This declarative base already contains a class with the "
+                "same class name and module name as %s.%s, and will "
+                "be replaced in the string-lookup table."
+                % (item.__module__, item.__name__)
+            )
+        self.contents.add(weakref.ref(item, self._remove_item))
+
+
+class _ModuleMarker(ClsRegistryToken):
+    """Refers to a module name within
+    _decl_class_registry.
+
+    """
+
+    __slots__ = "parent", "name", "contents", "mod_ns", "path", "__weakref__"
+
+    parent: Optional[_ModuleMarker]
+    contents: Dict[str, Union[_ModuleMarker, _MultipleClassMarker]]
+    mod_ns: _ModNS
+    path: List[str]
+
+    def __init__(self, name: str, parent: Optional[_ModuleMarker]):
+        self.parent = parent
+        self.name = name
+        self.contents = {}
+        self.mod_ns = _ModNS(self)
+        if self.parent:
+            self.path = self.parent.path + [self.name]
+        else:
+            self.path = []
+        _registries.add(self)
+
+    def __contains__(self, name: str) -> bool:
+        return name in self.contents
+
+    def __getitem__(self, name: str) -> ClsRegistryToken:
+        return self.contents[name]
+
+    def _remove_item(self, name: str) -> None:
+        self.contents.pop(name, None)
+        if not self.contents:
+            if self.parent is not None:
+                self.parent._remove_item(self.name)
+            _registries.discard(self)
+
+    def resolve_attr(self, key: str) -> Union[_ModNS, Type[Any]]:
+        return self.mod_ns.__getattr__(key)
+
+    def get_module(self, name: str) -> _ModuleMarker:
+        if name not in self.contents:
+            marker = _ModuleMarker(name, self)
+            self.contents[name] = marker
+        else:
+            marker = cast(_ModuleMarker, self.contents[name])
+        return marker
+
+    def add_class(self, name: str, cls: Type[Any]) -> None:
+        if name in self.contents:
+            existing = cast(_MultipleClassMarker, self.contents[name])
+            try:
+                existing.add_item(cls)
+            except AttributeError as ae:
+                if not isinstance(existing, _MultipleClassMarker):
+                    raise exc.InvalidRequestError(
+                        f'name "{name}" matches both a '
+                        "class name and a module name"
+                    ) from ae
+                else:
+                    raise
+        else:
+            existing = self.contents[name] = _MultipleClassMarker(
+                [cls], on_remove=lambda: self._remove_item(name)
+            )
+
+    def remove_class(self, name: str, cls: Type[Any]) -> None:
+        if name in self.contents:
+            existing = cast(_MultipleClassMarker, self.contents[name])
+            existing.remove_item(cls)
+
+
+class _ModNS:
+    __slots__ = ("__parent",)
+
+    __parent: _ModuleMarker
+
+    def __init__(self, parent: _ModuleMarker):
+        self.__parent = parent
+
+    def __getattr__(self, key: str) -> Union[_ModNS, Type[Any]]:
+        try:
+            value = self.__parent.contents[key]
+        except KeyError:
+            pass
+        else:
+            if value is not None:
+                if isinstance(value, _ModuleMarker):
+                    return value.mod_ns
+                else:
+                    assert isinstance(value, _MultipleClassMarker)
+                    return value.attempt_get(self.__parent.path, key)
+        raise NameError(
+            "Module %r has no mapped classes "
+            "registered under the name %r" % (self.__parent.name, key)
+        )
+
+
+class _GetColumns:
+    __slots__ = ("cls",)
+
+    cls: Type[Any]
+
+    def __init__(self, cls: Type[Any]):
+        self.cls = cls
+
+    def __getattr__(self, key: str) -> Any:
+        mp = class_mapper(self.cls, configure=False)
+        if mp:
+            if key not in mp.all_orm_descriptors:
+                raise AttributeError(
+                    "Class %r does not have a mapped column named %r"
+                    % (self.cls, key)
+                )
+
+            desc = mp.all_orm_descriptors[key]
+            if desc.extension_type is interfaces.NotExtension.NOT_EXTENSION:
+                assert isinstance(desc, attributes.QueryableAttribute)
+                prop = desc.property
+                if isinstance(prop, SynonymProperty):
+                    key = prop.name
+                elif not isinstance(prop, ColumnProperty):
+                    raise exc.InvalidRequestError(
+                        "Property %r is not an instance of"
+                        " ColumnProperty (i.e. does not correspond"
+                        " directly to a Column)." % key
+                    )
+        return getattr(self.cls, key)
+
+
+inspection._inspects(_GetColumns)(
+    lambda target: inspection.inspect(target.cls)
+)
+
+
+class _GetTable:
+    __slots__ = "key", "metadata"
+
+    key: str
+    metadata: MetaData
+
+    def __init__(self, key: str, metadata: MetaData):
+        self.key = key
+        self.metadata = metadata
+
+    def __getattr__(self, key: str) -> Table:
+        return self.metadata.tables[_get_table_key(key, self.key)]
+
+
+def _determine_container(key: str, value: Any) -> _GetColumns:
+    if isinstance(value, _MultipleClassMarker):
+        value = value.attempt_get([], key)
+    return _GetColumns(value)
+
+
+class _class_resolver:
+    __slots__ = (
+        "cls",
+        "prop",
+        "arg",
+        "fallback",
+        "_dict",
+        "_resolvers",
+        "favor_tables",
+    )
+
+    cls: Type[Any]
+    prop: RelationshipProperty[Any]
+    fallback: Mapping[str, Any]
+    arg: str
+    favor_tables: bool
+    _resolvers: Tuple[Callable[[str], Any], ...]
+
+    def __init__(
+        self,
+        cls: Type[Any],
+        prop: RelationshipProperty[Any],
+        fallback: Mapping[str, Any],
+        arg: str,
+        favor_tables: bool = False,
+    ):
+        self.cls = cls
+        self.prop = prop
+        self.arg = arg
+        self.fallback = fallback
+        self._dict = util.PopulateDict(self._access_cls)
+        self._resolvers = ()
+        self.favor_tables = favor_tables
+
+    def _access_cls(self, key: str) -> Any:
+        cls = self.cls
+
+        manager = attributes.manager_of_class(cls)
+        decl_base = manager.registry
+        assert decl_base is not None
+        decl_class_registry = decl_base._class_registry
+        metadata = decl_base.metadata
+
+        if self.favor_tables:
+            if key in metadata.tables:
+                return metadata.tables[key]
+            elif key in metadata._schemas:
+                return _GetTable(key, getattr(cls, "metadata", metadata))
+
+        if key in decl_class_registry:
+            return _determine_container(key, decl_class_registry[key])
+
+        if not self.favor_tables:
+            if key in metadata.tables:
+                return metadata.tables[key]
+            elif key in metadata._schemas:
+                return _GetTable(key, getattr(cls, "metadata", metadata))
+
+        if "_sa_module_registry" in decl_class_registry and key in cast(
+            _ModuleMarker, decl_class_registry["_sa_module_registry"]
+        ):
+            registry = cast(
+                _ModuleMarker, decl_class_registry["_sa_module_registry"]
+            )
+            return registry.resolve_attr(key)
+        elif self._resolvers:
+            for resolv in self._resolvers:
+                value = resolv(key)
+                if value is not None:
+                    return value
+
+        return self.fallback[key]
+
+    def _raise_for_name(self, name: str, err: Exception) -> NoReturn:
+        generic_match = re.match(r"(.+)\[(.+)\]", name)
+
+        if generic_match:
+            clsarg = generic_match.group(2).strip("'")
+            raise exc.InvalidRequestError(
+                f"When initializing mapper {self.prop.parent}, "
+                f'expression "relationship({self.arg!r})" seems to be '
+                "using a generic class as the argument to relationship(); "
+                "please state the generic argument "
+                "using an annotation, e.g. "
+                f'"{self.prop.key}: Mapped[{generic_match.group(1)}'
+                f"['{clsarg}']] = relationship()\""
+            ) from err
+        else:
+            raise exc.InvalidRequestError(
+                "When initializing mapper %s, expression %r failed to "
+                "locate a name (%r). If this is a class name, consider "
+                "adding this relationship() to the %r class after "
+                "both dependent classes have been defined."
+                % (self.prop.parent, self.arg, name, self.cls)
+            ) from err
+
+    def _resolve_name(self) -> Union[Table, Type[Any], _ModNS]:
+        name = self.arg
+        d = self._dict
+        rval = None
+        try:
+            for token in name.split("."):
+                if rval is None:
+                    rval = d[token]
+                else:
+                    rval = getattr(rval, token)
+        except KeyError as err:
+            self._raise_for_name(name, err)
+        except NameError as n:
+            self._raise_for_name(n.args[0], n)
+        else:
+            if isinstance(rval, _GetColumns):
+                return rval.cls
+            else:
+                if TYPE_CHECKING:
+                    assert isinstance(rval, (type, Table, _ModNS))
+                return rval
+
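+    # note that the string argument is evaluated as a Python expression
+    # against the class registry plus a fallback namespace, so expressions
+    # such as "Address.user_id" or "foreign(Address.user_id)" (illustrative
+    # names) may be resolved here, not just plain class names.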
+    def __call__(self) -> Any:
+        try:
+            x = eval(self.arg, globals(), self._dict)
+
+            if isinstance(x, _GetColumns):
+                return x.cls
+            else:
+                return x
+        except NameError as n:
+            self._raise_for_name(n.args[0], n)
+
+
+_fallback_dict: Mapping[str, Any] = None  # type: ignore
+
+
+def _resolver(cls: Type[Any], prop: RelationshipProperty[Any]) -> Tuple[
+    Callable[[str], Callable[[], Union[Type[Any], Table, _ModNS]]],
+    Callable[[str, bool], _class_resolver],
+]:
+    global _fallback_dict
+
+    if _fallback_dict is None:
+        import sqlalchemy
+        from . import foreign
+        from . import remote
+
+        _fallback_dict = util.immutabledict(sqlalchemy.__dict__).union(
+            {"foreign": foreign, "remote": remote}
+        )
+
+    def resolve_arg(arg: str, favor_tables: bool = False) -> _class_resolver:
+        return _class_resolver(
+            cls, prop, _fallback_dict, arg, favor_tables=favor_tables
+        )
+
+    def resolve_name(
+        arg: str,
+    ) -> Callable[[], Union[Type[Any], Table, _ModNS]]:
+        return _class_resolver(cls, prop, _fallback_dict, arg)._resolve_name
+
+    return resolve_name, resolve_arg
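+
+
+# Editorial sketch of what the resolvers above enable: a string argument to
+# relationship() is resolved lazily at mapper configuration time, so the
+# target class need not exist when the relationship is declared, e.g.::
+#
+#     class User(Base):
+#         __tablename__ = "user"
+#
+#         id = mapped_column(Integer, primary_key=True)
+#         # "Address" is resolved via the callables built in _resolver()
+#         addresses = relationship("Address", back_populates="user")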
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/collections.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/collections.py
new file mode 100644
index 00000000..336b1133
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/collections.py
@@ -0,0 +1,1627 @@
+# orm/collections.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+"""Support for collections of mapped entities.
+
+The collections package supplies the machinery used to inform the ORM of
+collection membership changes.  Instrumentation is applied via decoration,
+allowing arbitrary types (including built-ins) to be used as entity
+collections without requiring inheritance from a base class.
+
+Instrumentation decoration relays membership change events to the
+:class:`.CollectionAttributeImpl` that is currently managing the collection.
+The decorators observe function call arguments and return values, tracking
+entities entering or leaving the collection.  Two decorator approaches are
+provided.  One is a bundle of generic decorators that map function arguments
+and return values to events::
+
+  from sqlalchemy.orm.collections import collection
+
+
+  class MyClass:
+      # ...
+
+      @collection.adds(1)
+      def store(self, item):
+          self.data.append(item)
+
+      @collection.removes_return()
+      def pop(self):
+          return self.data.pop()
+
+The second approach is a bundle of targeted decorators that wrap appropriate
+append and remove notifiers around the mutation methods present in the
+standard Python ``list``, ``set`` and ``dict`` interfaces.  These could be
+specified in terms of generic decorator recipes, but are instead hand-tooled
+for increased efficiency.  The targeted decorators occasionally implement
+adapter-like behavior, such as mapping bulk-set methods (``extend``,
+``update``, etc.) into the series of atomic mutation events
+that the ORM requires.
+
+The targeted decorators are used internally for automatic instrumentation of
+entity collection classes.  Every collection class goes through a
+transformation process roughly like so:
+
+1. If the class is a built-in, substitute a trivial sub-class
+2. Check whether the class is already instrumented (if so, stop)
+3. Add in generic decorators
+4. Sniff out the collection interface through duck-typing
+5. Add targeted decoration to any undecorated interface method
+
+This process modifies the class at runtime, decorating methods and adding some
+bookkeeping properties.  This isn't possible (or desirable) for built-in
+classes like ``list``, so trivial sub-classes are substituted to hold
+decoration::
+
+  class InstrumentedList(list):
+      pass
+
+Collection classes can be specified in ``relationship(collection_class=)`` as
+types or a function that returns an instance.  Collection classes are
+inspected and instrumented during the mapper compilation phase.  The
+collection_class callable will be executed once to produce a specimen
+instance, and the type of that specimen will be instrumented.  Functions that
+return built-in types like ``list`` will be adapted to produce instrumented
+instances.
+
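+For example, a ``set``-based collection can be requested directly (a sketch
+assuming a typical declarative mapping; ``Parent``/``Child`` hypothetical)::
+
+  class Parent(Base):
+      __tablename__ = "parent"
+
+      id = mapped_column(Integer, primary_key=True)
+      children = relationship("Child", collection_class=set)
+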
+When extending a known type like ``list``, additional decorations are
+generally not needed.  Odds are, the extension method will delegate to a
+method that's already instrumented.  For example::
+
+  class QueueIsh(list):
+      def push(self, item):
+          self.append(item)
+
+      def shift(self):
+          return self.pop(0)
+
+There's no need to decorate these methods.  ``append`` and ``pop`` are already
+instrumented as part of the ``list`` interface.  Decorating them would fire
+duplicate events, which should be avoided.
+
+The targeted decoration tries not to rely on other methods in the underlying
+collection class, but some are unavoidable.  Many depend on 'read' methods
+being present to properly instrument a 'write', for example, ``__setitem__``
+needs ``__getitem__``.  "Bulk" methods like ``update`` and ``extend`` may also
+be reimplemented in terms of atomic appends and removes, so the ``extend``
+decoration will actually perform many ``append`` operations and not call the
+underlying method at all.
+
+Tight control over bulk operations and the firing of events is also possible
+by implementing the instrumentation internally in your methods.  The basic
+instrumentation package works under the general assumption that collection
+mutation will not raise unusual exceptions.  If you want to closely
+orchestrate append and remove events with exception management, internal
+instrumentation may be the answer.  Within your method,
+``collection_adapter(self)`` will retrieve an object that you can use for
+explicit control over triggering append and remove events.
+
+The owning object and :class:`.CollectionAttributeImpl` are also reachable
+through the adapter, allowing for some very sophisticated behavior.
+
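+A minimal sketch of such internal instrumentation (an editorial example;
+``ControlledList`` is hypothetical)::
+
+  from sqlalchemy.orm.collections import collection
+  from sqlalchemy.orm.collections import collection_adapter
+
+
+  class ControlledList(list):
+      @collection.internally_instrumented
+      def append(self, item, _sa_initiator=None):
+          # fire the append event by hand; only then mutate the list
+          adapter = collection_adapter(self)
+          if adapter is not None and _sa_initiator is not False:
+              item = adapter.fire_append_event(item, _sa_initiator)
+          super().append(item)
+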
+"""
+from __future__ import annotations
+
+import operator
+import threading
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Collection
+from typing import Dict
+from typing import Iterable
+from typing import List
+from typing import NoReturn
+from typing import Optional
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+import weakref
+
+from .base import NO_KEY
+from .. import exc as sa_exc
+from .. import util
+from ..sql.base import NO_ARG
+from ..util.compat import inspect_getfullargspec
+from ..util.typing import Protocol
+
+if typing.TYPE_CHECKING:
+    from .attributes import AttributeEventToken
+    from .attributes import CollectionAttributeImpl
+    from .mapped_collection import attribute_keyed_dict
+    from .mapped_collection import column_keyed_dict
+    from .mapped_collection import keyfunc_mapping
+    from .mapped_collection import KeyFuncDict  # noqa: F401
+    from .state import InstanceState
+
+
+__all__ = [
+    "collection",
+    "collection_adapter",
+    "keyfunc_mapping",
+    "column_keyed_dict",
+    "attribute_keyed_dict",
+    "KeyFuncDict",
+    # old names in < 2.0
+    "mapped_collection",
+    "column_mapped_collection",
+    "attribute_mapped_collection",
+    "MappedCollection",
+]
+
+__instrumentation_mutex = threading.Lock()
+
+
+_CollectionFactoryType = Callable[[], "_AdaptedCollectionProtocol"]
+
+_T = TypeVar("_T", bound=Any)
+_KT = TypeVar("_KT", bound=Any)
+_VT = TypeVar("_VT", bound=Any)
+_COL = TypeVar("_COL", bound="Collection[Any]")
+_FN = TypeVar("_FN", bound="Callable[..., Any]")
+
+
+class _CollectionConverterProtocol(Protocol):
+    def __call__(self, collection: _COL) -> _COL: ...
+
+
+class _AdaptedCollectionProtocol(Protocol):
+    _sa_adapter: CollectionAdapter
+    _sa_appender: Callable[..., Any]
+    _sa_remover: Callable[..., Any]
+    _sa_iterator: Callable[..., Iterable[Any]]
+    _sa_converter: _CollectionConverterProtocol
+
+
+class collection:
+    """Decorators for entity collection classes.
+
+    The decorators fall into two groups: annotations and interception recipes.
+
+    The annotating decorators (appender, remover, iterator, converter,
+    internally_instrumented) indicate the method's purpose and take no
+    arguments.  They are not written with parens::
+
+        @collection.appender
+        def append(self, append): ...
+
+    The recipe decorators all require parens, even those that take no
+    arguments::
+
+        @collection.adds("entity")
+        def insert(self, position, entity): ...
+
+
+        @collection.removes_return()
+        def popitem(self): ...
+
+    """
+
+    # Bundled as a class solely for ease of use: packaging, doc strings,
+    # importability.
+
+    @staticmethod
+    def appender(fn):
+        """Tag the method as the collection appender.
+
+        The appender method is called with one positional argument: the value
+        to append. The method will be automatically decorated with 'adds(1)'
+        if not already decorated::
+
+            @collection.appender
+            def add(self, append): ...
+
+
+            # or, equivalently
+            @collection.appender
+            @collection.adds(1)
+            def add(self, append): ...
+
+
+            # for mapping type, an 'append' may kick out a previous value
+            # that occupies that slot.  consider d['a'] = 'foo' - any previous
+            # value in d['a'] is discarded.
+            @collection.appender
+            @collection.replaces(1)
+            def add(self, entity):
+                key = some_key_func(entity)
+                previous = None
+                if key in self:
+                    previous = self[key]
+                self[key] = entity
+                return previous
+
+        If the value to append is not allowed in the collection, you may
+        raise an exception.  Something to remember is that the appender
+        will be called for each object mapped by a database query.  If the
+        database contains rows that violate your collection semantics, you
+        will need to get creative to fix the problem, as access via the
+        collection will not work.
+
+        If the appender method is internally instrumented, you must also
+        receive the keyword argument '_sa_initiator' and ensure its
+        promulgation to collection events.
+
+        """
+        fn._sa_instrument_role = "appender"
+        return fn
+
+    @staticmethod
+    def remover(fn):
+        """Tag the method as the collection remover.
+
+        The remover method is called with one positional argument: the value
+        to remove. The method will be automatically decorated with
+        :meth:`removes_return` if not already decorated::
+
+            @collection.remover
+            def zap(self, entity): ...
+
+
+            # or, equivalently
+            @collection.remover
+            @collection.removes_return()
+            def zap(self): ...
+
+        If the value to remove is not present in the collection, you may
+        raise an exception or return None to ignore the error.
+
+        If the remove method is internally instrumented, you must also
+        receive the keyword argument '_sa_initiator' and ensure its
+        promulgation to collection events.
+
+        """
+        fn._sa_instrument_role = "remover"
+        return fn
+
+    @staticmethod
+    def iterator(fn):
+        """Tag the method as the collection remover.
+
+        The iterator method is called with no arguments.  It is expected to
+        return an iterator over all collection members::
+
+            @collection.iterator
+            def __iter__(self): ...
+
+        """
+        fn._sa_instrument_role = "iterator"
+        return fn
+
+    @staticmethod
+    def internally_instrumented(fn):
+        """Tag the method as instrumented.
+
+        This tag will prevent any decoration from being applied to the
+        method. Use this if you are orchestrating your own calls to
+        :func:`.collection_adapter` in one of the basic SQLAlchemy
+        interface methods, or to prevent an automatic ABC method
+        decoration from wrapping your implementation::
+
+            # normally an 'extend' method on a list-like class would be
+            # automatically intercepted and re-implemented in terms of
+            # SQLAlchemy events and append().  your implementation will
+            # never be called, unless:
+            @collection.internally_instrumented
+            def extend(self, items): ...
+
+        """
+        fn._sa_instrumented = True
+        return fn
+
+    @staticmethod
+    @util.deprecated(
+        "1.3",
+        "The :meth:`.collection.converter` handler is deprecated and will "
+        "be removed in a future release.  Please refer to the "
+        ":class:`.AttributeEvents.bulk_replace` listener interface in "
+        "conjunction with the :func:`.event.listen` function.",
+    )
+    def converter(fn):
+        """Tag the method as the collection converter.
+
+        This optional method will be called when a collection is being
+        replaced entirely, as in::
+
+            myobj.acollection = [newvalue1, newvalue2]
+
+        The converter method will receive the object being assigned and should
+        return an iterable of values suitable for use by the ``appender``
+        method.  A converter must not assign values or mutate the collection;
+        its sole job is to adapt the value the user provides into an iterable
+        of values for the ORM's use.
+
+        The default converter implementation will use duck-typing to do the
+        conversion.  A dict-like collection will be convert into an iterable
+        of dictionary values, and other types will simply be iterated::
+
+            @collection.converter
+            def convert(self, other): ...
+
+        If the duck-typing of the object does not match the type of this
+        collection, a TypeError is raised.
+
+        Supply an implementation of this method if you want to expand the
+        range of possible types that can be assigned in bulk or perform
+        validation on the values about to be assigned.
+
+        """
+        fn._sa_instrument_role = "converter"
+        return fn
+
+    @staticmethod
+    def adds(arg):
+        """Mark the method as adding an entity to the collection.
+
+        Adds "add to collection" handling to the method.  The decorator
+        argument indicates which method argument holds the SQLAlchemy-relevant
+        value.  Arguments can be specified positionally (i.e. integer) or by
+        name::
+
+            @collection.adds(1)
+            def push(self, item): ...
+
+
+            @collection.adds("entity")
+            def do_stuff(self, thing, entity=None): ...
+
+        """
+
+        def decorator(fn):
+            fn._sa_instrument_before = ("fire_append_event", arg)
+            return fn
+
+        return decorator
+
+    @staticmethod
+    def replaces(arg):
+        """Mark the method as replacing an entity in the collection.
+
+        Adds "add to collection" and "remove from collection" handling to
+        the method.  The decorator argument indicates which method argument
+        holds the SQLAlchemy-relevant value to be added; the return value,
+        if any, will be considered the value to remove.
+
+        Arguments can be specified positionally (i.e. integer) or by name::
+
+            @collection.replaces(2)
+            def __setitem__(self, index, item): ...
+
+        """
+
+        def decorator(fn):
+            fn._sa_instrument_before = ("fire_append_event", arg)
+            fn._sa_instrument_after = "fire_remove_event"
+            return fn
+
+        return decorator
+
+    @staticmethod
+    def removes(arg):
+        """Mark the method as removing an entity in the collection.
+
+        Adds "remove from collection" handling to the method.  The decorator
+        argument indicates which method argument holds the SQLAlchemy-relevant
+        value to be removed. Arguments can be specified positionally (i.e.
+        integer) or by name::
+
+            @collection.removes(1)
+            def zap(self, item): ...
+
+        For methods where the value to remove is not known at call-time, use
+        collection.removes_return.
+
+        """
+
+        def decorator(fn):
+            fn._sa_instrument_before = ("fire_remove_event", arg)
+            return fn
+
+        return decorator
+
+    @staticmethod
+    def removes_return():
+        """Mark the method as removing an entity in the collection.
+
+        Adds "remove from collection" handling to the method.  The return
+        value of the method, if any, is considered the value to remove.  The
+        method arguments are not inspected::
+
+            @collection.removes_return()
+            def pop(self): ...
+
+        For methods where the value to remove is known at call-time, use
+        collection.removes.
+
+        """
+
+        def decorator(fn):
+            fn._sa_instrument_after = "fire_remove_event"
+            return fn
+
+        return decorator
+
+
+if TYPE_CHECKING:
+
+    def collection_adapter(collection: Collection[Any]) -> CollectionAdapter:
+        """Fetch the :class:`.CollectionAdapter` for a collection."""
+
+else:
+    collection_adapter = operator.attrgetter("_sa_adapter")
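+    # i.e. collection_adapter(obj) returns obj._sa_adapter.  Instrumented
+    # collection classes default this attribute to None until an adapter is
+    # attached, so an AttributeError here indicates the object was never
+    # instrumented as a collection.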
+
+
+class CollectionAdapter:
+    """Bridges between the ORM and arbitrary Python collections.
+
+    Proxies base-level collection operations (append, remove, iterate)
+    to the underlying Python collection, and emits add/remove events for
+    entities entering or leaving the collection.
+
+    The ORM uses :class:`.CollectionAdapter` exclusively for interaction with
+    entity collections.
+
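+    A hedged usage sketch (``user.addresses`` assumed to be an instrumented
+    relationship collection; names hypothetical)::
+
+        adapter = collection_adapter(user.addresses)
+        adapter.append_with_event(Address(email_address="x@example.com"))
+        assert len(adapter) == len(user.addresses)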
+
+    """
+
+    __slots__ = (
+        "attr",
+        "_key",
+        "_data",
+        "owner_state",
+        "_converter",
+        "invalidated",
+        "empty",
+    )
+
+    attr: CollectionAttributeImpl
+    _key: str
+
+    # this is actually a weakref; see note in constructor
+    _data: Callable[..., _AdaptedCollectionProtocol]
+
+    owner_state: InstanceState[Any]
+    _converter: _CollectionConverterProtocol
+    invalidated: bool
+    empty: bool
+
+    def __init__(
+        self,
+        attr: CollectionAttributeImpl,
+        owner_state: InstanceState[Any],
+        data: _AdaptedCollectionProtocol,
+    ):
+        self.attr = attr
+        self._key = attr.key
+
+        # this weakref stays referenced throughout the lifespan of
+        # CollectionAdapter.  so while the weakref can return None, this
+        # is realistically only during garbage collection of this object, so
+        # we type this as a callable that returns _AdaptedCollectionProtocol
+        # in all cases.
+        self._data = weakref.ref(data)  # type: ignore
+
+        self.owner_state = owner_state
+        data._sa_adapter = self
+        self._converter = data._sa_converter
+        self.invalidated = False
+        self.empty = False
+
+    def _warn_invalidated(self) -> None:
+        util.warn("This collection has been invalidated.")
+
+    @property
+    def data(self) -> _AdaptedCollectionProtocol:
+        "The entity collection being adapted."
+        return self._data()
+
+    @property
+    def _referenced_by_owner(self) -> bool:
+        """return True if the owner state still refers to this collection.
+
+        This will return False within a bulk replace operation,
+        where this collection is the one being replaced.
+
+        """
+        return self.owner_state.dict[self._key] is self._data()
+
+    def bulk_appender(self):
+        return self._data()._sa_appender
+
+    def append_with_event(
+        self, item: Any, initiator: Optional[AttributeEventToken] = None
+    ) -> None:
+        """Add an entity to the collection, firing mutation events."""
+
+        self._data()._sa_appender(item, _sa_initiator=initiator)
+
+    def _set_empty(self, user_data):
+        assert (
+            not self.empty
+        ), "This collection adapter is already in the 'empty' state"
+        self.empty = True
+        self.owner_state._empty_collections[self._key] = user_data
+
+    def _reset_empty(self) -> None:
+        assert (
+            self.empty
+        ), "This collection adapter is not in the 'empty' state"
+        self.empty = False
+        self.owner_state.dict[self._key] = (
+            self.owner_state._empty_collections.pop(self._key)
+        )
+
+    def _refuse_empty(self) -> NoReturn:
+        raise sa_exc.InvalidRequestError(
+            "This is a special 'empty' collection which cannot accommodate "
+            "internal mutation operations"
+        )
+
+    def append_without_event(self, item: Any) -> None:
+        """Add or restore an entity to the collection, firing no events."""
+
+        if self.empty:
+            self._refuse_empty()
+        self._data()._sa_appender(item, _sa_initiator=False)
+
+    def append_multiple_without_event(self, items: Iterable[Any]) -> None:
+        """Add or restore an entity to the collection, firing no events."""
+        if self.empty:
+            self._refuse_empty()
+        appender = self._data()._sa_appender
+        for item in items:
+            appender(item, _sa_initiator=False)
+
+    def bulk_remover(self):
+        return self._data()._sa_remover
+
+    def remove_with_event(
+        self, item: Any, initiator: Optional[AttributeEventToken] = None
+    ) -> None:
+        """Remove an entity from the collection, firing mutation events."""
+        self._data()._sa_remover(item, _sa_initiator=initiator)
+
+    def remove_without_event(self, item: Any) -> None:
+        """Remove an entity from the collection, firing no events."""
+        if self.empty:
+            self._refuse_empty()
+        self._data()._sa_remover(item, _sa_initiator=False)
+
+    def clear_with_event(
+        self, initiator: Optional[AttributeEventToken] = None
+    ) -> None:
+        """Empty the collection, firing a mutation event for each entity."""
+
+        if self.empty:
+            self._refuse_empty()
+        remover = self._data()._sa_remover
+        for item in list(self):
+            remover(item, _sa_initiator=initiator)
+
+    def clear_without_event(self) -> None:
+        """Empty the collection, firing no events."""
+
+        if self.empty:
+            self._refuse_empty()
+        remover = self._data()._sa_remover
+        for item in list(self):
+            remover(item, _sa_initiator=False)
+
+    def __iter__(self):
+        """Iterate over entities in the collection."""
+
+        return iter(self._data()._sa_iterator())
+
+    def __len__(self):
+        """Count entities in the collection."""
+        return len(list(self._data()._sa_iterator()))
+
+    def __bool__(self):
+        return True
+
+    def _fire_append_wo_mutation_event_bulk(
+        self, items, initiator=None, key=NO_KEY
+    ):
+        if not items:
+            return
+
+        if initiator is not False:
+            if self.invalidated:
+                self._warn_invalidated()
+
+            if self.empty:
+                self._reset_empty()
+
+            for item in items:
+                self.attr.fire_append_wo_mutation_event(
+                    self.owner_state,
+                    self.owner_state.dict,
+                    item,
+                    initiator,
+                    key,
+                )
+
+    def fire_append_wo_mutation_event(self, item, initiator=None, key=NO_KEY):
+        """Notify that a entity is entering the collection but is already
+        present.
+
+
+        Initiator is a token owned by the InstrumentedAttribute that
+        initiated the membership mutation, and should be left as None
+        unless you are passing along an initiator value from a chained
+        operation.
+
+        .. versionadded:: 1.4.15
+
+        """
+        if initiator is not False:
+            if self.invalidated:
+                self._warn_invalidated()
+
+            if self.empty:
+                self._reset_empty()
+
+            return self.attr.fire_append_wo_mutation_event(
+                self.owner_state, self.owner_state.dict, item, initiator, key
+            )
+        else:
+            return item
+
+    def fire_append_event(self, item, initiator=None, key=NO_KEY):
+        """Notify that a entity has entered the collection.
+
+        Initiator is a token owned by the InstrumentedAttribute that
+        initiated the membership mutation, and should be left as None
+        unless you are passing along an initiator value from a chained
+        operation.
+
+        """
+        if initiator is not False:
+            if self.invalidated:
+                self._warn_invalidated()
+
+            if self.empty:
+                self._reset_empty()
+
+            return self.attr.fire_append_event(
+                self.owner_state, self.owner_state.dict, item, initiator, key
+            )
+        else:
+            return item
+
+    def _fire_remove_event_bulk(self, items, initiator=None, key=NO_KEY):
+        if not items:
+            return
+
+        if initiator is not False:
+            if self.invalidated:
+                self._warn_invalidated()
+
+            if self.empty:
+                self._reset_empty()
+
+            for item in items:
+                self.attr.fire_remove_event(
+                    self.owner_state,
+                    self.owner_state.dict,
+                    item,
+                    initiator,
+                    key,
+                )
+
+    def fire_remove_event(self, item, initiator=None, key=NO_KEY):
+        """Notify that a entity has been removed from the collection.
+
+        Initiator is the InstrumentedAttribute that initiated the membership
+        mutation, and should be left as None unless you are passing along
+        an initiator value from a chained operation.
+
+        """
+        if initiator is not False:
+            if self.invalidated:
+                self._warn_invalidated()
+
+            if self.empty:
+                self._reset_empty()
+
+            self.attr.fire_remove_event(
+                self.owner_state, self.owner_state.dict, item, initiator, key
+            )
+
+    def fire_pre_remove_event(self, initiator=None, key=NO_KEY):
+        """Notify that an entity is about to be removed from the collection.
+
+        Used by pop-style operations, where the item being removed is not
+        known until after the mutation occurs; fire_remove_event() is then
+        invoked afterwards with the popped item.
+
+        """
+        if self.invalidated:
+            self._warn_invalidated()
+        self.attr.fire_pre_remove_event(
+            self.owner_state,
+            self.owner_state.dict,
+            initiator=initiator,
+            key=key,
+        )
+
+    def __getstate__(self):
+        return {
+            "key": self._key,
+            "owner_state": self.owner_state,
+            "owner_cls": self.owner_state.class_,
+            "data": self.data,
+            "invalidated": self.invalidated,
+            "empty": self.empty,
+        }
+
+    def __setstate__(self, d):
+        self._key = d["key"]
+        self.owner_state = d["owner_state"]
+
+        # see note in constructor regarding this type: ignore
+        self._data = weakref.ref(d["data"])  # type: ignore
+
+        self._converter = d["data"]._sa_converter
+        d["data"]._sa_adapter = self
+        self.invalidated = d["invalidated"]
+        self.attr = getattr(d["owner_cls"], self._key).impl
+        self.empty = d.get("empty", False)
+
+
+def bulk_replace(values, existing_adapter, new_adapter, initiator=None):
+    """Load a new collection, firing events based on prior like membership.
+
+    Appends instances in ``values`` onto the ``new_adapter``. Events will be
+    fired for any instance not present in the ``existing_adapter``.  Any
+    instances in ``existing_adapter`` not present in ``values`` will have
+    remove events fired upon them.
+
+    :param values: An iterable of collection member instances
+
+    :param existing_adapter: A :class:`.CollectionAdapter` of
+     instances to be replaced
+
+    :param new_adapter: An empty :class:`.CollectionAdapter`
+     to load with ``values``
+
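+    For example (a sketch with hypothetical instances): given an existing
+    collection ``[a, b]`` and ``values=[b, c]``, ``b`` is carried into the
+    new collection without an append event (an "append without mutation"
+    event fires on the existing adapter instead), an append event fires
+    for ``c``, and a remove event fires for ``a``.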
+
+    """
+
+    assert isinstance(values, list)
+
+    idset = util.IdentitySet
+    existing_idset = idset(existing_adapter or ())
+    constants = existing_idset.intersection(values or ())
+    additions = idset(values or ()).difference(constants)
+    removals = existing_idset.difference(constants)
+
+    appender = new_adapter.bulk_appender()
+
+    for member in values or ():
+        if member in additions:
+            appender(member, _sa_initiator=initiator)
+        elif member in constants:
+            appender(member, _sa_initiator=False)
+
+    if existing_adapter:
+        existing_adapter._fire_append_wo_mutation_event_bulk(
+            constants, initiator=initiator
+        )
+        existing_adapter._fire_remove_event_bulk(removals, initiator=initiator)
+
+
+def prepare_instrumentation(
+    factory: Union[Type[Collection[Any]], _CollectionFactoryType],
+) -> _CollectionFactoryType:
+    """Prepare a callable for future use as a collection class factory.
+
+    Given a collection class factory (either a type or no-arg callable),
+    return another factory that will produce compatible instances when
+    called.
+
+    This function is responsible for converting collection_class=list
+    into the run-time behavior of collection_class=InstrumentedList.
+
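+    E.g., a quick editorial sketch of the conversion::
+
+        factory = prepare_instrumentation(list)
+        assert isinstance(factory(), InstrumentedList)
+
+        # a no-arg callable returning a builtin is adapted the same way
+        factory = prepare_instrumentation(lambda: set())
+        assert isinstance(factory(), InstrumentedSet)
+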
+    """
+
+    impl_factory: _CollectionFactoryType
+
+    # Convert a builtin to 'Instrumented*'
+    if factory in __canned_instrumentation:
+        impl_factory = __canned_instrumentation[factory]
+    else:
+        impl_factory = cast(_CollectionFactoryType, factory)
+
+    cls: Union[_CollectionFactoryType, Type[Collection[Any]]]
+
+    # Create a specimen
+    cls = type(impl_factory())
+
+    # Did factory callable return a builtin?
+    if cls in __canned_instrumentation:
+        # if so, just convert.
+        # in previous major releases, this codepath wasn't working and was
+        # not covered by tests.   prior to that it supplied a "wrapper"
+        # function that would return the class, though the rationale for this
+        # case is not known
+        impl_factory = __canned_instrumentation[cls]
+        cls = type(impl_factory())
+
+    # Instrument the class if needed.
+    if __instrumentation_mutex.acquire():
+        try:
+            if getattr(cls, "_sa_instrumented", None) != id(cls):
+                _instrument_class(cls)
+        finally:
+            __instrumentation_mutex.release()
+
+    return impl_factory
+
+
+def _instrument_class(cls):
+    """Modify methods in a class and install instrumentation."""
+
+    # In the normal call flow, a request for any of the 3 basic collection
+    # types is transformed into one of our trivial subclasses
+    # (e.g. InstrumentedList).  Catch anything else that sneaks in here...
+    if cls.__module__ == "__builtin__":
+        raise sa_exc.ArgumentError(
+            "Can not instrument a built-in type. Use a "
+            "subclass, even a trivial one."
+        )
+
+    roles, methods = _locate_roles_and_methods(cls)
+
+    _setup_canned_roles(cls, roles, methods)
+
+    _assert_required_roles(cls, roles, methods)
+
+    _set_collection_attributes(cls, roles, methods)
+
+
+def _locate_roles_and_methods(cls):
+    """search for _sa_instrument_role-decorated methods in
+    method resolution order, assign to roles.
+
+    """
+
+    roles: Dict[str, str] = {}
+    methods: Dict[str, Tuple[Optional[str], Optional[int], Optional[str]]] = {}
+
+    for supercls in cls.__mro__:
+        for name, method in vars(supercls).items():
+            if not callable(method):
+                continue
+
+            # note role declarations
+            if hasattr(method, "_sa_instrument_role"):
+                role = method._sa_instrument_role
+                assert role in (
+                    "appender",
+                    "remover",
+                    "iterator",
+                    "converter",
+                )
+                roles.setdefault(role, name)
+
+            # transfer instrumentation requests from decorated function
+            # to the combined queue
+            before: Optional[Tuple[str, int]] = None
+            after: Optional[str] = None
+
+            if hasattr(method, "_sa_instrument_before"):
+                op, argument = method._sa_instrument_before
+                assert op in ("fire_append_event", "fire_remove_event")
+                before = op, argument
+            if hasattr(method, "_sa_instrument_after"):
+                op = method._sa_instrument_after
+                assert op in ("fire_append_event", "fire_remove_event")
+                after = op
+            if before:
+                methods[name] = before + (after,)
+            elif after:
+                methods[name] = None, None, after
+    return roles, methods
+
+
+def _setup_canned_roles(cls, roles, methods):
+    """see if this class has "canned" roles based on a known
+    collection type (dict, set, list).  Apply those roles
+    as needed to the "roles" dictionary, and also
+    prepare "decorator" methods
+
+    """
+    collection_type = util.duck_type_collection(cls)
+    if collection_type in __interfaces:
+        assert collection_type is not None
+        canned_roles, decorators = __interfaces[collection_type]
+        for role, name in canned_roles.items():
+            roles.setdefault(role, name)
+
+        # apply ABC auto-decoration to methods that need it
+        for method, decorator in decorators.items():
+            fn = getattr(cls, method, None)
+            if (
+                fn
+                and method not in methods
+                and not hasattr(fn, "_sa_instrumented")
+            ):
+                setattr(cls, method, decorator(fn))
+
+
+def _assert_required_roles(cls, roles, methods):
+    """ensure all roles are present, and apply implicit instrumentation if
+    needed
+
+    """
+    if "appender" not in roles or not hasattr(cls, roles["appender"]):
+        raise sa_exc.ArgumentError(
+            "Type %s must elect an appender method to be "
+            "a collection class" % cls.__name__
+        )
+    elif roles["appender"] not in methods and not hasattr(
+        getattr(cls, roles["appender"]), "_sa_instrumented"
+    ):
+        methods[roles["appender"]] = ("fire_append_event", 1, None)
+
+    if "remover" not in roles or not hasattr(cls, roles["remover"]):
+        raise sa_exc.ArgumentError(
+            "Type %s must elect a remover method to be "
+            "a collection class" % cls.__name__
+        )
+    elif roles["remover"] not in methods and not hasattr(
+        getattr(cls, roles["remover"]), "_sa_instrumented"
+    ):
+        methods[roles["remover"]] = ("fire_remove_event", 1, None)
+
+    if "iterator" not in roles or not hasattr(cls, roles["iterator"]):
+        raise sa_exc.ArgumentError(
+            "Type %s must elect an iterator method to be "
+            "a collection class" % cls.__name__
+        )
+
+
+def _set_collection_attributes(cls, roles, methods):
+    """apply ad-hoc instrumentation from decorators, class-level defaults
+    and implicit role declarations
+
+    """
+    for method_name, (before, argument, after) in methods.items():
+        setattr(
+            cls,
+            method_name,
+            _instrument_membership_mutator(
+                getattr(cls, method_name), before, argument, after
+            ),
+        )
+    # intern the role map
+    for role, method_name in roles.items():
+        setattr(cls, "_sa_%s" % role, getattr(cls, method_name))
+
+    cls._sa_adapter = None
+
+    if not hasattr(cls, "_sa_converter"):
+        cls._sa_converter = None
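+    # record id(cls) rather than a boolean so that a subclass inheriting
+    # this attribute still compares as "not instrumented" and receives
+    # its own instrumentation pass (see prepare_instrumentation)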
+    cls._sa_instrumented = id(cls)
+
+
+def _instrument_membership_mutator(method, before, argument, after):
+    """Route method args and/or return value through the collection
+    adapter."""
+    # This isn't smart enough to handle @adds(1) for 'def fn(self, (a, b))'
+    if before:
+        fn_args = list(
+            util.flatten_iterator(inspect_getfullargspec(method)[0])
+        )
+        if isinstance(argument, int):
+            pos_arg = argument
+            named_arg = len(fn_args) > argument and fn_args[argument] or None
+        else:
+            if argument in fn_args:
+                pos_arg = fn_args.index(argument)
+            else:
+                pos_arg = None
+            named_arg = argument
+        del fn_args
+
+    def wrapper(*args, **kw):
+        if before:
+            if pos_arg is None:
+                if named_arg not in kw:
+                    raise sa_exc.ArgumentError(
+                        "Missing argument %s" % argument
+                    )
+                value = kw[named_arg]
+            else:
+                if len(args) > pos_arg:
+                    value = args[pos_arg]
+                elif named_arg in kw:
+                    value = kw[named_arg]
+                else:
+                    raise sa_exc.ArgumentError(
+                        "Missing argument %s" % argument
+                    )
+
+        initiator = kw.pop("_sa_initiator", None)
+        if initiator is False:
+            executor = None
+        else:
+            executor = args[0]._sa_adapter
+
+        if before and executor:
+            getattr(executor, before)(value, initiator)
+
+        if not after or not executor:
+            return method(*args, **kw)
+        else:
+            res = method(*args, **kw)
+            if res is not None:
+                getattr(executor, after)(res, initiator)
+            return res
+
+    wrapper._sa_instrumented = True  # type: ignore[attr-defined]
+    if hasattr(method, "_sa_instrument_role"):
+        wrapper._sa_instrument_role = method._sa_instrument_role  # type: ignore[attr-defined]  # noqa: E501
+    wrapper.__name__ = method.__name__
+    wrapper.__doc__ = method.__doc__
+    return wrapper
+
+
+def __set_wo_mutation(collection, item, _sa_initiator=None):
+    """Run set wo mutation events.
+
+    The collection is not mutated.
+
+    """
+    if _sa_initiator is not False:
+        executor = collection._sa_adapter
+        if executor:
+            executor.fire_append_wo_mutation_event(
+                item, _sa_initiator, key=None
+            )
+
+
+def __set(collection, item, _sa_initiator, key):
+    """Run set events.
+
+    This event always occurs before the collection is actually mutated.
+
+    """
+
+    if _sa_initiator is not False:
+        executor = collection._sa_adapter
+        if executor:
+            item = executor.fire_append_event(item, _sa_initiator, key=key)
+    return item
+
+
+def __del(collection, item, _sa_initiator, key):
+    """Run del events.
+
+    This event occurs before the collection is actually mutated, *except*
+    in the case of a pop operation, in which case it occurs afterwards.
+    For pop operations, the __before_pop hook is called before the
+    operation occurs.
+
+    """
+    if _sa_initiator is not False:
+        executor = collection._sa_adapter
+        if executor:
+            executor.fire_remove_event(item, _sa_initiator, key=key)
+
+
+def __before_pop(collection, _sa_initiator=None):
+    """An event which occurs on a before a pop() operation occurs."""
+    executor = collection._sa_adapter
+    if executor:
+        executor.fire_pre_remove_event(_sa_initiator)
+
+
+def _list_decorators() -> Dict[str, Callable[[_FN], _FN]]:
+    """Tailored instrumentation wrappers for any list-like class."""
+
+    def _tidy(fn):
+        fn._sa_instrumented = True
+        fn.__doc__ = getattr(list, fn.__name__).__doc__
+
+    def append(fn):
+        def append(self, item, _sa_initiator=None):
+            item = __set(self, item, _sa_initiator, NO_KEY)
+            fn(self, item)
+
+        _tidy(append)
+        return append
+
+    def remove(fn):
+        def remove(self, value, _sa_initiator=None):
+            __del(self, value, _sa_initiator, NO_KEY)
+            # testlib.pragma exempt:__eq__
+            fn(self, value)
+
+        _tidy(remove)
+        return remove
+
+    def insert(fn):
+        def insert(self, index, value):
+            value = __set(self, value, None, index)
+            fn(self, index, value)
+
+        _tidy(insert)
+        return insert
+
+    def __setitem__(fn):
+        def __setitem__(self, index, value):
+            if not isinstance(index, slice):
+                existing = self[index]
+                if existing is not None:
+                    __del(self, existing, None, index)
+                value = __set(self, value, None, index)
+                fn(self, index, value)
+            else:
+                # slice assignment requires __delitem__, insert, __len__
+                step = index.step or 1
+                start = index.start or 0
+                if start < 0:
+                    start += len(self)
+                if index.stop is not None:
+                    stop = index.stop
+                else:
+                    stop = len(self)
+                if stop < 0:
+                    stop += len(self)
+
+                if step == 1:
+                    if value is self:
+                        return
+                    for i in range(start, stop, step):
+                        if len(self) > start:
+                            del self[start]
+
+                    for i, item in enumerate(value):
+                        self.insert(i + start, item)
+                else:
+                    rng = list(range(start, stop, step))
+                    if len(value) != len(rng):
+                        raise ValueError(
+                            "attempt to assign sequence of size %s to "
+                            "extended slice of size %s"
+                            % (len(value), len(rng))
+                        )
+                    for i, item in zip(rng, value):
+                        self.__setitem__(i, item)
+
+        _tidy(__setitem__)
+        return __setitem__
+
+    def __delitem__(fn):
+        def __delitem__(self, index):
+            if not isinstance(index, slice):
+                item = self[index]
+                __del(self, item, None, index)
+                fn(self, index)
+            else:
+                # slice deletion requires a slice-grokking __getitem__
+                # for stepped deletion
+                # note: not breaking this into atomic dels
+                for item in self[index]:
+                    __del(self, item, None, index)
+                fn(self, index)
+
+        _tidy(__delitem__)
+        return __delitem__
+
+    def extend(fn):
+        def extend(self, iterable):
+            for value in list(iterable):
+                self.append(value)
+
+        _tidy(extend)
+        return extend
+
+    def __iadd__(fn):
+        def __iadd__(self, iterable):
+            # list.__iadd__ takes any iterable and seems to let TypeError
+            # raise as-is instead of returning NotImplemented
+            for value in list(iterable):
+                self.append(value)
+            return self
+
+        _tidy(__iadd__)
+        return __iadd__
+
+    def pop(fn):
+        def pop(self, index=-1):
+            __before_pop(self)
+            item = fn(self, index)
+            __del(self, item, None, index)
+            return item
+
+        _tidy(pop)
+        return pop
+
+    def clear(fn):
+        def clear(self, index=-1):
+            for item in self:
+                __del(self, item, None, index)
+            fn(self)
+
+        _tidy(clear)
+        return clear
+
+    # __imul__ : not wrapping this.  all members of the collection are already
+    # present, so no need to fire appends... wrapping it with an explicit
+    # decorator is still possible, so events on *= can be had if they're
+    # desired.  hard to imagine a use case for __imul__, though.
+
+    l = locals().copy()
+    l.pop("_tidy")
+    return l
+
+
+def _dict_decorators() -> Dict[str, Callable[[_FN], _FN]]:
+    """Tailored instrumentation wrappers for any dict-like mapping class."""
+
+    def _tidy(fn):
+        fn._sa_instrumented = True
+        fn.__doc__ = getattr(dict, fn.__name__).__doc__
+
+    def __setitem__(fn):
+        def __setitem__(self, key, value, _sa_initiator=None):
+            if key in self:
+                __del(self, self[key], _sa_initiator, key)
+            value = __set(self, value, _sa_initiator, key)
+            fn(self, key, value)
+
+        _tidy(__setitem__)
+        return __setitem__
+
+    def __delitem__(fn):
+        def __delitem__(self, key, _sa_initiator=None):
+            if key in self:
+                __del(self, self[key], _sa_initiator, key)
+            fn(self, key)
+
+        _tidy(__delitem__)
+        return __delitem__
+
+    def clear(fn):
+        def clear(self):
+            for key in self:
+                __del(self, self[key], None, key)
+            fn(self)
+
+        _tidy(clear)
+        return clear
+
+    def pop(fn):
+        def pop(self, key, default=NO_ARG):
+            __before_pop(self)
+            _to_del = key in self
+            if default is NO_ARG:
+                item = fn(self, key)
+            else:
+                item = fn(self, key, default)
+            if _to_del:
+                __del(self, item, None, key)
+            return item
+
+        _tidy(pop)
+        return pop
+
+    def popitem(fn):
+        def popitem(self):
+            __before_pop(self)
+            item = fn(self)
+            __del(self, item[1], None, 1)
+            return item
+
+        _tidy(popitem)
+        return popitem
+
+    def setdefault(fn):
+        def setdefault(self, key, default=None):
+            if key not in self:
+                self.__setitem__(key, default)
+                return default
+            else:
+                value = self.__getitem__(key)
+                if value is default:
+                    __set_wo_mutation(self, value, None)
+
+                return value
+
+        _tidy(setdefault)
+        return setdefault
+
+    def update(fn):
+        def update(self, __other=NO_ARG, **kw):
+            if __other is not NO_ARG:
+                if hasattr(__other, "keys"):
+                    for key in list(__other):
+                        if key not in self or self[key] is not __other[key]:
+                            self[key] = __other[key]
+                        else:
+                            __set_wo_mutation(self, __other[key], None)
+                else:
+                    for key, value in __other:
+                        if key not in self or self[key] is not value:
+                            self[key] = value
+                        else:
+                            __set_wo_mutation(self, value, None)
+            for key in kw:
+                if key not in self or self[key] is not kw[key]:
+                    self[key] = kw[key]
+                else:
+                    __set_wo_mutation(self, kw[key], None)
+
+        _tidy(update)
+        return update
+
+    l = locals().copy()
+    l.pop("_tidy")
+    return l
+
+
+_set_binop_bases = (set, frozenset)
+
+
+def _set_binops_check_strict(self: Any, obj: Any) -> bool:
+    """Allow only set, frozenset and self.__class__-derived
+    objects in binops."""
+    return isinstance(obj, _set_binop_bases + (self.__class__,))
+
+
+def _set_binops_check_loose(self: Any, obj: Any) -> bool:
+    """Allow anything set-like to participate in set binops."""
+    return (
+        isinstance(obj, _set_binop_bases + (self.__class__,))
+        or util.duck_type_collection(obj) == set
+    )
+
+
+def _set_decorators() -> Dict[str, Callable[[_FN], _FN]]:
+    """Tailored instrumentation wrappers for any set-like class."""
+
+    def _tidy(fn):
+        fn._sa_instrumented = True
+        fn.__doc__ = getattr(set, fn.__name__).__doc__
+
+    def add(fn):
+        def add(self, value, _sa_initiator=None):
+            if value not in self:
+                value = __set(self, value, _sa_initiator, NO_KEY)
+            else:
+                __set_wo_mutation(self, value, _sa_initiator)
+            # testlib.pragma exempt:__hash__
+            fn(self, value)
+
+        _tidy(add)
+        return add
+
+    def discard(fn):
+        def discard(self, value, _sa_initiator=None):
+            # testlib.pragma exempt:__hash__
+            if value in self:
+                __del(self, value, _sa_initiator, NO_KEY)
+                # testlib.pragma exempt:__hash__
+            fn(self, value)
+
+        _tidy(discard)
+        return discard
+
+    def remove(fn):
+        def remove(self, value, _sa_initiator=None):
+            # testlib.pragma exempt:__hash__
+            if value in self:
+                __del(self, value, _sa_initiator, NO_KEY)
+            # testlib.pragma exempt:__hash__
+            fn(self, value)
+
+        _tidy(remove)
+        return remove
+
+    def pop(fn):
+        def pop(self):
+            __before_pop(self)
+            item = fn(self)
+            # for set in particular, we have no way to access the item
+            # that will be popped before pop is called.
+            __del(self, item, None, NO_KEY)
+            return item
+
+        _tidy(pop)
+        return pop
+
+    def clear(fn):
+        def clear(self):
+            for item in list(self):
+                self.remove(item)
+
+        _tidy(clear)
+        return clear
+
+    def update(fn):
+        def update(self, value):
+            for item in value:
+                self.add(item)
+
+        _tidy(update)
+        return update
+
+    def __ior__(fn):
+        def __ior__(self, value):
+            if not _set_binops_check_strict(self, value):
+                return NotImplemented
+            for item in value:
+                self.add(item)
+            return self
+
+        _tidy(__ior__)
+        return __ior__
+
+    def difference_update(fn):
+        def difference_update(self, value):
+            for item in value:
+                self.discard(item)
+
+        _tidy(difference_update)
+        return difference_update
+
+    def __isub__(fn):
+        def __isub__(self, value):
+            if not _set_binops_check_strict(self, value):
+                return NotImplemented
+            for item in value:
+                self.discard(item)
+            return self
+
+        _tidy(__isub__)
+        return __isub__
+
+    def intersection_update(fn):
+        def intersection_update(self, other):
+            want, have = self.intersection(other), set(self)
+            remove, add = have - want, want - have
+
+            for item in remove:
+                self.remove(item)
+            for item in add:
+                self.add(item)
+
+        _tidy(intersection_update)
+        return intersection_update
+
+    def __iand__(fn):
+        def __iand__(self, other):
+            if not _set_binops_check_strict(self, other):
+                return NotImplemented
+            want, have = self.intersection(other), set(self)
+            remove, add = have - want, want - have
+
+            for item in remove:
+                self.remove(item)
+            for item in add:
+                self.add(item)
+            return self
+
+        _tidy(__iand__)
+        return __iand__
+
+    def symmetric_difference_update(fn):
+        def symmetric_difference_update(self, other):
+            want, have = self.symmetric_difference(other), set(self)
+            remove, add = have - want, want - have
+
+            for item in remove:
+                self.remove(item)
+            for item in add:
+                self.add(item)
+
+        _tidy(symmetric_difference_update)
+        return symmetric_difference_update
+
+    def __ixor__(fn):
+        def __ixor__(self, other):
+            if not _set_binops_check_strict(self, other):
+                return NotImplemented
+            want, have = self.symmetric_difference(other), set(self)
+            remove, add = have - want, want - have
+
+            for item in remove:
+                self.remove(item)
+            for item in add:
+                self.add(item)
+            return self
+
+        _tidy(__ixor__)
+        return __ixor__
+
+    l = locals().copy()
+    l.pop("_tidy")
+    return l
+
+
+class InstrumentedList(List[_T]):
+    """An instrumented version of the built-in list."""
+
+
+class InstrumentedSet(Set[_T]):
+    """An instrumented version of the built-in set."""
+
+
+class InstrumentedDict(Dict[_KT, _VT]):
+    """An instrumented version of the built-in dict."""
+
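+# Editorial sketch: these trivial subclasses are what mapped instances
+# actually hold when ``collection_class`` is a plain built-in, e.g.::
+#
+#     parent = Parent()       # hypothetical mapped class using list
+#     type(parent.children)   # -> InstrumentedList
+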
+
+__canned_instrumentation: util.immutabledict[Any, _CollectionFactoryType] = (
+    util.immutabledict(
+        {
+            list: InstrumentedList,
+            set: InstrumentedSet,
+            dict: InstrumentedDict,
+        }
+    )
+)
+
+__interfaces: util.immutabledict[
+    Any,
+    Tuple[
+        Dict[str, str],
+        Dict[str, Callable[..., Any]],
+    ],
+] = util.immutabledict(
+    {
+        list: (
+            {
+                "appender": "append",
+                "remover": "remove",
+                "iterator": "__iter__",
+            },
+            _list_decorators(),
+        ),
+        set: (
+            {"appender": "add", "remover": "remove", "iterator": "__iter__"},
+            _set_decorators(),
+        ),
+        # decorators are required for dicts and object collections.
+        dict: ({"iterator": "values"}, _dict_decorators()),
+    }
+)
+
+
+def __go(lcls):
+    global keyfunc_mapping, mapped_collection
+    global column_keyed_dict, column_mapped_collection
+    global MappedCollection, KeyFuncDict
+    global attribute_keyed_dict, attribute_mapped_collection
+
+    from .mapped_collection import keyfunc_mapping
+    from .mapped_collection import column_keyed_dict
+    from .mapped_collection import attribute_keyed_dict
+    from .mapped_collection import KeyFuncDict
+
+    from .mapped_collection import mapped_collection
+    from .mapped_collection import column_mapped_collection
+    from .mapped_collection import attribute_mapped_collection
+    from .mapped_collection import MappedCollection
+
+    # ensure instrumentation is associated with
+    # these built-in classes; if a user-defined class
+    # subclasses these and uses @internally_instrumented,
+    # the superclass is otherwise not instrumented.
+    # see [ticket:2406].
+    _instrument_class(InstrumentedList)
+    _instrument_class(InstrumentedSet)
+    _instrument_class(KeyFuncDict)
+
+
+__go(locals())
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/context.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/context.py
new file mode 100644
index 00000000..b04d6d48
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/context.py
@@ -0,0 +1,3336 @@
+# orm/context.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+from __future__ import annotations
+
+import itertools
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import Iterable
+from typing import List
+from typing import Optional
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import attributes
+from . import interfaces
+from . import loading
+from .base import _is_aliased_class
+from .interfaces import ORMColumnDescription
+from .interfaces import ORMColumnsClauseRole
+from .path_registry import PathRegistry
+from .util import _entity_corresponds_to
+from .util import _ORMJoin
+from .util import _TraceAdaptRole
+from .util import AliasedClass
+from .util import Bundle
+from .util import ORMAdapter
+from .util import ORMStatementAdapter
+from .. import exc as sa_exc
+from .. import future
+from .. import inspect
+from .. import sql
+from .. import util
+from ..sql import coercions
+from ..sql import expression
+from ..sql import roles
+from ..sql import util as sql_util
+from ..sql import visitors
+from ..sql._typing import _TP
+from ..sql._typing import is_dml
+from ..sql._typing import is_insert_update
+from ..sql._typing import is_select_base
+from ..sql.base import _select_iterables
+from ..sql.base import CacheableOptions
+from ..sql.base import CompileState
+from ..sql.base import Executable
+from ..sql.base import Generative
+from ..sql.base import Options
+from ..sql.dml import UpdateBase
+from ..sql.elements import GroupedElement
+from ..sql.elements import TextClause
+from ..sql.selectable import CompoundSelectState
+from ..sql.selectable import LABEL_STYLE_DISAMBIGUATE_ONLY
+from ..sql.selectable import LABEL_STYLE_NONE
+from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
+from ..sql.selectable import Select
+from ..sql.selectable import SelectLabelStyle
+from ..sql.selectable import SelectState
+from ..sql.selectable import TypedReturnsRows
+from ..sql.visitors import InternalTraversal
+
+if TYPE_CHECKING:
+    from ._typing import _InternalEntityType
+    from ._typing import OrmExecuteOptionsParameter
+    from .loading import PostLoad
+    from .mapper import Mapper
+    from .query import Query
+    from .session import _BindArguments
+    from .session import Session
+    from ..engine import Result
+    from ..engine.interfaces import _CoreSingleExecuteParams
+    from ..sql._typing import _ColumnsClauseArgument
+    from ..sql.compiler import SQLCompiler
+    from ..sql.dml import _DMLTableElement
+    from ..sql.elements import ColumnElement
+    from ..sql.selectable import _JoinTargetElement
+    from ..sql.selectable import _LabelConventionCallable
+    from ..sql.selectable import _SetupJoinsElement
+    from ..sql.selectable import ExecutableReturnsRows
+    from ..sql.selectable import SelectBase
+    from ..sql.type_api import TypeEngine
+
+_T = TypeVar("_T", bound=Any)
+_path_registry = PathRegistry.root
+
+_EMPTY_DICT = util.immutabledict()
+
+
+LABEL_STYLE_LEGACY_ORM = SelectLabelStyle.LABEL_STYLE_LEGACY_ORM
+
+
+class QueryContext:
+    __slots__ = (
+        "top_level_context",
+        "compile_state",
+        "query",
+        "user_passed_query",
+        "params",
+        "load_options",
+        "bind_arguments",
+        "execution_options",
+        "session",
+        "autoflush",
+        "populate_existing",
+        "invoke_all_eagers",
+        "version_check",
+        "refresh_state",
+        "create_eager_joins",
+        "propagated_loader_options",
+        "attributes",
+        "runid",
+        "partials",
+        "post_load_paths",
+        "identity_token",
+        "yield_per",
+        "loaders_require_buffering",
+        "loaders_require_uniquing",
+    )
+
+    runid: int
+    post_load_paths: Dict[PathRegistry, PostLoad]
+    compile_state: ORMCompileState
+
+    class default_load_options(Options):
+        _only_return_tuples = False
+        _populate_existing = False
+        _version_check = False
+        _invoke_all_eagers = True
+        _autoflush = True
+        _identity_token = None
+        _yield_per = None
+        _refresh_state = None
+        _lazy_loaded_from = None
+        _legacy_uniquing = False
+        _sa_top_level_orm_context = None
+        _is_user_refresh = False
+
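+    # an illustrative note (hypothetical stmt/session): most of the
+    # defaults above are driven by ORM execution options consumed by
+    # orm_pre_session_exec() further below, e.g.
+    #
+    #     session.execute(stmt, execution_options={"populate_existing": True})
+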
+    def __init__(
+        self,
+        compile_state: CompileState,
+        statement: Union[Select[Any], FromStatement[Any], UpdateBase],
+        user_passed_query: Union[
+            Select[Any],
+            FromStatement[Any],
+            UpdateBase,
+        ],
+        params: _CoreSingleExecuteParams,
+        session: Session,
+        load_options: Union[
+            Type[QueryContext.default_load_options],
+            QueryContext.default_load_options,
+        ],
+        execution_options: Optional[OrmExecuteOptionsParameter] = None,
+        bind_arguments: Optional[_BindArguments] = None,
+    ):
+        self.load_options = load_options
+        self.execution_options = execution_options or _EMPTY_DICT
+        self.bind_arguments = bind_arguments or _EMPTY_DICT
+        self.compile_state = compile_state
+        self.query = statement
+
+        # the query that the end user passed to Session.execute() or similar.
+        # this is usually the same as .query, except in the bulk_persistence
+        # routines where a separate FromStatement is manufactured in the
+        # compile stage; this allows differentiation in that case.
+        self.user_passed_query = user_passed_query
+
+        self.session = session
+        self.loaders_require_buffering = False
+        self.loaders_require_uniquing = False
+        self.params = params
+        self.top_level_context = load_options._sa_top_level_orm_context
+
+        cached_options = compile_state.select_statement._with_options
+        uncached_options = user_passed_query._with_options
+
+        # see issue #7447 , #8399 for some background
+        # propagated loader options will be present on loaded InstanceState
+        # objects under state.load_options and are typically used by
+        # LazyLoader to apply options to the SELECT statement it emits.
+        # For compile state options (i.e. loader strategy options), these
+        # need to line up with the ".load_path" attribute which in
+        # loader.py is pulled from context.compile_state.current_path.
+        # so, this means these options have to be the ones from the
+        # *cached* statement that's travelling with compile_state, not the
+        # *current* statement which won't match up for an ad-hoc
+        # AliasedClass
+        self.propagated_loader_options = tuple(
+            opt._adapt_cached_option_to_uncached_option(self, uncached_opt)
+            for opt, uncached_opt in zip(cached_options, uncached_options)
+            if opt.propagate_to_loaders
+        )
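+
+        # an illustrative case (hypothetical User/Address mapping): an
+        # option such as selectinload(User.addresses) on the statement has
+        # propagate_to_loaders=True, so it is carried along here and will
+        # still apply when lazy loaders later emit SELECT statements for
+        # the objects loaded by this query.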
+
+        self.attributes = dict(compile_state.attributes)
+
+        self.autoflush = load_options._autoflush
+        self.populate_existing = load_options._populate_existing
+        self.invoke_all_eagers = load_options._invoke_all_eagers
+        self.version_check = load_options._version_check
+        self.refresh_state = load_options._refresh_state
+        self.yield_per = load_options._yield_per
+        self.identity_token = load_options._identity_token
+
+    def _get_top_level_context(self) -> QueryContext:
+        return self.top_level_context or self
+
+
+_orm_load_exec_options = util.immutabledict(
+    {"_result_disable_adapt_to_context": True}
+)
+
+
+class AbstractORMCompileState(CompileState):
+    is_dml_returning = False
+
+    def _init_global_attributes(
+        self, statement, compiler, *, toplevel, process_criteria_for_toplevel
+    ):
+        self.attributes = {}
+
+        if compiler is None:
+            # this is the legacy / testing only ORM _compile_state() use case.
+            # there is no need to apply criteria options for this.
+            self.global_attributes = ga = {}
+            assert toplevel
+            return
+        else:
+            self.global_attributes = ga = compiler._global_attributes
+
+        if toplevel:
+            ga["toplevel_orm"] = True
+
+            if process_criteria_for_toplevel:
+                for opt in statement._with_options:
+                    if opt._is_criteria_option:
+                        opt.process_compile_state(self)
+
+            return
+        elif ga.get("toplevel_orm", False):
+            return
+
+        stack_0 = compiler.stack[0]
+
+        try:
+            toplevel_stmt = stack_0["selectable"]
+        except KeyError:
+            pass
+        else:
+            for opt in toplevel_stmt._with_options:
+                if opt._is_compile_state and opt._is_criteria_option:
+                    opt.process_compile_state(self)
+
+        ga["toplevel_orm"] = True
+
+    @classmethod
+    def create_for_statement(
+        cls,
+        statement: Executable,
+        compiler: SQLCompiler,
+        **kw: Any,
+    ) -> CompileState:
+        """Create a context for a statement given a :class:`.Compiler`.
+
+        This method is always invoked in the context of SQLCompiler.process().
+
+        For a Select object, this would be invoked from
+        SQLCompiler.visit_select(). For the special FromStatement object used
+        by Query to indicate "Query.from_statement()", this is called by
+        FromStatement._compiler_dispatch() that would be called by
+        SQLCompiler.process().
+        """
+        return super().create_for_statement(statement, compiler, **kw)
+
+    @classmethod
+    def orm_pre_session_exec(
+        cls,
+        session,
+        statement,
+        params,
+        execution_options,
+        bind_arguments,
+        is_pre_event,
+    ):
+        raise NotImplementedError()
+
+    @classmethod
+    def orm_execute_statement(
+        cls,
+        session,
+        statement,
+        params,
+        execution_options,
+        bind_arguments,
+        conn,
+    ) -> Result:
+        result = conn.execute(
+            statement, params or {}, execution_options=execution_options
+        )
+        return cls.orm_setup_cursor_result(
+            session,
+            statement,
+            params,
+            execution_options,
+            bind_arguments,
+            result,
+        )
+
+    @classmethod
+    def orm_setup_cursor_result(
+        cls,
+        session,
+        statement,
+        params,
+        execution_options,
+        bind_arguments,
+        result,
+    ):
+        raise NotImplementedError()
+
+
+class AutoflushOnlyORMCompileState(AbstractORMCompileState):
+    """ORM compile state that is a passthrough, except for autoflush."""
+
+    @classmethod
+    def orm_pre_session_exec(
+        cls,
+        session,
+        statement,
+        params,
+        execution_options,
+        bind_arguments,
+        is_pre_event,
+    ):
+        # consume result-level load_options.  These may have been set up
+        # in an ORMExecuteState hook
+        (
+            load_options,
+            execution_options,
+        ) = QueryContext.default_load_options.from_execution_options(
+            "_sa_orm_load_options",
+            {
+                "autoflush",
+            },
+            execution_options,
+            statement._execution_options,
+        )
+
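+        # illustrative note (hypothetical stmt/session): the autoflush step
+        # below can be disabled per execution, e.g.
+        # session.execute(stmt, execution_options={"autoflush": False})
+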
+        if not is_pre_event and load_options._autoflush:
+            session._autoflush()
+
+        return statement, execution_options
+
+    @classmethod
+    def orm_setup_cursor_result(
+        cls,
+        session,
+        statement,
+        params,
+        execution_options,
+        bind_arguments,
+        result,
+    ):
+        return result
+
+
+class ORMCompileState(AbstractORMCompileState):
+    class default_compile_options(CacheableOptions):
+        _cache_key_traversal = [
+            ("_use_legacy_query_style", InternalTraversal.dp_boolean),
+            ("_for_statement", InternalTraversal.dp_boolean),
+            ("_bake_ok", InternalTraversal.dp_boolean),
+            ("_current_path", InternalTraversal.dp_has_cache_key),
+            ("_enable_single_crit", InternalTraversal.dp_boolean),
+            ("_enable_eagerloads", InternalTraversal.dp_boolean),
+            ("_only_load_props", InternalTraversal.dp_plain_obj),
+            ("_set_base_alias", InternalTraversal.dp_boolean),
+            ("_for_refresh_state", InternalTraversal.dp_boolean),
+            ("_render_for_subquery", InternalTraversal.dp_boolean),
+            ("_is_star", InternalTraversal.dp_boolean),
+        ]
+
+        # set to True by default from Query._statement_20(), to indicate
+        # the rendered query should look like a legacy ORM query.  right
+        # now this basically indicates we should use tablename_columnname
+        # style labels.  generally indicates the statement originated
+        # from a Query object.
+        _use_legacy_query_style = False
+
+        # set *only* when we are coming from the Query.statement
+        # accessor, or a Query-level equivalent such as
+        # query.subquery().  this supersedes "toplevel".
+        _for_statement = False
+
+        _bake_ok = True
+        _current_path = _path_registry
+        _enable_single_crit = True
+        _enable_eagerloads = True
+        _only_load_props = None
+        _set_base_alias = False
+        _for_refresh_state = False
+        _render_for_subquery = False
+        _is_star = False
+
+    attributes: Dict[Any, Any]
+    global_attributes: Dict[Any, Any]
+
+    statement: Union[Select[Any], FromStatement[Any], UpdateBase]
+    select_statement: Union[Select[Any], FromStatement[Any], UpdateBase]
+    _entities: List[_QueryEntity]
+    _polymorphic_adapters: Dict[_InternalEntityType, ORMAdapter]
+    compile_options: Union[
+        Type[default_compile_options], default_compile_options
+    ]
+    _primary_entity: Optional[_QueryEntity]
+    use_legacy_query_style: bool
+    _label_convention: _LabelConventionCallable
+    primary_columns: List[ColumnElement[Any]]
+    secondary_columns: List[ColumnElement[Any]]
+    dedupe_columns: Set[ColumnElement[Any]]
+    create_eager_joins: List[
+        # TODO: this structure is set up by JoinedLoader
+        Tuple[Any, ...]
+    ]
+    current_path: PathRegistry = _path_registry
+    _has_mapper_entities = False
+
+    def __init__(self, *arg, **kw):
+        raise NotImplementedError()
+
+    @classmethod
+    def create_for_statement(
+        cls,
+        statement: Executable,
+        compiler: SQLCompiler,
+        **kw: Any,
+    ) -> ORMCompileState:
+        return cls._create_orm_context(
+            cast("Union[Select, FromStatement]", statement),
+            toplevel=not compiler.stack,
+            compiler=compiler,
+            **kw,
+        )
+
+    @classmethod
+    def _create_orm_context(
+        cls,
+        statement: Union[Select, FromStatement],
+        *,
+        toplevel: bool,
+        compiler: Optional[SQLCompiler],
+        **kw: Any,
+    ) -> ORMCompileState:
+        raise NotImplementedError()
+
+    def _append_dedupe_col_collection(self, obj, col_collection):
+        dedupe = self.dedupe_columns
+        if obj not in dedupe:
+            dedupe.add(obj)
+            col_collection.append(obj)
+
+    @classmethod
+    def _column_naming_convention(
+        cls, label_style: SelectLabelStyle, legacy: bool
+    ) -> _LabelConventionCallable:
+        if legacy:
+
+            def name(col, col_name=None):
+                if col_name:
+                    return col_name
+                else:
+                    return getattr(col, "key")
+
+            return name
+        else:
+            return SelectState._column_naming_convention(label_style)
+
+    @classmethod
+    def get_column_descriptions(cls, statement):
+        return _column_descriptions(statement)
+
+    @classmethod
+    def orm_pre_session_exec(
+        cls,
+        session,
+        statement,
+        params,
+        execution_options,
+        bind_arguments,
+        is_pre_event,
+    ):
+        # consume result-level load_options.  These may have been set up
+        # in an ORMExecuteState hook
+        (
+            load_options,
+            execution_options,
+        ) = QueryContext.default_load_options.from_execution_options(
+            "_sa_orm_load_options",
+            {
+                "populate_existing",
+                "autoflush",
+                "yield_per",
+                "identity_token",
+                "sa_top_level_orm_context",
+            },
+            execution_options,
+            statement._execution_options,
+        )
+
+        # default execution options for ORM results:
+        # 1. _result_disable_adapt_to_context=True
+        #    this will disable the ResultSetMetadata._adapt_to_context()
+        #    step which we don't need, as we have result processors cached
+        #    against the original SELECT statement before caching.
+
+        if "sa_top_level_orm_context" in execution_options:
+            ctx = execution_options["sa_top_level_orm_context"]
+            execution_options = ctx.query._execution_options.merge_with(
+                ctx.execution_options, execution_options
+            )
+
+        if not execution_options:
+            execution_options = _orm_load_exec_options
+        else:
+            execution_options = execution_options.union(_orm_load_exec_options)
+
+        # would have been placed here by legacy Query only
+        if load_options._yield_per:
+            execution_options = execution_options.union(
+                {"yield_per": load_options._yield_per}
+            )
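+
+        # illustrative note: outside of legacy Query, the same effect comes
+        # from session.execute(stmt, execution_options={"yield_per": 100})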
+
+        if (
+            getattr(statement._compile_options, "_current_path", None)
+            and len(statement._compile_options._current_path) > 10
+            and execution_options.get("compiled_cache", True) is not None
+        ):
+            execution_options: util.immutabledict[str, Any] = (
+                execution_options.union(
+                    {
+                        "compiled_cache": None,
+                        "_cache_disable_reason": "excess depth for "
+                        "ORM loader options",
+                    }
+                )
+            )
+
+        bind_arguments["clause"] = statement
+
+        # new in 1.4 - the coercions system is leveraged to allow the
+        # "subject" mapper of a statement to be propagated to the top
+        # as the statement is built.  the "subject" mapper is the object
+        # generally used as an identifier for multi-database schemes.
+
+        # we are here based on the fact that _propagate_attrs contains
+        # "compile_state_plugin": "orm".   The "plugin_subject"
+        # needs to be present as well.
+
+        try:
+            plugin_subject = statement._propagate_attrs["plugin_subject"]
+        except KeyError:
+            assert False, "statement had 'orm' plugin but no plugin_subject"
+        else:
+            if plugin_subject:
+                bind_arguments["mapper"] = plugin_subject.mapper
+
+        if not is_pre_event and load_options._autoflush:
+            session._autoflush()
+
+        return statement, execution_options
+
+    @classmethod
+    def orm_setup_cursor_result(
+        cls,
+        session,
+        statement,
+        params,
+        execution_options,
+        bind_arguments,
+        result,
+    ):
+        execution_context = result.context
+        compile_state = execution_context.compiled.compile_state
+
+        # cover edge case where ORM entities used in legacy select
+        # were passed to session.execute:
+        # session.execute(legacy_select([User.id, User.name]))
+        # see test_query->test_legacy_tuple_old_select
+
+        load_options = execution_options.get(
+            "_sa_orm_load_options", QueryContext.default_load_options
+        )
+
+        if compile_state.compile_options._is_star:
+            return result
+
+        querycontext = QueryContext(
+            compile_state,
+            statement,
+            statement,
+            params,
+            session,
+            load_options,
+            execution_options,
+            bind_arguments,
+        )
+        return loading.instances(result, querycontext)
+
+    @property
+    def _lead_mapper_entities(self):
+        """return all _MapperEntity objects in the lead entities collection.
+
+        Does **not** include entities that have been replaced by
+        with_entities(), with_only_columns()
+
+        """
+        return [
+            ent for ent in self._entities if isinstance(ent, _MapperEntity)
+        ]
+
+    def _create_with_polymorphic_adapter(self, ext_info, selectable):
+        """given MapperEntity or ORMColumnEntity, setup polymorphic loading
+        if called for by the Mapper.
+
+        As of #8168 in 2.0.0rc1, polymorphic adapters, which greatly increase
+        the complexity of the query creation process, are not used at all
+        except in the quasi-legacy cases of with_polymorphic referring to an
+        alias and/or subquery. This would apply to concrete polymorphic
+        loading, and joined inheritance where a subquery is
+        passed to with_polymorphic (which is completely unnecessary in modern
+        use).
+
+        """
+        if (
+            not ext_info.is_aliased_class
+            and ext_info.mapper.persist_selectable
+            not in self._polymorphic_adapters
+        ):
+            for mp in ext_info.mapper.iterate_to_root():
+                self._mapper_loads_polymorphically_with(
+                    mp,
+                    ORMAdapter(
+                        _TraceAdaptRole.WITH_POLYMORPHIC_ADAPTER,
+                        mp,
+                        equivalents=mp._equivalent_columns,
+                        selectable=selectable,
+                    ),
+                )
+
+    def _mapper_loads_polymorphically_with(self, mapper, adapter):
+        for m2 in mapper._with_polymorphic_mappers or [mapper]:
+            self._polymorphic_adapters[m2] = adapter
+
+            for m in m2.iterate_to_root():
+                self._polymorphic_adapters[m.local_table] = adapter
+
+    @classmethod
+    def _create_entities_collection(cls, query, legacy):
+        raise NotImplementedError(
+            "this method only works for ORMSelectCompileState"
+        )
+
+
+class _DMLReturningColFilter:
+    """a base for an adapter used for the DML RETURNING cases
+
+    Has a subset of the interface used by
+    :class:`.ORMAdapter` and is used for :class:`._QueryEntity`
+    instances to set up their columns as used in RETURNING for a
+    DML statement.
+
+    """
+
+    __slots__ = ("mapper", "columns", "__weakref__")
+
+    def __init__(self, target_mapper, immediate_dml_mapper):
+        if (
+            immediate_dml_mapper is not None
+            and target_mapper.local_table
+            is not immediate_dml_mapper.local_table
+        ):
+            # joined inh, or in theory other kinds of multi-table mappings
+            self.mapper = immediate_dml_mapper
+        else:
+            # single inh, normal mappings, etc.
+            self.mapper = target_mapper
+        self.columns = util.WeakPopulateDict(
+            self.adapt_check_present  # type: ignore
+        )
+
+    def __call__(self, col, as_filter):
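+        # scan the given expression for mapped columns; if any one of them
+        # adapts, return the expression unchanged.  the for/else falls
+        # through to return None when no adaptable column is found.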
+        for cc in sql_util._find_columns(col):
+            c2 = self.adapt_check_present(cc)
+            if c2 is not None:
+                return col
+        else:
+            return None
+
+    def adapt_check_present(self, col):
+        raise NotImplementedError()
+
+
+class _DMLBulkInsertReturningColFilter(_DMLReturningColFilter):
+    """an adapter used for the DML RETURNING case specifically
+    for ORM bulk insert (or any hypothetical DML that is splitting out a class
+    hierarchy among multiple DML statements....ORM bulk insert is the only
+    example right now)
+
+    its main job is to limit the columns in a RETURNING to only a specific
+    mapped table in a hierarchy.
+
+    """
+
+    def adapt_check_present(self, col):
+        mapper = self.mapper
+        prop = mapper._columntoproperty.get(col, None)
+        if prop is None:
+            return None
+        return mapper.local_table.c.corresponding_column(col)
+
+
+class _DMLUpdateDeleteReturningColFilter(_DMLReturningColFilter):
+    """an adapter used for the DML RETURNING case specifically
+    for ORM enabled UPDATE/DELETE
+
+    its main job is to limit the columns in a RETURNING to include
+    only direct persisted columns from the immediate selectable, not
+    expressions like column_property(), or to also allow columns from other
+    mappers for the UPDATE..FROM use case.
+
+    """
+
+    def adapt_check_present(self, col):
+        mapper = self.mapper
+        prop = mapper._columntoproperty.get(col, None)
+        if prop is not None:
+            # if the col is from the immediate mapper, only return a persisted
+            # column, not any kind of column_property expression
+            return mapper.persist_selectable.c.corresponding_column(col)
+
+        # if the col is from some other mapper, just return it, assume the
+        # user knows what they are doing
+        return col
+
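+# an illustrative case (hypothetical User mapping): given
+#
+#     update(User).values(name="x").returning(User.id, User.name)
+#
+# User.id and User.name adapt to persisted columns and pass through this
+# filter, while a column_property() expression would be filtered out.
+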
+
+@sql.base.CompileState.plugin_for("orm", "orm_from_statement")
+class ORMFromStatementCompileState(ORMCompileState):
+    _from_obj_alias = None
+    _has_mapper_entities = False
+
+    statement_container: FromStatement
+    requested_statement: Union[SelectBase, TextClause, UpdateBase]
+    dml_table: Optional[_DMLTableElement] = None
+
+    _has_orm_entities = False
+    multi_row_eager_loaders = False
+    eager_adding_joins = False
+    compound_eager_adapter = None
+
+    extra_criteria_entities = _EMPTY_DICT
+    eager_joins = _EMPTY_DICT
+
+    @classmethod
+    def _create_orm_context(
+        cls,
+        statement: Union[Select, FromStatement],
+        *,
+        toplevel: bool,
+        compiler: Optional[SQLCompiler],
+        **kw: Any,
+    ) -> ORMFromStatementCompileState:
+        statement_container = statement
+
+        assert isinstance(statement_container, FromStatement)
+
+        if compiler is not None and compiler.stack:
+            raise sa_exc.CompileError(
+                "The ORM FromStatement construct only supports being "
+                "invoked as the topmost statement, as it is only intended to "
+                "define how result rows should be returned."
+            )
+
+        self = cls.__new__(cls)
+        self._primary_entity = None
+
+        self.use_legacy_query_style = (
+            statement_container._compile_options._use_legacy_query_style
+        )
+        self.statement_container = self.select_statement = statement_container
+        self.requested_statement = statement = statement_container.element
+
+        if statement.is_dml:
+            self.dml_table = statement.table
+            self.is_dml_returning = True
+
+        self._entities = []
+        self._polymorphic_adapters = {}
+
+        self.compile_options = statement_container._compile_options
+
+        if (
+            self.use_legacy_query_style
+            and isinstance(statement, expression.SelectBase)
+            and not statement._is_textual
+            and not statement.is_dml
+            and statement._label_style is LABEL_STYLE_NONE
+        ):
+            self.statement = statement.set_label_style(
+                LABEL_STYLE_TABLENAME_PLUS_COL
+            )
+        else:
+            self.statement = statement
+
+        self._label_convention = self._column_naming_convention(
+            (
+                statement._label_style
+                if not statement._is_textual and not statement.is_dml
+                else LABEL_STYLE_NONE
+            ),
+            self.use_legacy_query_style,
+        )
+
+        _QueryEntity.to_compile_state(
+            self,
+            statement_container._raw_columns,
+            self._entities,
+            is_current_entities=True,
+        )
+
+        self.current_path = statement_container._compile_options._current_path
+
+        self._init_global_attributes(
+            statement_container,
+            compiler,
+            process_criteria_for_toplevel=False,
+            toplevel=True,
+        )
+
+        if statement_container._with_options:
+            for opt in statement_container._with_options:
+                if opt._is_compile_state:
+                    opt.process_compile_state(self)
+
+        if statement_container._with_context_options:
+            for fn, key in statement_container._with_context_options:
+                fn(self)
+
+        self.primary_columns = []
+        self.secondary_columns = []
+        self.dedupe_columns = set()
+        self.create_eager_joins = []
+        self._fallback_from_clauses = []
+
+        self.order_by = None
+
+        if isinstance(self.statement, expression.TextClause):
+            # TextClause has no "column" objects at all.  for this case,
+            # we generate columns from our _QueryEntity objects, then
+            # flip on all the "please match no matter what" parameters.
+            self.extra_criteria_entities = {}
+
+            for entity in self._entities:
+                entity.setup_compile_state(self)
+
+            compiler._ordered_columns = compiler._textual_ordered_columns = (
+                False
+            )
+
+            # enable looser result column matching.  this is shown to be
+            # needed by test_query.py::TextTest
+            compiler._loose_column_name_matching = True
+
+            for c in self.primary_columns:
+                compiler.process(
+                    c,
+                    within_columns_clause=True,
+                    add_to_result_map=compiler._add_to_result_map,
+                )
+        else:
+            # for everyone else - Select, Insert, Update, TextualSelect -
+            # they have column objects already.  After much
+            # experimentation here, the best approach seems to be to use
+            # those columns completely and not interfere with the compiler
+            # at all; just in ORM land, use an adapter to convert from
+            # our ORM columns to whatever columns are in the statement,
+            # before we look in the result row. Adapt on names
+            # to accept cases such as issue #9217, however also allow
+            # this to be overridden for cases such as #9273.
+            self._from_obj_alias = ORMStatementAdapter(
+                _TraceAdaptRole.ADAPT_FROM_STATEMENT,
+                self.statement,
+                adapt_on_names=statement_container._adapt_on_names,
+            )
+
+        return self
+
+    def _adapt_col_list(self, cols, current_adapter):
+        return cols
+
+    def _get_current_adapter(self):
+        return None
+
+    def setup_dml_returning_compile_state(self, dml_mapper):
+        """used by BulkORMInsert, Update, Delete to set up a handler
+        for RETURNING to return ORM objects and expressions
+
+        """
+        target_mapper = self.statement._propagate_attrs.get(
+            "plugin_subject", None
+        )
+
+        if self.statement.is_insert:
+            adapter = _DMLBulkInsertReturningColFilter(
+                target_mapper, dml_mapper
+            )
+        elif self.statement.is_update or self.statement.is_delete:
+            adapter = _DMLUpdateDeleteReturningColFilter(
+                target_mapper, dml_mapper
+            )
+        else:
+            adapter = None
+
+        if self.compile_options._is_star and (len(self._entities) != 1):
+            raise sa_exc.CompileError(
+                "Can't generate ORM query that includes multiple expressions "
+                "at the same time as '*'; query for '*' alone if present"
+            )
+
+        for entity in self._entities:
+            entity.setup_dml_returning_compile_state(self, adapter)
+
+
+class FromStatement(GroupedElement, Generative, TypedReturnsRows[_TP]):
+    """Core construct that represents a load of ORM objects from various
+    :class:`.ReturnsRows` and other classes including:
+
+    :class:`.Select`, :class:`.TextClause`, :class:`.TextualSelect`,
+    :class:`.CompoundSelect`, :class:`.Insert`, :class:`.Update`,
+    and in theory, :class:`.Delete`.
+
+    """
+
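+    # an illustrative construction (hypothetical User mapping): this is
+    # the construct produced by select(User).from_statement(text(...)) and
+    # by the legacy Query.from_statement(); the given entities describe
+    # how rows of the wrapped statement map back to ORM results.
+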
+    __visit_name__ = "orm_from_statement"
+
+    _compile_options = ORMFromStatementCompileState.default_compile_options
+
+    _compile_state_factory = ORMFromStatementCompileState.create_for_statement
+
+    _for_update_arg = None
+
+    element: Union[ExecutableReturnsRows, TextClause]
+
+    _adapt_on_names: bool
+
+    _traverse_internals = [
+        ("_raw_columns", InternalTraversal.dp_clauseelement_list),
+        ("element", InternalTraversal.dp_clauseelement),
+    ] + Executable._executable_traverse_internals
+
+    _cache_key_traversal = _traverse_internals + [
+        ("_compile_options", InternalTraversal.dp_has_cache_key)
+    ]
+
+    is_from_statement = True
+
+    def __init__(
+        self,
+        entities: Iterable[_ColumnsClauseArgument[Any]],
+        element: Union[ExecutableReturnsRows, TextClause],
+        _adapt_on_names: bool = True,
+    ):
+        self._raw_columns = [
+            coercions.expect(
+                roles.ColumnsClauseRole,
+                ent,
+                apply_propagate_attrs=self,
+                post_inspect=True,
+            )
+            for ent in util.to_list(entities)
+        ]
+        self.element = element
+        self.is_dml = element.is_dml
+        self.is_select = element.is_select
+        self.is_delete = element.is_delete
+        self.is_insert = element.is_insert
+        self.is_update = element.is_update
+        self._label_style = (
+            element._label_style if is_select_base(element) else None
+        )
+        self._adapt_on_names = _adapt_on_names
+
+    def _compiler_dispatch(self, compiler, **kw):
+        """provide a fixed _compiler_dispatch method.
+
+        This is roughly similar to using the sqlalchemy.ext.compiler
+        ``@compiles`` extension.
+
+        """
+
+        compile_state = self._compile_state_factory(self, compiler, **kw)
+
+        toplevel = not compiler.stack
+
+        if toplevel:
+            compiler.compile_state = compile_state
+
+        return compiler.process(compile_state.statement, **kw)
+
+    @property
+    def column_descriptions(self):
+        """Return a :term:`plugin-enabled` 'column descriptions' structure
+        referring to the columns which are SELECTed by this statement.
+
+        See the section :ref:`queryguide_inspection` for an overview
+        of this feature.
+
+        .. seealso::
+
+            :ref:`queryguide_inspection` - ORM background
+
+        """
+        meth = cast(
+            ORMSelectCompileState, SelectState.get_plugin_class(self)
+        ).get_column_descriptions
+        return meth(self)
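+
+    # an illustrative shape (hypothetical User mapping): for a statement
+    # selecting the User entity, each description is a dict along the
+    # lines of {"name": "User", "type": User, "expr": User, "entity": User}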
+
+    def _ensure_disambiguated_names(self):
+        return self
+
+    def get_children(self, **kw):
+        yield from itertools.chain.from_iterable(
+            element._from_objects for element in self._raw_columns
+        )
+        yield from super().get_children(**kw)
+
+    @property
+    def _all_selected_columns(self):
+        return self.element._all_selected_columns
+
+    @property
+    def _return_defaults(self):
+        return self.element._return_defaults if is_dml(self.element) else None
+
+    @property
+    def _returning(self):
+        return self.element._returning if is_dml(self.element) else None
+
+    @property
+    def _inline(self):
+        return self.element._inline if is_insert_update(self.element) else None
+
+
+@sql.base.CompileState.plugin_for("orm", "compound_select")
+class CompoundSelectCompileState(
+    AutoflushOnlyORMCompileState, CompoundSelectState
+):
+    pass
+
+
+@sql.base.CompileState.plugin_for("orm", "select")
+class ORMSelectCompileState(ORMCompileState, SelectState):
+    _already_joined_edges = ()
+
+    _memoized_entities = _EMPTY_DICT
+
+    _from_obj_alias = None
+    _has_mapper_entities = False
+
+    _has_orm_entities = False
+    multi_row_eager_loaders = False
+    eager_adding_joins = False
+    compound_eager_adapter = None
+
+    correlate = None
+    correlate_except = None
+    _where_criteria = ()
+    _having_criteria = ()
+
+    @classmethod
+    def _create_orm_context(
+        cls,
+        statement: Union[Select, FromStatement],
+        *,
+        toplevel: bool,
+        compiler: Optional[SQLCompiler],
+        **kw: Any,
+    ) -> ORMSelectCompileState:
+
+        self = cls.__new__(cls)
+
+        select_statement = statement
+
+        # if we are a select() that was never a legacy Query, we won't
+        # have ORM level compile options.
+        statement._compile_options = cls.default_compile_options.safe_merge(
+            statement._compile_options
+        )
+
+        if select_statement._execution_options:
+            # execution options should not impact the compilation of a
+            # query, and at the moment subqueryloader is putting some things
+            # in here that we explicitly don't want stuck in a cache.
+            self.select_statement = select_statement._clone()
+            self.select_statement._execution_options = util.immutabledict()
+        else:
+            self.select_statement = select_statement
+
+        # indicates this select() came from Query.statement
+        self.for_statement = select_statement._compile_options._for_statement
+
+        # generally if we are from Query or directly from a select()
+        self.use_legacy_query_style = (
+            select_statement._compile_options._use_legacy_query_style
+        )
+
+        self._entities = []
+        self._primary_entity = None
+        self._polymorphic_adapters = {}
+
+        self.compile_options = select_statement._compile_options
+
+        if not toplevel:
+            # for subqueries, turn off eagerloads and set
+            # "render_for_subquery".
+            self.compile_options += {
+                "_enable_eagerloads": False,
+                "_render_for_subquery": True,
+            }
+
+        # determine label style.   we can make different decisions here.
+        # at the moment, trying to see if we can always use DISAMBIGUATE_ONLY
+        # rather than LABEL_STYLE_NONE, and if we can use disambiguate style
+        # for new style ORM selects too.
+        if (
+            self.use_legacy_query_style
+            and self.select_statement._label_style is LABEL_STYLE_LEGACY_ORM
+        ):
+            if not self.for_statement:
+                self.label_style = LABEL_STYLE_TABLENAME_PLUS_COL
+            else:
+                self.label_style = LABEL_STYLE_DISAMBIGUATE_ONLY
+        else:
+            self.label_style = self.select_statement._label_style
+
+        if select_statement._memoized_select_entities:
+            self._memoized_entities = {
+                memoized_entities: _QueryEntity.to_compile_state(
+                    self,
+                    memoized_entities._raw_columns,
+                    [],
+                    is_current_entities=False,
+                )
+                for memoized_entities in (
+                    select_statement._memoized_select_entities
+                )
+            }
+
+        # label_convention is stateful and will yield deduping keys if it
+        # sees the same key twice.  therefore it's important that it is not
+        # invoked for the above "memoized" entities that aren't actually
+        # in the columns clause
+        self._label_convention = self._column_naming_convention(
+            statement._label_style, self.use_legacy_query_style
+        )
+
+        _QueryEntity.to_compile_state(
+            self,
+            select_statement._raw_columns,
+            self._entities,
+            is_current_entities=True,
+        )
+
+        self.current_path = select_statement._compile_options._current_path
+
+        self.eager_order_by = ()
+
+        self._init_global_attributes(
+            select_statement,
+            compiler,
+            toplevel=toplevel,
+            process_criteria_for_toplevel=False,
+        )
+
+        if toplevel and (
+            select_statement._with_options
+            or select_statement._memoized_select_entities
+        ):
+            for (
+                memoized_entities
+            ) in select_statement._memoized_select_entities:
+                for opt in memoized_entities._with_options:
+                    if opt._is_compile_state:
+                        opt.process_compile_state_replaced_entities(
+                            self,
+                            [
+                                ent
+                                for ent in self._memoized_entities[
+                                    memoized_entities
+                                ]
+                                if isinstance(ent, _MapperEntity)
+                            ],
+                        )
+
+            for opt in self.select_statement._with_options:
+                if opt._is_compile_state:
+                    opt.process_compile_state(self)
+
+        # uncomment to print out the context.attributes structure
+        # after it's been set up above
+        # self._dump_option_struct()
+
+        if select_statement._with_context_options:
+            for fn, key in select_statement._with_context_options:
+                fn(self)
+
+        self.primary_columns = []
+        self.secondary_columns = []
+        self.dedupe_columns = set()
+        self.eager_joins = {}
+        self.extra_criteria_entities = {}
+        self.create_eager_joins = []
+        self._fallback_from_clauses = []
+
+        # normalize the FROM clauses early by themselves, as this makes
+        # it an easier job when we need to assemble a JOIN onto these,
+        # for select.join() as well as joinedload().   As of 1.4 there are now
+        # potentially more complex sets of FROM objects here as the use
+        # of lambda statements for lazyload, load_on_pk etc. uses more
+        # cloning of the select() construct.  See #6495
+        self.from_clauses = self._normalize_froms(
+            info.selectable for info in select_statement._from_obj
+        )
+
+        # this is a fairly arbitrary break into a second method,
+        # so it might be nicer to break up create_for_statement()
+        # and _setup_for_generate into three or four logical sections
+        self._setup_for_generate()
+
+        SelectState.__init__(self, self.statement, compiler, **kw)
+        return self
+
+    def _dump_option_struct(self):
+        print("\n---------------------------------------------------\n")
+        print(f"current path: {self.current_path}")
+        for key in self.attributes:
+            if isinstance(key, tuple) and key[0] == "loader":
+                print(f"\nLoader:           {PathRegistry.coerce(key[1])}")
+                print(f"    {self.attributes[key]}")
+                print(f"    {self.attributes[key].__dict__}")
+            elif isinstance(key, tuple) and key[0] == "path_with_polymorphic":
+                print(f"\nWith Polymorphic: {PathRegistry.coerce(key[1])}")
+                print(f"    {self.attributes[key]}")
+
+    def _setup_for_generate(self):
+        query = self.select_statement
+
+        self.statement = None
+        self._join_entities = ()
+
+        if self.compile_options._set_base_alias:
+            # legacy Query only
+            self._set_select_from_alias()
+
+        for memoized_entities in query._memoized_select_entities:
+            if memoized_entities._setup_joins:
+                self._join(
+                    memoized_entities._setup_joins,
+                    self._memoized_entities[memoized_entities],
+                )
+
+        if query._setup_joins:
+            self._join(query._setup_joins, self._entities)
+
+        current_adapter = self._get_current_adapter()
+
+        if query._where_criteria:
+            self._where_criteria = query._where_criteria
+
+            if current_adapter:
+                self._where_criteria = tuple(
+                    current_adapter(crit, True)
+                    for crit in self._where_criteria
+                )
+
+        # TODO: some complexity with order_by here was due to mapper.order_by.
+        # now that this is removed we can hopefully make order_by /
+        # group_by act identically to how they are in Core select.
+        self.order_by = (
+            self._adapt_col_list(query._order_by_clauses, current_adapter)
+            if current_adapter and query._order_by_clauses not in (None, False)
+            else query._order_by_clauses
+        )
+
+        if query._having_criteria:
+            self._having_criteria = tuple(
+                current_adapter(crit, True) if current_adapter else crit
+                for crit in query._having_criteria
+            )
+
+        self.group_by = (
+            self._adapt_col_list(
+                util.flatten_iterator(query._group_by_clauses), current_adapter
+            )
+            if current_adapter and query._group_by_clauses not in (None, False)
+            else query._group_by_clauses or None
+        )
+
+        if self.eager_order_by:
+            adapter = self.from_clauses[0]._target_adapter
+            self.eager_order_by = adapter.copy_and_process(self.eager_order_by)
+
+        if query._distinct_on:
+            self.distinct_on = self._adapt_col_list(
+                query._distinct_on, current_adapter
+            )
+        else:
+            self.distinct_on = ()
+
+        self.distinct = query._distinct
+
+        if query._correlate:
+            # ORM mapped entities that are mapped to joins can be passed
+            # to .correlate, so here they are broken into their component
+            # tables.
+            self.correlate = tuple(
+                util.flatten_iterator(
+                    sql_util.surface_selectables(s) if s is not None else None
+                    for s in query._correlate
+                )
+            )
+        elif query._correlate_except is not None:
+            self.correlate_except = tuple(
+                util.flatten_iterator(
+                    sql_util.surface_selectables(s) if s is not None else None
+                    for s in query._correlate_except
+                )
+            )
+        elif not query._auto_correlate:
+            self.correlate = (None,)
+
+        # PART II
+
+        self._for_update_arg = query._for_update_arg
+
+        if self.compile_options._is_star and (len(self._entities) != 1):
+            raise sa_exc.CompileError(
+                "Can't generate ORM query that includes multiple expressions "
+                "at the same time as '*'; query for '*' alone if present"
+            )
+        for entity in self._entities:
+            entity.setup_compile_state(self)
+
+        for rec in self.create_eager_joins:
+            strategy = rec[0]
+            strategy(self, *rec[1:])
+
+        # else "load from discrete FROMs" mode,
+        # i.e. when each _MapperEntity has its own FROM
+
+        if self.compile_options._enable_single_crit:
+            self._adjust_for_extra_criteria()
+
+        if not self.primary_columns:
+            if self.compile_options._only_load_props:
+                assert False, "no columns were included in _only_load_props"
+
+            raise sa_exc.InvalidRequestError(
+                "Query contains no columns with which to SELECT from."
+            )
+
+        if not self.from_clauses:
+            self.from_clauses = list(self._fallback_from_clauses)
+
+        if self.order_by is False:
+            self.order_by = None
+
+        if (
+            self.multi_row_eager_loaders
+            and self.eager_adding_joins
+            and self._should_nest_selectable
+        ):
+            self.statement = self._compound_eager_statement()
+        else:
+            self.statement = self._simple_statement()
+
+        if self.for_statement:
+            ezero = self._mapper_zero()
+            if ezero is not None:
+                # TODO: this goes away once we get rid of the deep entity
+                # thing
+                self.statement = self.statement._annotate(
+                    {"deepentity": ezero}
+                )
+
+    @classmethod
+    def _create_entities_collection(cls, query, legacy):
+        """Creates a partial ORMSelectCompileState that includes
+        the full collection of _MapperEntity and other _QueryEntity objects.
+
+        Supports a few remaining use cases that are pre-compilation
+        but still need to gather some of the column / adaptation information.
+
+        """
+        self = cls.__new__(cls)
+
+        self._entities = []
+        self._primary_entity = None
+        self._polymorphic_adapters = {}
+
+        self._label_convention = self._column_naming_convention(
+            query._label_style, legacy
+        )
+
+        # entities will also set up polymorphic adapters for mappers
+        # that have with_polymorphic configured
+        _QueryEntity.to_compile_state(
+            self, query._raw_columns, self._entities, is_current_entities=True
+        )
+        return self
+
+    @classmethod
+    def determine_last_joined_entity(cls, statement):
+        setup_joins = statement._setup_joins
+
+        return _determine_last_joined_entity(setup_joins, None)
+
+    @classmethod
+    def all_selected_columns(cls, statement):
+        for element in statement._raw_columns:
+            if (
+                element.is_selectable
+                and "entity_namespace" in element._annotations
+            ):
+                ens = element._annotations["entity_namespace"]
+                if not ens.is_mapper and not ens.is_aliased_class:
+                    yield from _select_iterables([element])
+                else:
+                    yield from _select_iterables(ens._all_column_expressions)
+            else:
+                yield from _select_iterables([element])
+
+    @classmethod
+    def get_columns_clause_froms(cls, statement):
+        return cls._normalize_froms(
+            itertools.chain.from_iterable(
+                (
+                    element._from_objects
+                    if "parententity" not in element._annotations
+                    else [
+                        element._annotations[
+                            "parententity"
+                        ].__clause_element__()
+                    ]
+                )
+                for element in statement._raw_columns
+            )
+        )
+
+    @classmethod
+    def from_statement(cls, statement, from_statement):
+        from_statement = coercions.expect(
+            roles.ReturnsRowsRole,
+            from_statement,
+            apply_propagate_attrs=statement,
+        )
+
+        stmt = FromStatement(statement._raw_columns, from_statement)
+
+        stmt.__dict__.update(
+            _with_options=statement._with_options,
+            _with_context_options=statement._with_context_options,
+            _execution_options=statement._execution_options,
+            _propagate_attrs=statement._propagate_attrs,
+        )
+        return stmt
+
+    def _set_select_from_alias(self):
+        """used only for legacy Query cases"""
+
+        query = self.select_statement  # query
+
+        assert self.compile_options._set_base_alias
+        assert len(query._from_obj) == 1
+
+        adapter = self._get_select_from_alias_from_obj(query._from_obj[0])
+        if adapter:
+            self.compile_options += {"_enable_single_crit": False}
+            self._from_obj_alias = adapter
+
+    def _get_select_from_alias_from_obj(self, from_obj):
+        """used only for legacy Query cases"""
+
+        info = from_obj
+
+        if "parententity" in info._annotations:
+            info = info._annotations["parententity"]
+
+        if hasattr(info, "mapper"):
+            if not info.is_aliased_class:
+                raise sa_exc.ArgumentError(
+                    "A selectable (FromClause) instance is "
+                    "expected when the base alias is being set."
+                )
+            else:
+                return info._adapter
+
+        elif isinstance(info.selectable, sql.selectable.AliasedReturnsRows):
+            equivs = self._all_equivs()
+            assert info is info.selectable
+            return ORMStatementAdapter(
+                _TraceAdaptRole.LEGACY_SELECT_FROM_ALIAS,
+                info.selectable,
+                equivalents=equivs,
+            )
+        else:
+            return None
+
+    def _mapper_zero(self):
+        """return the Mapper associated with the first QueryEntity."""
+        return self._entities[0].mapper
+
+    def _entity_zero(self):
+        """Return the 'entity' (mapper or AliasedClass) associated
+        with the first QueryEntity, or alternatively the 'select from'
+        entity if specified."""
+
+        for ent in self.from_clauses:
+            if "parententity" in ent._annotations:
+                return ent._annotations["parententity"]
+        for qent in self._entities:
+            if qent.entity_zero:
+                return qent.entity_zero
+
+        return None
+
+    def _only_full_mapper_zero(self, methname):
+        if self._entities != [self._primary_entity]:
+            raise sa_exc.InvalidRequestError(
+                "%s() can only be used against "
+                "a single mapped class." % methname
+            )
+        return self._primary_entity.entity_zero
+
+    def _only_entity_zero(self, rationale=None):
+        if len(self._entities) > 1:
+            raise sa_exc.InvalidRequestError(
+                rationale
+                or "This operation requires a Query "
+                "against a single mapper."
+            )
+        return self._entity_zero()
+
+    def _all_equivs(self):
+        equivs = {}
+
+        for memoized_entities in self._memoized_entities.values():
+            for ent in [
+                ent
+                for ent in memoized_entities
+                if isinstance(ent, _MapperEntity)
+            ]:
+                equivs.update(ent.mapper._equivalent_columns)
+
+        for ent in [
+            ent for ent in self._entities if isinstance(ent, _MapperEntity)
+        ]:
+            equivs.update(ent.mapper._equivalent_columns)
+        return equivs
+
+    def _compound_eager_statement(self):
+        # for eager joins present and LIMIT/OFFSET/DISTINCT,
+        # wrap the query inside a select,
+        # then append eager joins onto that
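+        #
+        # an illustrative case (hypothetical User/Address mapping): for
+        #     select(User).options(joinedload(User.addresses)).limit(5)
+        # the LIMIT must apply to User rows alone, so the base SELECT is
+        # wrapped as a subquery and the eager JOIN is applied outside it.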
+
+        if self.order_by:
+            # the default coercion for ORDER BY is now the OrderByRole,
+            # which adds an additional post coercion to ByOfRole in that
+            # elements are converted into label references.  For the
+            # eager load / subquery wrapping case, we need to un-coerce
+            # the original expressions outside of the label references
+            # in order to have them render.
+            unwrapped_order_by = [
+                (
+                    elem.element
+                    if isinstance(elem, sql.elements._label_reference)
+                    else elem
+                )
+                for elem in self.order_by
+            ]
+
+            order_by_col_expr = sql_util.expand_column_list_from_order_by(
+                self.primary_columns, unwrapped_order_by
+            )
+        else:
+            order_by_col_expr = []
+            unwrapped_order_by = None
+
+        # put FOR UPDATE on the inner query, where MySQL will honor it,
+        # as well as when it has an OF, so that PostgreSQL can use it.
+        inner = self._select_statement(
+            self.primary_columns
+            + [c for c in order_by_col_expr if c not in self.dedupe_columns],
+            self.from_clauses,
+            self._where_criteria,
+            self._having_criteria,
+            self.label_style,
+            self.order_by,
+            for_update=self._for_update_arg,
+            hints=self.select_statement._hints,
+            statement_hints=self.select_statement._statement_hints,
+            correlate=self.correlate,
+            correlate_except=self.correlate_except,
+            **self._select_args,
+        )
+
+        inner = inner.alias()
+
+        equivs = self._all_equivs()
+
+        self.compound_eager_adapter = ORMStatementAdapter(
+            _TraceAdaptRole.COMPOUND_EAGER_STATEMENT, inner, equivalents=equivs
+        )
+
+        statement = future.select(
+            *([inner] + self.secondary_columns)  # use_labels=self.labels
+        )
+        statement._label_style = self.label_style
+
+        # Oracle Database, however, does not allow FOR UPDATE on the
+        # subquery, and the Oracle Database dialects ignore it; plus, for
+        # PostgreSQL and MySQL we expect that all elements of the row are
+        # locked, so we also put it on the outside (except in the case of
+        # PG when OF is used)
+        if (
+            self._for_update_arg is not None
+            and self._for_update_arg.of is None
+        ):
+            statement._for_update_arg = self._for_update_arg
+
+        from_clause = inner
+        for eager_join in self.eager_joins.values():
+            # EagerLoader places a 'stop_on' attribute on the join,
+            # giving us a marker as to where the "splice point" of
+            # the join should be
+            from_clause = sql_util.splice_joins(
+                from_clause, eager_join, eager_join.stop_on
+            )
+
+        statement.select_from.non_generative(statement, from_clause)
+
+        if unwrapped_order_by:
+            statement.order_by.non_generative(
+                statement,
+                *self.compound_eager_adapter.copy_and_process(
+                    unwrapped_order_by
+                ),
+            )
+
+        statement.order_by.non_generative(statement, *self.eager_order_by)
+        return statement
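+
+    # For illustration: assuming a hypothetical ``User``/``Address``
+    # mapping where ``User.addresses`` is a relationship, a joined eager
+    # load combined with LIMIT takes this "compound" path and renders
+    # roughly as::
+    #
+    #     from sqlalchemy import select
+    #     from sqlalchemy.orm import joinedload
+    #
+    #     stmt = select(User).options(joinedload(User.addresses)).limit(5)
+    #
+    #     # SELECT anon_1.id, anon_1.name, ..., address_1.id, ...
+    #     # FROM (SELECT user_account.id AS id, user_account.name AS name
+    #     #       FROM user_account LIMIT :param_1) AS anon_1
+    #     # LEFT OUTER JOIN address AS address_1
+    #     #     ON anon_1.id = address_1.user_id
+    #
+    # the LIMIT applies inside the subquery so that exactly five User rows
+    # are selected, while the eager LEFT OUTER JOIN is spliced onto the
+    # outer SELECT.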
+
+    def _simple_statement(self):
+        statement = self._select_statement(
+            self.primary_columns + self.secondary_columns,
+            tuple(self.from_clauses) + tuple(self.eager_joins.values()),
+            self._where_criteria,
+            self._having_criteria,
+            self.label_style,
+            self.order_by,
+            for_update=self._for_update_arg,
+            hints=self.select_statement._hints,
+            statement_hints=self.select_statement._statement_hints,
+            correlate=self.correlate,
+            correlate_except=self.correlate_except,
+            **self._select_args,
+        )
+
+        if self.eager_order_by:
+            statement.order_by.non_generative(statement, *self.eager_order_by)
+        return statement
+
+    def _select_statement(
+        self,
+        raw_columns,
+        from_obj,
+        where_criteria,
+        having_criteria,
+        label_style,
+        order_by,
+        for_update,
+        hints,
+        statement_hints,
+        correlate,
+        correlate_except,
+        limit_clause,
+        offset_clause,
+        fetch_clause,
+        fetch_clause_options,
+        distinct,
+        distinct_on,
+        prefixes,
+        suffixes,
+        group_by,
+        independent_ctes,
+        independent_ctes_opts,
+    ):
+        statement = Select._create_raw_select(
+            _raw_columns=raw_columns,
+            _from_obj=from_obj,
+            _label_style=label_style,
+        )
+
+        if where_criteria:
+            statement._where_criteria = where_criteria
+        if having_criteria:
+            statement._having_criteria = having_criteria
+
+        if order_by:
+            statement._order_by_clauses += tuple(order_by)
+
+        if distinct_on:
+            statement.distinct.non_generative(statement, *distinct_on)
+        elif distinct:
+            statement.distinct.non_generative(statement)
+
+        if group_by:
+            statement._group_by_clauses += tuple(group_by)
+
+        statement._limit_clause = limit_clause
+        statement._offset_clause = offset_clause
+        statement._fetch_clause = fetch_clause
+        statement._fetch_clause_options = fetch_clause_options
+        statement._independent_ctes = independent_ctes
+        statement._independent_ctes_opts = independent_ctes_opts
+
+        if prefixes:
+            statement._prefixes = prefixes
+
+        if suffixes:
+            statement._suffixes = suffixes
+
+        statement._for_update_arg = for_update
+
+        if hints:
+            statement._hints = hints
+        if statement_hints:
+            statement._statement_hints = statement_hints
+
+        if correlate:
+            statement.correlate.non_generative(statement, *correlate)
+
+        if correlate_except is not None:
+            statement.correlate_except.non_generative(
+                statement, *correlate_except
+            )
+
+        return statement
+
+    def _adapt_polymorphic_element(self, element):
+        if "parententity" in element._annotations:
+            search = element._annotations["parententity"]
+            alias = self._polymorphic_adapters.get(search, None)
+            if alias:
+                return alias.adapt_clause(element)
+
+        if isinstance(element, expression.FromClause):
+            search = element
+        elif hasattr(element, "table"):
+            search = element.table
+        else:
+            return None
+
+        alias = self._polymorphic_adapters.get(search, None)
+        if alias:
+            return alias.adapt_clause(element)
+
+    def _adapt_col_list(self, cols, current_adapter):
+        if current_adapter:
+            return [current_adapter(o, True) for o in cols]
+        else:
+            return cols
+
+    def _get_current_adapter(self):
+        adapters = []
+
+        if self._from_obj_alias:
+            # used for legacy cases and going forward for query set ops,
+            # e.g. union(), union_all(), etc.; in 1.4 and previously, this
+            # was also used for from_self() and select_entity_from()
+            #
+            # for the "from obj" alias, apply extra rule to the
+            # 'ORM only' check, if this query were generated from a
+            # subquery of itself, i.e. _from_selectable(), apply adaption
+            # to all SQL constructs.
+            adapters.append(
+                (
+                    True,
+                    self._from_obj_alias.replace,
+                )
+            )
+
+        # this was *hopefully* the only adapter we were going to need
+        # going forward...however, we unfortunately need _from_obj_alias
+        # for query.union(), which we can't drop
+        if self._polymorphic_adapters:
+            adapters.append((False, self._adapt_polymorphic_element))
+
+        if not adapters:
+            return None
+
+        def _adapt_clause(clause, as_filter):
+            # do we adapt all expression elements or only those
+            # tagged as 'ORM' constructs ?
+
+            def replace(elem):
+                is_orm_adapt = (
+                    "_orm_adapt" in elem._annotations
+                    or "parententity" in elem._annotations
+                )
+                for always_adapt, adapter in adapters:
+                    if is_orm_adapt or always_adapt:
+                        e = adapter(elem)
+                        if e is not None:
+                            return e
+
+            return visitors.replacement_traverse(clause, {}, replace)
+
+        return _adapt_clause
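+
+    # For illustration: the callable returned above is applied to criteria
+    # such as WHERE clauses; assuming a hypothetical ``User`` entity, an
+    # expression like ``User.id == 5`` is rewritten by
+    # ``visitors.replacement_traverse()`` so that its Column references
+    # point to the adapted selectable::
+    #
+    #     adapter = compile_state._get_current_adapter()
+    #     if adapter is not None:
+    #         criteria = adapter(User.id == 5, True)
+    #
+    # only elements annotated as ORM constructs are adapted, unless the
+    # adapter was registered with its "always adapt" flag set.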
+
+    def _join(self, args, entities_collection):
+        for right, onclause, from_, flags in args:
+            isouter = flags["isouter"]
+            full = flags["full"]
+
+            right = inspect(right)
+            if onclause is not None:
+                onclause = inspect(onclause)
+
+            if isinstance(right, interfaces.PropComparator):
+                if onclause is not None:
+                    raise sa_exc.InvalidRequestError(
+                        "No 'on clause' argument may be passed when joining "
+                        "to a relationship path as a target"
+                    )
+
+                onclause = right
+                right = None
+            elif "parententity" in right._annotations:
+                right = right._annotations["parententity"]
+
+            if onclause is None:
+                if not right.is_selectable and not hasattr(right, "mapper"):
+                    raise sa_exc.ArgumentError(
+                        "Expected mapped entity or "
+                        "selectable/table as join target"
+                    )
+
+            of_type = None
+
+            if isinstance(onclause, interfaces.PropComparator):
+                # descriptor/property given (or determined); this tells us
+                # explicitly what the expected "left" side of the join is.
+
+                of_type = getattr(onclause, "_of_type", None)
+
+                if right is None:
+                    if of_type:
+                        right = of_type
+                    else:
+                        right = onclause.property
+
+                        try:
+                            right = right.entity
+                        except AttributeError as err:
+                            raise sa_exc.ArgumentError(
+                                "Join target %s does not refer to a "
+                                "mapped entity" % right
+                            ) from err
+
+                left = onclause._parententity
+
+                prop = onclause.property
+                if not isinstance(onclause, attributes.QueryableAttribute):
+                    onclause = prop
+
+                # check whether this path is already present; don't render
+                # the join again in that case.
+                if (left, right, prop.key) in self._already_joined_edges:
+                    continue
+
+                if from_ is not None:
+                    if (
+                        from_ is not left
+                        and from_._annotations.get("parententity", None)
+                        is not left
+                    ):
+                        raise sa_exc.InvalidRequestError(
+                            "explicit from clause %s does not match left side "
+                            "of relationship attribute %s"
+                            % (
+                                from_._annotations.get("parententity", from_),
+                                onclause,
+                            )
+                        )
+            elif from_ is not None:
+                prop = None
+                left = from_
+            else:
+                # no descriptor/property given; we will need to figure out
+                # what the effective "left" side is
+                prop = left = None
+
+            # figure out the final "left" and "right" sides and create an
+            # ORMJoin to add to our _from_obj tuple
+            self._join_left_to_right(
+                entities_collection,
+                left,
+                right,
+                onclause,
+                prop,
+                isouter,
+                full,
+            )
+
+    def _join_left_to_right(
+        self,
+        entities_collection,
+        left,
+        right,
+        onclause,
+        prop,
+        outerjoin,
+        full,
+    ):
+        """given raw "left", "right", "onclause" parameters consumed from
+        a particular key within _join(), add a real ORMJoin object to
+        our _from_obj list (or augment an existing one)
+
+        """
+
+        if left is None:
+            # left not given (e.g. no relationship object/name specified)
+            # figure out the best "left" side based on our existing froms /
+            # entities
+            assert prop is None
+            (
+                left,
+                replace_from_obj_index,
+                use_entity_index,
+            ) = self._join_determine_implicit_left_side(
+                entities_collection, left, right, onclause
+            )
+        else:
+            # left is given via a relationship/name, or as explicit left side.
+            # Determine where in our
+            # "froms" list it should be spliced/appended as well as what
+            # existing entity it corresponds to.
+            (
+                replace_from_obj_index,
+                use_entity_index,
+            ) = self._join_place_explicit_left_side(entities_collection, left)
+
+        if left is right:
+            raise sa_exc.InvalidRequestError(
+                "Can't construct a join from %s to %s, they "
+                "are the same entity" % (left, right)
+            )
+
+        # the right side as given often needs to be adapted.  additionally
+        # a lot of things can be wrong with it.  handle all that and
+        # get back the new effective "right" side
+        r_info, right, onclause = self._join_check_and_adapt_right_side(
+            left, right, onclause, prop
+        )
+
+        if not r_info.is_selectable:
+            extra_criteria = self._get_extra_criteria(r_info)
+        else:
+            extra_criteria = ()
+
+        if replace_from_obj_index is not None:
+            # splice into an existing element in the
+            # self._from_obj list
+            left_clause = self.from_clauses[replace_from_obj_index]
+
+            self.from_clauses = (
+                self.from_clauses[:replace_from_obj_index]
+                + [
+                    _ORMJoin(
+                        left_clause,
+                        right,
+                        onclause,
+                        isouter=outerjoin,
+                        full=full,
+                        _extra_criteria=extra_criteria,
+                    )
+                ]
+                + self.from_clauses[replace_from_obj_index + 1 :]
+            )
+        else:
+            # add a new element to the self._from_obj list
+            if use_entity_index is not None:
+                # make use of _MapperEntity selectable, which is usually
+                # entity_zero.selectable, but if with_polymorphic() were used
+                # might be distinct
+                assert isinstance(
+                    entities_collection[use_entity_index], _MapperEntity
+                )
+                left_clause = entities_collection[use_entity_index].selectable
+            else:
+                left_clause = left
+
+            self.from_clauses = self.from_clauses + [
+                _ORMJoin(
+                    left_clause,
+                    r_info,
+                    onclause,
+                    isouter=outerjoin,
+                    full=full,
+                    _extra_criteria=extra_criteria,
+                )
+            ]
+
+    def _join_determine_implicit_left_side(
+        self, entities_collection, left, right, onclause
+    ):
+        """When join conditions don't express the left side explicitly,
+        determine if an existing FROM or entity in this query
+        can serve as the left hand side.
+
+        """
+
+        # when we are here, it means join() was called without an ORM-
+        # specific way of telling us what the "left" side is, e.g.:
+        #
+        # join(RightEntity)
+        #
+        # or
+        #
+        # join(RightEntity, RightEntity.foo == LeftEntity.bar)
+        #
+
+        r_info = inspect(right)
+
+        replace_from_obj_index = use_entity_index = None
+
+        if self.from_clauses:
+            # we have a list of FROMs already.  So by definition this
+            # join has to connect to one of those FROMs.
+
+            indexes = sql_util.find_left_clause_to_join_from(
+                self.from_clauses, r_info.selectable, onclause
+            )
+
+            if len(indexes) == 1:
+                replace_from_obj_index = indexes[0]
+                left = self.from_clauses[replace_from_obj_index]
+            elif len(indexes) > 1:
+                raise sa_exc.InvalidRequestError(
+                    "Can't determine which FROM clause to join "
+                    "from, there are multiple FROMS which can "
+                    "join to this entity. Please use the .select_from() "
+                    "method to establish an explicit left side, as well as "
+                    "providing an explicit ON clause if not present already "
+                    "to help resolve the ambiguity."
+                )
+            else:
+                raise sa_exc.InvalidRequestError(
+                    "Don't know how to join to %r. "
+                    "Please use the .select_from() "
+                    "method to establish an explicit left side, as well as "
+                    "providing an explicit ON clause if not present already "
+                    "to help resolve the ambiguity." % (right,)
+                )
+
+        elif entities_collection:
+            # we have no explicit FROMs, so the implicit left has to
+            # come from our list of entities.
+
+            potential = {}
+            for entity_index, ent in enumerate(entities_collection):
+                entity = ent.entity_zero_or_selectable
+                if entity is None:
+                    continue
+                ent_info = inspect(entity)
+                if ent_info is r_info:  # left and right are the same, skip
+                    continue
+
+                # by using a dictionary with the selectables as keys this
+                # de-duplicates those selectables as occurs when the query is
+                # against a series of columns from the same selectable
+                if isinstance(ent, _MapperEntity):
+                    potential[ent.selectable] = (entity_index, entity)
+                else:
+                    potential[ent_info.selectable] = (None, entity)
+
+            all_clauses = list(potential.keys())
+            indexes = sql_util.find_left_clause_to_join_from(
+                all_clauses, r_info.selectable, onclause
+            )
+
+            if len(indexes) == 1:
+                use_entity_index, left = potential[all_clauses[indexes[0]]]
+            elif len(indexes) > 1:
+                raise sa_exc.InvalidRequestError(
+                    "Can't determine which FROM clause to join "
+                    "from, there are multiple FROMS which can "
+                    "join to this entity. Please use the .select_from() "
+                    "method to establish an explicit left side, as well as "
+                    "providing an explicit ON clause if not present already "
+                    "to help resolve the ambiguity."
+                )
+            else:
+                raise sa_exc.InvalidRequestError(
+                    "Don't know how to join to %r. "
+                    "Please use the .select_from() "
+                    "method to establish an explicit left side, as well as "
+                    "providing an explicit ON clause if not present already "
+                    "to help resolve the ambiguity." % (right,)
+                )
+        else:
+            raise sa_exc.InvalidRequestError(
+                "No entities to join from; please use "
+                "select_from() to establish the left "
+                "entity/selectable of this join"
+            )
+
+        return left, replace_from_obj_index, use_entity_index
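+
+    # For illustration: assuming hypothetical ``User`` and ``Address``
+    # mappings, ``select(User).join(Address)`` arrives here with no
+    # explicit left side and ``User`` is inferred from the entities list.
+    # if more than one entity or FROM could join to the target, the
+    # "multiple FROMS" error above is raised and an explicit left side
+    # must be established::
+    #
+    #     stmt = (
+    #         select(User, Order)
+    #         .select_from(User)
+    #         .join(Address, User.id == Address.user_id)
+    #     )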
+
+    def _join_place_explicit_left_side(self, entities_collection, left):
+        """When join conditions express a left side explicitly, determine
+        where in our existing list of FROM clauses we should join towards,
+        or if we need to make a new join, and if so whether it proceeds
+        from one of our existing entities.
+
+        """
+
+        # when we are here, it means join() was called with an indicator
+        # as to an exact left side, which means a path to a
+        # Relationship was given, e.g.:
+        #
+        # join(RightEntity, LeftEntity.right)
+        #
+        # or
+        #
+        # join(LeftEntity.right)
+        #
+        # as well as string forms:
+        #
+        # join(RightEntity, "right")
+        #
+        # etc.
+        #
+
+        replace_from_obj_index = use_entity_index = None
+
+        l_info = inspect(left)
+        if self.from_clauses:
+            indexes = sql_util.find_left_clause_that_matches_given(
+                self.from_clauses, l_info.selectable
+            )
+
+            if len(indexes) > 1:
+                raise sa_exc.InvalidRequestError(
+                    "Can't identify which entity in which to assign the "
+                    "left side of this join.   Please use a more specific "
+                    "ON clause."
+                )
+
+            # having an index means the left side is already present in
+            # an existing FROM in the self._from_obj tuple
+            if indexes:
+                replace_from_obj_index = indexes[0]
+
+            # no index means we need to add a new element to the
+            # self._from_obj tuple
+
+        # no from element present, so we will have to add to the
+        # self._from_obj tuple.  Determine if this left side matches up
+        # with existing mapper entities, in which case we want to apply the
+        # aliasing / adaptation rules present on that entity if any
+        if (
+            replace_from_obj_index is None
+            and entities_collection
+            and hasattr(l_info, "mapper")
+        ):
+            for idx, ent in enumerate(entities_collection):
+                # TODO: should we be checking for multiple mapper entities
+                # matching?
+                if isinstance(ent, _MapperEntity) and ent.corresponds_to(left):
+                    use_entity_index = idx
+                    break
+
+        return replace_from_obj_index, use_entity_index
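+
+    # For illustration: a relationship attribute supplies the left side
+    # explicitly; assuming a hypothetical ``User``/``Address`` mapping::
+    #
+    #     from sqlalchemy.orm import aliased
+    #
+    #     ua = aliased(User)
+    #     stmt = select(ua).join(ua.addresses)
+    #
+    # the ``ua`` entity is located here so that the join starts from the
+    # aliased selectable rather than the base table.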
+
+    def _join_check_and_adapt_right_side(self, left, right, onclause, prop):
+        """transform the "right" side of the join as well as the onclause
+        according to polymorphic mapping translations, aliasing on the query
+        or on the join, special cases where the right and left side have
+        overlapping tables.
+
+        """
+
+        l_info = inspect(left)
+        r_info = inspect(right)
+
+        overlap = False
+
+        right_mapper = getattr(r_info, "mapper", None)
+        # if the target is a joined inheritance mapping,
+        # be more liberal about auto-aliasing.
+        if right_mapper and (
+            right_mapper.with_polymorphic
+            or isinstance(right_mapper.persist_selectable, expression.Join)
+        ):
+            for from_obj in self.from_clauses or [l_info.selectable]:
+                if sql_util.selectables_overlap(
+                    l_info.selectable, from_obj
+                ) and sql_util.selectables_overlap(
+                    from_obj, r_info.selectable
+                ):
+                    overlap = True
+                    break
+
+        if overlap and l_info.selectable is r_info.selectable:
+            raise sa_exc.InvalidRequestError(
+                "Can't join table/selectable '%s' to itself"
+                % l_info.selectable
+            )
+
+        right_mapper, right_selectable, right_is_aliased = (
+            getattr(r_info, "mapper", None),
+            r_info.selectable,
+            getattr(r_info, "is_aliased_class", False),
+        )
+
+        if (
+            right_mapper
+            and prop
+            and not right_mapper.common_parent(prop.mapper)
+        ):
+            raise sa_exc.InvalidRequestError(
+                "Join target %s does not correspond to "
+                "the right side of join condition %s" % (right, onclause)
+            )
+
+        # _join_entities is used as a hint for single-table inheritance
+        # purposes at the moment
+        if hasattr(r_info, "mapper"):
+            self._join_entities += (r_info,)
+
+        need_adapter = False
+
+        # test for joining to an unmapped selectable as the target
+        if r_info.is_clause_element:
+            if prop:
+                right_mapper = prop.mapper
+
+            if right_selectable._is_lateral:
+                # orm_only is disabled to suit the case where we have to
+                # adapt an explicit correlate(Entity) - the select() loses
+                # the ORM-ness in this case right now; ideally it would not
+                current_adapter = self._get_current_adapter()
+                if current_adapter is not None:
+                    # TODO: we had orm_only=False here before, removing
+                    # it didn't break things.   if we identify the rationale,
+                    # may need to apply "_orm_only" annotation here.
+                    right = current_adapter(right, True)
+
+            elif prop:
+                # joining to selectable with a mapper property given
+                # as the ON clause
+
+                if not right_selectable.is_derived_from(
+                    right_mapper.persist_selectable
+                ):
+                    raise sa_exc.InvalidRequestError(
+                        "Selectable '%s' is not derived from '%s'"
+                        % (
+                            right_selectable.description,
+                            right_mapper.persist_selectable.description,
+                        )
+                    )
+
+                # if the destination selectable is a plain select(),
+                # turn it into an alias().
+                if isinstance(right_selectable, expression.SelectBase):
+                    right_selectable = coercions.expect(
+                        roles.FromClauseRole, right_selectable
+                    )
+                    need_adapter = True
+
+                # make the right hand side target into an ORM entity
+                right = AliasedClass(right_mapper, right_selectable)
+
+                util.warn_deprecated(
+                    "An alias is being generated automatically against "
+                    "joined entity %s for raw clauseelement, which is "
+                    "deprecated and will be removed in a later release. "
+                    "Use the aliased() "
+                    "construct explicitly, see the linked example."
+                    % right_mapper,
+                    "1.4",
+                    code="xaj1",
+                )
+
+        # test for overlap:
+        # orm/inheritance/relationships.py
+        # SelfReferentialM2MTest
+        aliased_entity = right_mapper and not right_is_aliased and overlap
+
+        if not need_adapter and aliased_entity:
+            # there are a few places in the ORM that automatic aliasing
+            # is still desirable, and can't be automatic with a Core
+            # only approach.  For illustrations of "overlaps" see
+            # test/orm/inheritance/test_relationships.py.  There are also
+            # general overlap cases with many-to-many tables where automatic
+            # aliasing is desirable.
+            right = AliasedClass(right, flat=True)
+            need_adapter = True
+
+            util.warn(
+                "An alias is being generated automatically against "
+                "joined entity %s due to overlapping tables.  This is a "
+                "legacy pattern which may be "
+                "deprecated in a later release.  Use the "
+                "aliased(<entity>, flat=True) "
+                "construct explicitly, see the linked example." % right_mapper,
+                code="xaj2",
+            )
+
+        if need_adapter:
+            # if need_adapter is True, we are in a deprecated case and
+            # a warning has been emitted.
+            assert right_mapper
+
+            adapter = ORMAdapter(
+                _TraceAdaptRole.DEPRECATED_JOIN_ADAPT_RIGHT_SIDE,
+                inspect(right),
+                equivalents=right_mapper._equivalent_columns,
+            )
+
+            # if an alias() on the right side was generated,
+            # which is intended to wrap the right side in a subquery,
+            # ensure that columns retrieved from this target in the result
+            # set are also adapted.
+            self._mapper_loads_polymorphically_with(right_mapper, adapter)
+        elif (
+            not r_info.is_clause_element
+            and not right_is_aliased
+            and right_mapper._has_aliased_polymorphic_fromclause
+        ):
+            # for the case where the target mapper has a with_polymorphic
+            # set up, ensure an adapter is set up for criteria that works
+            # against this mapper.  Previously, this logic used to
+            # use the "create_aliases or aliased_entity" case to generate
+            # an aliased() object, but this creates an alias that isn't
+            # strictly necessary.
+            # see test/orm/test_core_compilation.py
+            # ::RelNaturalAliasedJoinsTest::test_straight
+            # and similar
+            self._mapper_loads_polymorphically_with(
+                right_mapper,
+                ORMAdapter(
+                    _TraceAdaptRole.WITH_POLYMORPHIC_ADAPTER_RIGHT_JOIN,
+                    right_mapper,
+                    selectable=right_mapper.selectable,
+                    equivalents=right_mapper._equivalent_columns,
+                ),
+            )
+        # if the onclause is a ClauseElement, adapt it with any
+        # adapters that are in place right now
+        if isinstance(onclause, expression.ClauseElement):
+            current_adapter = self._get_current_adapter()
+            if current_adapter:
+                onclause = current_adapter(onclause, True)
+
+        # if joining on a MapperProperty path,
+        # track the path to prevent redundant joins
+        if prop:
+            self._already_joined_edges += ((left, right, prop.key),)
+
+        return inspect(right), right, onclause
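+
+    # For illustration: the auto-aliasing warned about above is avoided by
+    # aliasing explicitly; assuming a hypothetical self-referential
+    # ``Node`` mapping with a ``parent_id`` column::
+    #
+    #     from sqlalchemy.orm import aliased
+    #
+    #     n2 = aliased(Node, flat=True)
+    #     stmt = select(Node).join(n2, Node.id == n2.parent_id)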
+
+    @property
+    def _select_args(self):
+        return {
+            "limit_clause": self.select_statement._limit_clause,
+            "offset_clause": self.select_statement._offset_clause,
+            "distinct": self.distinct,
+            "distinct_on": self.distinct_on,
+            "prefixes": self.select_statement._prefixes,
+            "suffixes": self.select_statement._suffixes,
+            "group_by": self.group_by or None,
+            "fetch_clause": self.select_statement._fetch_clause,
+            "fetch_clause_options": (
+                self.select_statement._fetch_clause_options
+            ),
+            "independent_ctes": self.select_statement._independent_ctes,
+            "independent_ctes_opts": (
+                self.select_statement._independent_ctes_opts
+            ),
+        }
+
+    @property
+    def _should_nest_selectable(self):
+        kwargs = self._select_args
+        return (
+            kwargs.get("limit_clause") is not None
+            or kwargs.get("offset_clause") is not None
+            or kwargs.get("distinct", False)
+            or kwargs.get("distinct_on", ())
+            or kwargs.get("group_by", False)
+        )
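+
+    # For illustration: a statement with both an eager join and LIMIT,
+    # e.g. ``select(User).options(joinedload(User.addresses)).limit(5)``
+    # assuming a hypothetical ``User`` mapping, nests so that the LIMIT
+    # applies to ``User`` rows rather than to joined rows; without
+    # LIMIT/OFFSET/DISTINCT/GROUP BY the ``_simple_statement()`` path is
+    # taken instead.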
+
+    def _get_extra_criteria(self, ext_info):
+        if (
+            "additional_entity_criteria",
+            ext_info.mapper,
+        ) in self.global_attributes:
+            return tuple(
+                ae._resolve_where_criteria(ext_info)
+                for ae in self.global_attributes[
+                    ("additional_entity_criteria", ext_info.mapper)
+                ]
+                if (ae.include_aliases or ae.entity is ext_info)
+                and ae._should_include(self)
+            )
+        else:
+            return ()
+
+    def _adjust_for_extra_criteria(self):
+        """Apply extra criteria filtering.
+
+        For all distinct single-table-inheritance mappers represented in
+        the columns clause of this query, as well as the "select from entity",
+        add criterion to the WHERE
+        clause of the given QueryContext such that only the appropriate
+        subtypes are selected from the total results.
+
+        Additionally, add WHERE criteria originating from LoaderCriteriaOptions
+        associated with the global context.
+
+        """
+
+        for fromclause in self.from_clauses:
+            ext_info = fromclause._annotations.get("parententity", None)
+
+            if (
+                ext_info
+                and (
+                    ext_info.mapper._single_table_criterion is not None
+                    or ("additional_entity_criteria", ext_info.mapper)
+                    in self.global_attributes
+                )
+                and ext_info not in self.extra_criteria_entities
+            ):
+                self.extra_criteria_entities[ext_info] = (
+                    ext_info,
+                    ext_info._adapter if ext_info.is_aliased_class else None,
+                )
+
+        search = set(self.extra_criteria_entities.values())
+
+        for ext_info, adapter in search:
+            if ext_info in self._join_entities:
+                continue
+
+            single_crit = ext_info.mapper._single_table_criterion
+
+            if self.compile_options._for_refresh_state:
+                additional_entity_criteria = []
+            else:
+                additional_entity_criteria = self._get_extra_criteria(ext_info)
+
+            if single_crit is not None:
+                additional_entity_criteria += (single_crit,)
+
+            current_adapter = self._get_current_adapter()
+            for crit in additional_entity_criteria:
+                if adapter:
+                    crit = adapter.traverse(crit)
+
+                if current_adapter:
+                    crit = sql_util._deep_annotate(crit, {"_orm_adapt": True})
+                    crit = current_adapter(crit, False)
+                self._where_criteria += (crit,)
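+
+    # For illustration: the "additional entity criteria" consumed above
+    # originate from ``with_loader_criteria()``; assuming a hypothetical
+    # ``User`` mapping with a boolean ``active`` column::
+    #
+    #     from sqlalchemy import select, true
+    #     from sqlalchemy.orm import with_loader_criteria
+    #
+    #     stmt = select(User).options(
+    #         with_loader_criteria(User, User.active == true())
+    #     )
+    #
+    # the option's criteria are appended to the WHERE clause here for each
+    # matching entity, after adaptation to any aliasing in effect.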
+
+
+def _column_descriptions(
+    query_or_select_stmt: Union[Query, Select, FromStatement],
+    compile_state: Optional[ORMSelectCompileState] = None,
+    legacy: bool = False,
+) -> List[ORMColumnDescription]:
+    if compile_state is None:
+        compile_state = ORMSelectCompileState._create_entities_collection(
+            query_or_select_stmt, legacy=legacy
+        )
+    ctx = compile_state
+    d = [
+        {
+            "name": ent._label_name,
+            "type": ent.type,
+            "aliased": getattr(insp_ent, "is_aliased_class", False),
+            "expr": ent.expr,
+            "entity": (
+                getattr(insp_ent, "entity", None)
+                if ent.entity_zero is not None
+                and not insp_ent.is_clause_element
+                else None
+            ),
+        }
+        for ent, insp_ent in [
+            (_ent, _ent.entity_zero) for _ent in ctx._entities
+        ]
+    ]
+    return d
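+
+# For illustration, the descriptions built above power the public
+# ``column_descriptions`` accessor; assuming a hypothetical ``User``
+# mapping::
+#
+#     from sqlalchemy import select
+#
+#     stmt = select(User, User.name)
+#     stmt.column_descriptions
+#     # [
+#     #     {"name": "User", "type": User, "aliased": False,
+#     #      "expr": User, "entity": User},
+#     #     {"name": "name", "type": String(), "aliased": False,
+#     #      "expr": User.name, "entity": User},
+#     # ]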
+
+
+def _legacy_filter_by_entity_zero(
+    query_or_augmented_select: Union[Query[Any], Select[Any]]
+) -> Optional[_InternalEntityType[Any]]:
+    self = query_or_augmented_select
+    if self._setup_joins:
+        _last_joined_entity = self._last_joined_entity
+        if _last_joined_entity is not None:
+            return _last_joined_entity
+
+    if self._from_obj and "parententity" in self._from_obj[0]._annotations:
+        return self._from_obj[0]._annotations["parententity"]
+
+    return _entity_from_pre_ent_zero(self)
+
+
+def _entity_from_pre_ent_zero(
+    query_or_augmented_select: Union[Query[Any], Select[Any]]
+) -> Optional[_InternalEntityType[Any]]:
+    self = query_or_augmented_select
+    if not self._raw_columns:
+        return None
+
+    ent = self._raw_columns[0]
+
+    if "parententity" in ent._annotations:
+        return ent._annotations["parententity"]
+    elif isinstance(ent, ORMColumnsClauseRole):
+        return ent.entity
+    elif "bundle" in ent._annotations:
+        return ent._annotations["bundle"]
+    else:
+        return ent
+
+
+def _determine_last_joined_entity(
+    setup_joins: Tuple[_SetupJoinsElement, ...],
+    entity_zero: Optional[_InternalEntityType[Any]] = None,
+) -> Optional[Union[_InternalEntityType[Any], _JoinTargetElement]]:
+    if not setup_joins:
+        return None
+
+    (target, onclause, from_, flags) = setup_joins[-1]
+
+    if isinstance(
+        target,
+        attributes.QueryableAttribute,
+    ):
+        return target.entity
+    else:
+        return target
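+
+# For illustration: in the legacy ``Query`` API, ``filter_by()`` targets
+# the most recently joined entity, which this helper determines; assuming
+# hypothetical ``User``/``Address`` mappings::
+#
+#     q = session.query(User).join(User.addresses).filter_by(
+#         email_address="foo@example.com"
+#     )
+#
+# here ``filter_by()`` generates criteria against ``Address``, the target
+# of the last ``join()``, rather than against ``User``.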
+
+
+class _QueryEntity:
+    """represent an entity column returned within a Query result."""
+
+    __slots__ = ()
+
+    supports_single_entity: bool
+
+    _non_hashable_value = False
+    _null_column_type = False
+    use_id_for_hash = False
+
+    _label_name: Optional[str]
+    type: Union[Type[Any], TypeEngine[Any]]
+    expr: Union[_InternalEntityType, ColumnElement[Any]]
+    entity_zero: Optional[_InternalEntityType]
+
+    def setup_compile_state(self, compile_state: ORMCompileState) -> None:
+        raise NotImplementedError()
+
+    def setup_dml_returning_compile_state(
+        self,
+        compile_state: ORMCompileState,
+        adapter: Optional[_DMLReturningColFilter],
+    ) -> None:
+        raise NotImplementedError()
+
+    def row_processor(self, context, result):
+        raise NotImplementedError()
+
+    @classmethod
+    def to_compile_state(
+        cls, compile_state, entities, entities_collection, is_current_entities
+    ):
+        for idx, entity in enumerate(entities):
+            if entity._is_lambda_element:
+                if entity._is_sequence:
+                    cls.to_compile_state(
+                        compile_state,
+                        entity._resolved,
+                        entities_collection,
+                        is_current_entities,
+                    )
+                    continue
+                else:
+                    entity = entity._resolved
+
+            if entity.is_clause_element:
+                if entity.is_selectable:
+                    if "parententity" in entity._annotations:
+                        _MapperEntity(
+                            compile_state,
+                            entity,
+                            entities_collection,
+                            is_current_entities,
+                        )
+                    else:
+                        _ColumnEntity._for_columns(
+                            compile_state,
+                            entity._select_iterable,
+                            entities_collection,
+                            idx,
+                            is_current_entities,
+                        )
+                else:
+                    if entity._annotations.get("bundle", False):
+                        _BundleEntity(
+                            compile_state,
+                            entity,
+                            entities_collection,
+                            is_current_entities,
+                        )
+                    elif entity._is_clause_list:
+                        # this is legacy only - test_composites.py
+                        # test_query_cols_legacy
+                        _ColumnEntity._for_columns(
+                            compile_state,
+                            entity._select_iterable,
+                            entities_collection,
+                            idx,
+                            is_current_entities,
+                        )
+                    else:
+                        _ColumnEntity._for_columns(
+                            compile_state,
+                            [entity],
+                            entities_collection,
+                            idx,
+                            is_current_entities,
+                        )
+            elif entity.is_bundle:
+                _BundleEntity(compile_state, entity, entities_collection)
+
+        return entities_collection
+
+
+class _MapperEntity(_QueryEntity):
+    """mapper/class/AliasedClass entity"""
+
+    __slots__ = (
+        "expr",
+        "mapper",
+        "entity_zero",
+        "is_aliased_class",
+        "path",
+        "_extra_entities",
+        "_label_name",
+        "_with_polymorphic_mappers",
+        "selectable",
+        "_polymorphic_discriminator",
+    )
+
+    expr: _InternalEntityType
+    mapper: Mapper[Any]
+    entity_zero: _InternalEntityType
+    is_aliased_class: bool
+    path: PathRegistry
+    _label_name: str
+
+    def __init__(
+        self, compile_state, entity, entities_collection, is_current_entities
+    ):
+        entities_collection.append(self)
+        if is_current_entities:
+            if compile_state._primary_entity is None:
+                compile_state._primary_entity = self
+            compile_state._has_mapper_entities = True
+            compile_state._has_orm_entities = True
+
+        entity = entity._annotations["parententity"]
+        entity._post_inspect
+        ext_info = self.entity_zero = entity
+        entity = ext_info.entity
+
+        self.expr = entity
+        self.mapper = mapper = ext_info.mapper
+
+        self._extra_entities = (self.expr,)
+
+        if ext_info.is_aliased_class:
+            self._label_name = ext_info.name
+        else:
+            self._label_name = mapper.class_.__name__
+
+        self.is_aliased_class = ext_info.is_aliased_class
+        self.path = ext_info._path_registry
+
+        self.selectable = ext_info.selectable
+        self._with_polymorphic_mappers = ext_info.with_polymorphic_mappers
+        self._polymorphic_discriminator = ext_info.polymorphic_on
+
+        if mapper._should_select_with_poly_adapter:
+            compile_state._create_with_polymorphic_adapter(
+                ext_info, self.selectable
+            )
+
+    supports_single_entity = True
+
+    _non_hashable_value = True
+    use_id_for_hash = True
+
+    @property
+    def type(self):
+        return self.mapper.class_
+
+    @property
+    def entity_zero_or_selectable(self):
+        return self.entity_zero
+
+    def corresponds_to(self, entity):
+        return _entity_corresponds_to(self.entity_zero, entity)
+
+    def _get_entity_clauses(self, compile_state):
+        adapter = None
+
+        if not self.is_aliased_class:
+            if compile_state._polymorphic_adapters:
+                adapter = compile_state._polymorphic_adapters.get(
+                    self.mapper, None
+                )
+        else:
+            adapter = self.entity_zero._adapter
+
+        if adapter:
+            if compile_state._from_obj_alias:
+                ret = adapter.wrap(compile_state._from_obj_alias)
+            else:
+                ret = adapter
+        else:
+            ret = compile_state._from_obj_alias
+
+        return ret
+
+    def row_processor(self, context, result):
+        compile_state = context.compile_state
+        adapter = self._get_entity_clauses(compile_state)
+
+        if compile_state.compound_eager_adapter and adapter:
+            adapter = adapter.wrap(compile_state.compound_eager_adapter)
+        elif not adapter:
+            adapter = compile_state.compound_eager_adapter
+
+        if compile_state._primary_entity is self:
+            only_load_props = compile_state.compile_options._only_load_props
+            refresh_state = context.refresh_state
+        else:
+            only_load_props = refresh_state = None
+
+        _instance = loading._instance_processor(
+            self,
+            self.mapper,
+            context,
+            result,
+            self.path,
+            adapter,
+            only_load_props=only_load_props,
+            refresh_state=refresh_state,
+            polymorphic_discriminator=self._polymorphic_discriminator,
+        )
+
+        return _instance, self._label_name, self._extra_entities
+
+    def setup_dml_returning_compile_state(
+        self,
+        compile_state: ORMCompileState,
+        adapter: Optional[_DMLReturningColFilter],
+    ) -> None:
+        loading._setup_entity_query(
+            compile_state,
+            self.mapper,
+            self,
+            self.path,
+            adapter,
+            compile_state.primary_columns,
+            with_polymorphic=self._with_polymorphic_mappers,
+            only_load_props=compile_state.compile_options._only_load_props,
+            polymorphic_discriminator=self._polymorphic_discriminator,
+        )
+
+    def setup_compile_state(self, compile_state):
+        adapter = self._get_entity_clauses(compile_state)
+
+        single_table_crit = self.mapper._single_table_criterion
+        if (
+            single_table_crit is not None
+            or ("additional_entity_criteria", self.mapper)
+            in compile_state.global_attributes
+        ):
+            ext_info = self.entity_zero
+            compile_state.extra_criteria_entities[ext_info] = (
+                ext_info,
+                ext_info._adapter if ext_info.is_aliased_class else None,
+            )
+
+        loading._setup_entity_query(
+            compile_state,
+            self.mapper,
+            self,
+            self.path,
+            adapter,
+            compile_state.primary_columns,
+            with_polymorphic=self._with_polymorphic_mappers,
+            only_load_props=compile_state.compile_options._only_load_props,
+            polymorphic_discriminator=self._polymorphic_discriminator,
+        )
+        compile_state._fallback_from_clauses.append(self.selectable)
+
+
+class _BundleEntity(_QueryEntity):
+    _extra_entities = ()
+
+    __slots__ = (
+        "bundle",
+        "expr",
+        "type",
+        "_label_name",
+        "_entities",
+        "supports_single_entity",
+    )
+
+    _entities: List[_QueryEntity]
+    bundle: Bundle
+    type: Type[Any]
+    _label_name: str
+    supports_single_entity: bool
+    expr: Bundle
+
+    def __init__(
+        self,
+        compile_state,
+        expr,
+        entities_collection,
+        is_current_entities,
+        setup_entities=True,
+        parent_bundle=None,
+    ):
+        compile_state._has_orm_entities = True
+
+        expr = expr._annotations["bundle"]
+        if parent_bundle:
+            parent_bundle._entities.append(self)
+        else:
+            entities_collection.append(self)
+
+        if isinstance(
+            expr, (attributes.QueryableAttribute, interfaces.PropComparator)
+        ):
+            bundle = expr.__clause_element__()
+        else:
+            bundle = expr
+
+        self.bundle = self.expr = bundle
+        self.type = type(bundle)
+        self._label_name = bundle.name
+        self._entities = []
+
+        if setup_entities:
+            for expr in bundle.exprs:
+                if "bundle" in expr._annotations:
+                    _BundleEntity(
+                        compile_state,
+                        expr,
+                        entities_collection,
+                        is_current_entities,
+                        parent_bundle=self,
+                    )
+                elif isinstance(expr, Bundle):
+                    _BundleEntity(
+                        compile_state,
+                        expr,
+                        entities_collection,
+                        is_current_entities,
+                        parent_bundle=self,
+                    )
+                else:
+                    _ORMColumnEntity._for_columns(
+                        compile_state,
+                        [expr],
+                        entities_collection,
+                        None,
+                        is_current_entities,
+                        parent_bundle=self,
+                    )
+
+        self.supports_single_entity = self.bundle.single_entity
+
+    @property
+    def mapper(self):
+        ezero = self.entity_zero
+        if ezero is not None:
+            return ezero.mapper
+        else:
+            return None
+
+    @property
+    def entity_zero(self):
+        for ent in self._entities:
+            ezero = ent.entity_zero
+            if ezero is not None:
+                return ezero
+        else:
+            return None
+
+    def corresponds_to(self, entity):
+        # TODO: we might be able to implement this but for now
+        # we are working around it
+        return False
+
+    @property
+    def entity_zero_or_selectable(self):
+        for ent in self._entities:
+            ezero = ent.entity_zero_or_selectable
+            if ezero is not None:
+                return ezero
+        else:
+            return None
+
+    def setup_compile_state(self, compile_state):
+        for ent in self._entities:
+            ent.setup_compile_state(compile_state)
+
+    def setup_dml_returning_compile_state(
+        self,
+        compile_state: ORMCompileState,
+        adapter: Optional[_DMLReturningColFilter],
+    ) -> None:
+        return self.setup_compile_state(compile_state)
+
+    def row_processor(self, context, result):
+        procs, labels, extra = zip(
+            *[ent.row_processor(context, result) for ent in self._entities]
+        )
+
+        proc = self.bundle.create_row_processor(context.query, procs, labels)
+
+        return proc, self._label_name, self._extra_entities
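+
+    # For illustration, assuming a hypothetical ``User`` mapping::
+    #
+    #     from sqlalchemy import select
+    #     from sqlalchemy.orm import Bundle
+    #
+    #     b = Bundle("user", User.id, User.name)
+    #     for row in session.execute(select(b)):
+    #         print(row.user.id, row.user.name)
+    #
+    # each ``row.user`` value is assembled by the processor created above
+    # from the per-column processors of the contained entities.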
+
+
+class _ColumnEntity(_QueryEntity):
+    __slots__ = (
+        "_fetch_column",
+        "_row_processor",
+        "raw_column_index",
+        "translate_raw_column",
+    )
+
+    @classmethod
+    def _for_columns(
+        cls,
+        compile_state,
+        columns,
+        entities_collection,
+        raw_column_index,
+        is_current_entities,
+        parent_bundle=None,
+    ):
+        for column in columns:
+            annotations = column._annotations
+            if "parententity" in annotations:
+                _entity = annotations["parententity"]
+            else:
+                _entity = sql_util.extract_first_column_annotation(
+                    column, "parententity"
+                )
+
+            if _entity:
+                if "identity_token" in column._annotations:
+                    _IdentityTokenEntity(
+                        compile_state,
+                        column,
+                        entities_collection,
+                        _entity,
+                        raw_column_index,
+                        is_current_entities,
+                        parent_bundle=parent_bundle,
+                    )
+                else:
+                    _ORMColumnEntity(
+                        compile_state,
+                        column,
+                        entities_collection,
+                        _entity,
+                        raw_column_index,
+                        is_current_entities,
+                        parent_bundle=parent_bundle,
+                    )
+            else:
+                _RawColumnEntity(
+                    compile_state,
+                    column,
+                    entities_collection,
+                    raw_column_index,
+                    is_current_entities,
+                    parent_bundle=parent_bundle,
+                )
+
+    @property
+    def type(self):
+        return self.column.type
+
+    @property
+    def _non_hashable_value(self):
+        return not self.column.type.hashable
+
+    @property
+    def _null_column_type(self):
+        return self.column.type._isnull
+
+    def row_processor(self, context, result):
+        compile_state = context.compile_state
+
+        # the resulting callable is entirely cacheable so just return
+        # it if we already made one
+        if self._row_processor is not None:
+            getter, label_name, extra_entities = self._row_processor
+            if self.translate_raw_column:
+                extra_entities += (
+                    context.query._raw_columns[self.raw_column_index],
+                )
+
+            return getter, label_name, extra_entities
+
+        # retrieve the column that would have been set up in
+        # setup_compile_state, to avoid doing redundant work
+        if self._fetch_column is not None:
+            column = self._fetch_column
+        else:
+            # fetch_column will be None when we are doing a from_statement
+            # and setup_compile_state may not have been called.
+            column = self.column
+
+            # previously, the RawColumnEntity didn't look for
+            # from_obj_alias; however, I can't think of a case where we
+            # would be here and we'd want to ignore it if this is the
+            # from_statement use case.  it's not really a use case to have
+            # raw columns + from_statement
+            if compile_state._from_obj_alias:
+                column = compile_state._from_obj_alias.columns[column]
+
+            if column._annotations:
+                # annotated columns perform more slowly in compiler and
+                # result due to the __eq__() method, so use deannotated
+                column = column._deannotate()
+
+        if compile_state.compound_eager_adapter:
+            column = compile_state.compound_eager_adapter.columns[column]
+
+        getter = result._getter(column)
+        ret = getter, self._label_name, self._extra_entities
+        self._row_processor = ret
+
+        if self.translate_raw_column:
+            extra_entities = self._extra_entities + (
+                context.query._raw_columns[self.raw_column_index],
+            )
+            return getter, self._label_name, extra_entities
+        else:
+            return ret
+
+
+class _RawColumnEntity(_ColumnEntity):
+    entity_zero = None
+    mapper = None
+    supports_single_entity = False
+
+    __slots__ = (
+        "expr",
+        "column",
+        "_label_name",
+        "entity_zero_or_selectable",
+        "_extra_entities",
+    )
+
+    def __init__(
+        self,
+        compile_state,
+        column,
+        entities_collection,
+        raw_column_index,
+        is_current_entities,
+        parent_bundle=None,
+    ):
+        self.expr = column
+        self.raw_column_index = raw_column_index
+        self.translate_raw_column = raw_column_index is not None
+
+        if column._is_star:
+            compile_state.compile_options += {"_is_star": True}
+
+        if not is_current_entities or column._is_text_clause:
+            self._label_name = None
+        else:
+            if parent_bundle:
+                self._label_name = column._proxy_key
+            else:
+                self._label_name = compile_state._label_convention(column)
+
+        if parent_bundle:
+            parent_bundle._entities.append(self)
+        else:
+            entities_collection.append(self)
+
+        self.column = column
+        self.entity_zero_or_selectable = (
+            self.column._from_objects[0] if self.column._from_objects else None
+        )
+        self._extra_entities = (self.expr, self.column)
+        self._fetch_column = self._row_processor = None
+
+    def corresponds_to(self, entity):
+        return False
+
+    def setup_dml_returning_compile_state(
+        self,
+        compile_state: ORMCompileState,
+        adapter: Optional[_DMLReturningColFilter],
+    ) -> None:
+        return self.setup_compile_state(compile_state)
+
+    def setup_compile_state(self, compile_state):
+        current_adapter = compile_state._get_current_adapter()
+        if current_adapter:
+            column = current_adapter(self.column, False)
+            if column is None:
+                return
+        else:
+            column = self.column
+
+        if column._annotations:
+            # annotated columns perform more slowly in compiler and
+            # result due to the __eq__() method, so use deannotated
+            column = column._deannotate()
+
+        compile_state.dedupe_columns.add(column)
+        compile_state.primary_columns.append(column)
+        self._fetch_column = column
+
+
+class _ORMColumnEntity(_ColumnEntity):
+    """Column/expression based entity."""
+
+    supports_single_entity = False
+
+    __slots__ = (
+        "expr",
+        "mapper",
+        "column",
+        "_label_name",
+        "entity_zero_or_selectable",
+        "entity_zero",
+        "_extra_entities",
+    )
+
+    def __init__(
+        self,
+        compile_state,
+        column,
+        entities_collection,
+        parententity,
+        raw_column_index,
+        is_current_entities,
+        parent_bundle=None,
+    ):
+        annotations = column._annotations
+
+        _entity = parententity
+
+        # an AliasedClass won't have proxy_key in the annotations for
+        # a column if it was acquired using the class' adapter directly,
+        # such as using AliasedInsp._adapt_element().  this occurs
+        # within internal loaders.
+
+        orm_key = annotations.get("proxy_key", None)
+        proxy_owner = annotations.get("proxy_owner", _entity)
+        if orm_key:
+            self.expr = getattr(proxy_owner.entity, orm_key)
+            self.translate_raw_column = False
+        else:
+            # if orm_key is not present, that means this is an ad-hoc
+            # SQL ColumnElement, like a CASE() or other expression.
+            # include this column position from the invoked statement
+            # in the ORM-level ResultSetMetaData on each execute, so that
+            # it can be targeted by identity after caching
+            self.expr = column
+            self.translate_raw_column = raw_column_index is not None
+
+        self.raw_column_index = raw_column_index
+
+        if is_current_entities:
+            if parent_bundle:
+                self._label_name = orm_key if orm_key else column._proxy_key
+            else:
+                self._label_name = compile_state._label_convention(
+                    column, col_name=orm_key
+                )
+        else:
+            self._label_name = None
+
+        _entity._post_inspect
+        self.entity_zero = self.entity_zero_or_selectable = ezero = _entity
+        self.mapper = mapper = _entity.mapper
+
+        if parent_bundle:
+            parent_bundle._entities.append(self)
+        else:
+            entities_collection.append(self)
+
+        compile_state._has_orm_entities = True
+
+        self.column = column
+
+        self._fetch_column = self._row_processor = None
+
+        self._extra_entities = (self.expr, self.column)
+
+        if mapper._should_select_with_poly_adapter:
+            compile_state._create_with_polymorphic_adapter(
+                ezero, ezero.selectable
+            )
+
+    def corresponds_to(self, entity):
+        if _is_aliased_class(entity):
+            # TODO: polymorphic subclasses ?
+            return entity is self.entity_zero
+        else:
+            return not _is_aliased_class(
+                self.entity_zero
+            ) and entity.common_parent(self.entity_zero)
+
+    def setup_dml_returning_compile_state(
+        self,
+        compile_state: ORMCompileState,
+        adapter: Optional[_DMLReturningColFilter],
+    ) -> None:
+
+        self._fetch_column = column = self.column
+        if adapter:
+            column = adapter(column, False)
+
+        if column is not None:
+            compile_state.dedupe_columns.add(column)
+            compile_state.primary_columns.append(column)
+
+    def setup_compile_state(self, compile_state):
+        current_adapter = compile_state._get_current_adapter()
+        if current_adapter:
+            column = current_adapter(self.column, False)
+            if column is None:
+                assert compile_state.is_dml_returning
+                self._fetch_column = self.column
+                return
+        else:
+            column = self.column
+
+        ezero = self.entity_zero
+
+        single_table_crit = self.mapper._single_table_criterion
+        if (
+            single_table_crit is not None
+            or ("additional_entity_criteria", self.mapper)
+            in compile_state.global_attributes
+        ):
+            compile_state.extra_criteria_entities[ezero] = (
+                ezero,
+                ezero._adapter if ezero.is_aliased_class else None,
+            )
+
+        if column._annotations and not column._expression_label:
+            # annotated columns perform more slowly in the compiler and in
+            # result processing due to the __eq__() method, so use the
+            # deannotated form
+            column = column._deannotate()
+
+        # use entity_zero as the from if we have it. this is necessary
+        # for polymorphic scenarios where our FROM is based on ORM entity,
+        # not the FROM of the column.  but also, don't use it if our column
+        # doesn't actually have any FROMs that line up, such as when it's
+        # a scalar subquery.
+        if set(self.column._from_objects).intersection(
+            ezero.selectable._from_objects
+        ):
+            compile_state._fallback_from_clauses.append(ezero.selectable)
+
+        compile_state.dedupe_columns.add(column)
+        compile_state.primary_columns.append(column)
+        self._fetch_column = column
+
+
+class _IdentityTokenEntity(_ORMColumnEntity):
+    translate_raw_column = False
+
+    def setup_compile_state(self, compile_state):
+        pass
+
+    def row_processor(self, context, result):
+        def getter(row):
+            return context.load_options._identity_token
+
+        return getter, self._label_name, self._extra_entities
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/decl_api.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/decl_api.py
new file mode 100644
index 00000000..c32851de
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/decl_api.py
@@ -0,0 +1,1917 @@
+# orm/decl_api.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Public API functions and helpers for declarative."""
+
+from __future__ import annotations
+
+import itertools
+import re
+import typing
+from typing import Any
+from typing import Callable
+from typing import ClassVar
+from typing import Dict
+from typing import FrozenSet
+from typing import Generic
+from typing import Iterable
+from typing import Iterator
+from typing import Mapping
+from typing import Optional
+from typing import overload
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+import weakref
+
+from . import attributes
+from . import clsregistry
+from . import instrumentation
+from . import interfaces
+from . import mapperlib
+from ._orm_constructors import composite
+from ._orm_constructors import deferred
+from ._orm_constructors import mapped_column
+from ._orm_constructors import relationship
+from ._orm_constructors import synonym
+from .attributes import InstrumentedAttribute
+from .base import _inspect_mapped_class
+from .base import _is_mapped_class
+from .base import Mapped
+from .base import ORMDescriptor
+from .decl_base import _add_attribute
+from .decl_base import _as_declarative
+from .decl_base import _ClassScanMapperConfig
+from .decl_base import _declarative_constructor
+from .decl_base import _DeferredMapperConfig
+from .decl_base import _del_attribute
+from .decl_base import _mapper
+from .descriptor_props import Composite
+from .descriptor_props import Synonym
+from .descriptor_props import Synonym as _orm_synonym
+from .mapper import Mapper
+from .properties import MappedColumn
+from .relationships import RelationshipProperty
+from .state import InstanceState
+from .. import exc
+from .. import inspection
+from .. import util
+from ..sql import sqltypes
+from ..sql.base import _NoArg
+from ..sql.elements import SQLCoreOperations
+from ..sql.schema import MetaData
+from ..sql.selectable import FromClause
+from ..util import hybridmethod
+from ..util import hybridproperty
+from ..util import typing as compat_typing
+from ..util import warn_deprecated
+from ..util.typing import CallableReference
+from ..util.typing import de_optionalize_union_types
+from ..util.typing import flatten_newtype
+from ..util.typing import is_generic
+from ..util.typing import is_literal
+from ..util.typing import is_newtype
+from ..util.typing import is_pep695
+from ..util.typing import Literal
+from ..util.typing import LITERAL_TYPES
+from ..util.typing import Self
+
+if TYPE_CHECKING:
+    from ._typing import _O
+    from ._typing import _RegistryType
+    from .decl_base import _DataclassArguments
+    from .instrumentation import ClassManager
+    from .interfaces import MapperProperty
+    from .state import InstanceState  # noqa
+    from ..sql._typing import _TypeEngineArgument
+    from ..sql.type_api import _MatchedOnType
+
+_T = TypeVar("_T", bound=Any)
+
+_TT = TypeVar("_TT", bound=Any)
+
+# it's not clear how to have Annotated, Union objects etc. as keys here
+# from a typing perspective so just leave it open ended for now
+_TypeAnnotationMapType = Mapping[Any, "_TypeEngineArgument[Any]"]
+_MutableTypeAnnotationMapType = Dict[Any, "_TypeEngineArgument[Any]"]
+
+_DeclaredAttrDecorated = Callable[
+    ..., Union[Mapped[_T], ORMDescriptor[_T], SQLCoreOperations[_T]]
+]
+
+
+def has_inherited_table(cls: Type[_O]) -> bool:
+    """Given a class, return True if any of the classes it inherits from has a
+    mapped table, otherwise return False.
+
+    This is used in declarative mixins to build attributes that behave
+    differently for the base class vs. a subclass in an inheritance
+    hierarchy.
+
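+    E.g., a minimal sketch (``Tablename`` is an illustrative mixin name)::
+
+        class Tablename:
+            @declared_attr.directive
+            def __tablename__(cls) -> Optional[str]:
+                if has_inherited_table(cls):
+                    return None
+                return cls.__name__.lower()
+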
+    .. seealso::
+
+        :ref:`decl_mixin_inheritance`
+
+    """
+    for class_ in cls.__mro__[1:]:
+        if getattr(class_, "__table__", None) is not None:
+            return True
+    return False
+
+
+class _DynamicAttributesType(type):
+    def __setattr__(cls, key: str, value: Any) -> None:
+        if "__mapper__" in cls.__dict__:
+            _add_attribute(cls, key, value)
+        else:
+            type.__setattr__(cls, key, value)
+
+    def __delattr__(cls, key: str) -> None:
+        if "__mapper__" in cls.__dict__:
+            _del_attribute(cls, key)
+        else:
+            type.__delattr__(cls, key)
+
+
+class DeclarativeAttributeIntercept(
+    _DynamicAttributesType,
+    # Inspectable is used only by the mypy plugin
+    inspection.Inspectable[Mapper[Any]],
+):
+    """Metaclass that may be used in conjunction with the
+    :class:`_orm.DeclarativeBase` class to support addition of class
+    attributes dynamically.
+
+    """
+
+
+@compat_typing.dataclass_transform(
+    field_specifiers=(
+        MappedColumn,
+        RelationshipProperty,
+        Composite,
+        Synonym,
+        mapped_column,
+        relationship,
+        composite,
+        synonym,
+        deferred,
+    ),
+)
+class DCTransformDeclarative(DeclarativeAttributeIntercept):
+    """metaclass that includes @dataclass_transforms"""
+
+
+class DeclarativeMeta(DeclarativeAttributeIntercept):
+    metadata: MetaData
+    registry: RegistryType
+
+    def __init__(
+        cls, classname: Any, bases: Any, dict_: Any, **kw: Any
+    ) -> None:
+        # use cls.__dict__, which can be modified by an
+        # __init_subclass__() method (#7900)
+        dict_ = cls.__dict__
+
+        # early-consume registry from the initial declarative base,
+        # assign privately to not conflict with subclass attributes named
+        # "registry"
+        reg = getattr(cls, "_sa_registry", None)
+        if reg is None:
+            reg = dict_.get("registry", None)
+            if not isinstance(reg, registry):
+                raise exc.InvalidRequestError(
+                    "Declarative base class has no 'registry' attribute, "
+                    "or registry is not a sqlalchemy.orm.registry() object"
+                )
+            else:
+                cls._sa_registry = reg
+
+        if not cls.__dict__.get("__abstract__", False):
+            _as_declarative(reg, cls, dict_)
+        type.__init__(cls, classname, bases, dict_)
+
+
+def synonym_for(
+    name: str, map_column: bool = False
+) -> Callable[[Callable[..., Any]], Synonym[Any]]:
+    """Decorator that produces an :func:`_orm.synonym`
+    attribute in conjunction with a Python descriptor.
+
+    The function being decorated is passed to :func:`_orm.synonym` as the
+    :paramref:`.orm.synonym.descriptor` parameter::
+
+        class MyClass(Base):
+            __tablename__ = "my_table"
+
+            id = Column(Integer, primary_key=True)
+            _job_status = Column("job_status", String(50))
+
+            @synonym_for("job_status")
+            @property
+            def job_status(self):
+                return "Status: %s" % self._job_status
+
+    The :ref:`hybrid properties <mapper_hybrids>` feature of SQLAlchemy
+    is typically preferred over synonyms, which are a legacy
+    feature.
+
+    .. seealso::
+
+        :ref:`synonyms` - Overview of synonyms
+
+        :func:`_orm.synonym` - the mapper-level function
+
+        :ref:`mapper_hybrids` - The Hybrid Attribute extension provides an
+        updated approach to augmenting attribute behavior more flexibly than
+        can be achieved with synonyms.
+
+    """
+
+    def decorate(fn: Callable[..., Any]) -> Synonym[Any]:
+        return _orm_synonym(name, map_column=map_column, descriptor=fn)
+
+    return decorate
+
+
+class _declared_attr_common:
+    def __init__(
+        self,
+        fn: Callable[..., Any],
+        cascading: bool = False,
+        quiet: bool = False,
+    ):
+        # support
+        # @declared_attr
+        # @classmethod
+        # def foo(cls) -> Mapped[thing]:
+        #    ...
+        # which seems to help typing tools interpret the fn as a classmethod
+        # for situations where needed
+        if isinstance(fn, classmethod):
+            fn = fn.__func__
+
+        self.fget = fn
+        self._cascading = cascading
+        self._quiet = quiet
+        self.__doc__ = fn.__doc__
+
+    def _collect_return_annotation(self) -> Optional[Type[Any]]:
+        return util.get_annotations(self.fget).get("return")
+
+    def __get__(self, instance: Optional[object], owner: Any) -> Any:
+        # the declared_attr needs to make use of a cache that exists
+        # for the span of the declarative scan_attributes() phase.
+        # to achieve this we look at the class manager that's configured.
+
+        # note this method should not be called outside of the declarative
+        # setup phase
+
+        cls = owner
+        manager = attributes.opt_manager_of_class(cls)
+        if manager is None:
+            if not re.match(r"^__.+__$", self.fget.__name__):
+                # if there is no manager at all, then this class hasn't been
+                # run through declarative or mapper() at all, emit a warning.
+                util.warn(
+                    "Unmanaged access of declarative attribute %s from "
+                    "non-mapped class %s" % (self.fget.__name__, cls.__name__)
+                )
+            return self.fget(cls)
+        elif manager.is_mapped:
+            # the class is mapped, which means we're outside of the declarative
+            # scan setup, just run the function.
+            return self.fget(cls)
+
+        # here, we are inside of the declarative scan.  use the registry
+        # that is tracking the values of these attributes.
+        declarative_scan = manager.declarative_scan()
+
+        # assert that we are in fact in the declarative scan
+        assert declarative_scan is not None
+
+        reg = declarative_scan.declared_attr_reg
+
+        if self in reg:
+            return reg[self]
+        else:
+            reg[self] = obj = self.fget(cls)
+            return obj
+
+
+class _declared_directive(_declared_attr_common, Generic[_T]):
+    # see mapping_api.rst for docstring
+
+    if typing.TYPE_CHECKING:
+
+        def __init__(
+            self,
+            fn: Callable[..., _T],
+            cascading: bool = False,
+        ): ...
+
+        def __get__(self, instance: Optional[object], owner: Any) -> _T: ...
+
+        def __set__(self, instance: Any, value: Any) -> None: ...
+
+        def __delete__(self, instance: Any) -> None: ...
+
+        def __call__(self, fn: Callable[..., _TT]) -> _declared_directive[_TT]:
+            # extensive fooling of mypy underway...
+            ...
+
+
+class declared_attr(interfaces._MappedAttribute[_T], _declared_attr_common):
+    """Mark a class-level method as representing the definition of
+    a mapped property or Declarative directive.
+
+    :class:`_orm.declared_attr` is typically applied as a decorator to a class
+    level method, turning the attribute into a scalar-like property that can be
+    invoked from the uninstantiated class. The Declarative mapping process
+    looks for these :class:`_orm.declared_attr` callables as it scans classes,
+    and assumes any attribute marked with :class:`_orm.declared_attr` will be a
+    callable that will produce an object specific to the Declarative mapping or
+    table configuration.
+
+    :class:`_orm.declared_attr` is usually applicable to
+    :ref:`mixins <orm_mixins_toplevel>`, to define relationships that are to be
+    applied to different implementors of the class. It may also be used to
+    define dynamically generated column expressions and other Declarative
+    attributes.
+
+    Example::
+
+        class ProvidesUserMixin:
+            "A mixin that adds a 'user' relationship to classes."
+
+            user_id: Mapped[int] = mapped_column(ForeignKey("user_table.id"))
+
+            @declared_attr
+            def user(cls) -> Mapped["User"]:
+                return relationship("User")
+
+    When used with Declarative directives such as ``__tablename__``, the
+    :meth:`_orm.declared_attr.directive` modifier may be used which indicates
+    to :pep:`484` typing tools that the given method is not dealing with
+    :class:`_orm.Mapped` attributes::
+
+        class CreateTableName:
+            @declared_attr.directive
+            def __tablename__(cls) -> str:
+                return cls.__name__.lower()
+
+    :class:`_orm.declared_attr` can also be applied directly to mapped
+    classes, to allow for attributes that dynamically configure themselves
+    on subclasses when using mapped inheritance schemes.   Below
+    illustrates :class:`_orm.declared_attr` to create a dynamic scheme
+    for generating the :paramref:`_orm.Mapper.polymorphic_identity` parameter
+    for subclasses::
+
+        class Employee(Base):
+            __tablename__ = "employee"
+
+            id: Mapped[int] = mapped_column(primary_key=True)
+            type: Mapped[str] = mapped_column(String(50))
+
+            @declared_attr.directive
+            def __mapper_args__(cls) -> Dict[str, Any]:
+                if cls.__name__ == "Employee":
+                    return {
+                        "polymorphic_on": cls.type,
+                        "polymorphic_identity": "Employee",
+                    }
+                else:
+                    return {"polymorphic_identity": cls.__name__}
+
+
+        class Engineer(Employee):
+            pass
+
+    :class:`_orm.declared_attr` supports decorating functions that are
+    explicitly decorated with ``@classmethod``. This is never necessary from a
+    runtime perspective; however, it may be needed to support :pep:`484`
+    typing tools that don't otherwise recognize the decorated function as
+    having class-level behaviors for the ``cls`` parameter::
+
+        class SomethingMixin:
+            x: Mapped[int]
+            y: Mapped[int]
+
+            @declared_attr
+            @classmethod
+            def x_plus_y(cls) -> Mapped[int]:
+                return column_property(cls.x + cls.y)
+
+    .. versionadded:: 2.0 - :class:`_orm.declared_attr` can accommodate a
+       function decorated with ``@classmethod`` to help with :pep:`484`
+       integration where needed.
+
+
+    .. seealso::
+
+        :ref:`orm_mixins_toplevel` - Declarative Mixin documentation with
+        background on use patterns for :class:`_orm.declared_attr`.
+
+    """  # noqa: E501
+
+    if typing.TYPE_CHECKING:
+
+        def __init__(
+            self,
+            fn: _DeclaredAttrDecorated[_T],
+            cascading: bool = False,
+        ): ...
+
+        def __set__(self, instance: Any, value: Any) -> None: ...
+
+        def __delete__(self, instance: Any) -> None: ...
+
+        # this is the Mapped[] API where at class descriptor get time we want
+        # the type checker to see InstrumentedAttribute[_T].   However the
+        # callable function prior to mapping in fact calls the given
+        # declarative function that does not return InstrumentedAttribute
+        @overload
+        def __get__(
+            self, instance: None, owner: Any
+        ) -> InstrumentedAttribute[_T]: ...
+
+        @overload
+        def __get__(self, instance: object, owner: Any) -> _T: ...
+
+        def __get__(
+            self, instance: Optional[object], owner: Any
+        ) -> Union[InstrumentedAttribute[_T], _T]: ...
+
+    @hybridmethod
+    def _stateful(cls, **kw: Any) -> _stateful_declared_attr[_T]:
+        return _stateful_declared_attr(**kw)
+
+    @hybridproperty
+    def directive(cls) -> _declared_directive[Any]:
+        # see mapping_api.rst for docstring
+        return _declared_directive  # type: ignore
+
+    @hybridproperty
+    def cascading(cls) -> _stateful_declared_attr[_T]:
+        # see mapping_api.rst for docstring
+        return cls._stateful(cascading=True)
+
+
+class _stateful_declared_attr(declared_attr[_T]):
+    kw: Dict[str, Any]
+
+    def __init__(self, **kw: Any):
+        self.kw = kw
+
+    @hybridmethod
+    def _stateful(self, **kw: Any) -> _stateful_declared_attr[_T]:
+        new_kw = self.kw.copy()
+        new_kw.update(kw)
+        return _stateful_declared_attr(**new_kw)
+
+    def __call__(self, fn: _DeclaredAttrDecorated[_T]) -> declared_attr[_T]:
+        return declared_attr(fn, **self.kw)
+
+
+def declarative_mixin(cls: Type[_T]) -> Type[_T]:
+    """Mark a class as providing the feature of "declarative mixin".
+
+    E.g.::
+
+        from sqlalchemy.orm import declared_attr
+        from sqlalchemy.orm import declarative_mixin
+
+
+        @declarative_mixin
+        class MyMixin:
+
+            @declared_attr
+            def __tablename__(cls):
+                return cls.__name__.lower()
+
+            __table_args__ = {"mysql_engine": "InnoDB"}
+            __mapper_args__ = {"always_refresh": True}
+
+            id = Column(Integer, primary_key=True)
+
+
+        class MyModel(MyMixin, Base):
+            name = Column(String(1000))
+
+    The :func:`_orm.declarative_mixin` decorator currently does not modify
+    the given class in any way; its current purpose is strictly to assist
+    the :ref:`Mypy plugin <mypy_toplevel>` in being able to identify
+    SQLAlchemy declarative mixin classes when no other context is present.
+
+    .. versionadded:: 1.4.6
+
+    .. seealso::
+
+        :ref:`orm_mixins_toplevel`
+
+        :ref:`mypy_declarative_mixins` - in the
+        :ref:`Mypy plugin documentation <mypy_toplevel>`
+
+    """  # noqa: E501
+
+    return cls
+
+
+def _setup_declarative_base(cls: Type[Any]) -> None:
+    if "metadata" in cls.__dict__:
+        metadata = cls.__dict__["metadata"]
+    else:
+        metadata = None
+
+    if "type_annotation_map" in cls.__dict__:
+        type_annotation_map = cls.__dict__["type_annotation_map"]
+    else:
+        type_annotation_map = None
+
+    reg = cls.__dict__.get("registry", None)
+    if reg is not None:
+        if not isinstance(reg, registry):
+            raise exc.InvalidRequestError(
+                "Declarative base class has a 'registry' attribute that is "
+                "not an instance of sqlalchemy.orm.registry()"
+            )
+        elif type_annotation_map is not None:
+            raise exc.InvalidRequestError(
+                "Declarative base class has both a 'registry' attribute and a "
+                "type_annotation_map entry.  Per-base type_annotation_maps "
+                "are not supported.  Please apply the type_annotation_map "
+                "to this registry directly."
+            )
+
+    else:
+        reg = registry(
+            metadata=metadata, type_annotation_map=type_annotation_map
+        )
+        cls.registry = reg
+
+    cls._sa_registry = reg
+
+    if "metadata" not in cls.__dict__:
+        cls.metadata = cls.registry.metadata
+
+    if getattr(cls, "__init__", object.__init__) is object.__init__:
+        cls.__init__ = cls.registry.constructor
+
+
+class MappedAsDataclass(metaclass=DCTransformDeclarative):
+    """Mixin class to indicate when mapping this class, also convert it to be
+    a dataclass.
+
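+    E.g., a minimal sketch (``Base`` and ``User`` are illustrative names)::
+
+        class Base(MappedAsDataclass, DeclarativeBase):
+            pass
+
+
+        class User(Base):
+            __tablename__ = "user_account"
+
+            id: Mapped[int] = mapped_column(init=False, primary_key=True)
+            name: Mapped[str]
+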
+    .. seealso::
+
+        :ref:`orm_declarative_native_dataclasses` - complete background
+        on SQLAlchemy native dataclass mapping
+
+    .. versionadded:: 2.0
+
+    """
+
+    def __init_subclass__(
+        cls,
+        init: Union[_NoArg, bool] = _NoArg.NO_ARG,
+        repr: Union[_NoArg, bool] = _NoArg.NO_ARG,  # noqa: A002
+        eq: Union[_NoArg, bool] = _NoArg.NO_ARG,
+        order: Union[_NoArg, bool] = _NoArg.NO_ARG,
+        unsafe_hash: Union[_NoArg, bool] = _NoArg.NO_ARG,
+        match_args: Union[_NoArg, bool] = _NoArg.NO_ARG,
+        kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+        dataclass_callable: Union[
+            _NoArg, Callable[..., Type[Any]]
+        ] = _NoArg.NO_ARG,
+        **kw: Any,
+    ) -> None:
+        apply_dc_transforms: _DataclassArguments = {
+            "init": init,
+            "repr": repr,
+            "eq": eq,
+            "order": order,
+            "unsafe_hash": unsafe_hash,
+            "match_args": match_args,
+            "kw_only": kw_only,
+            "dataclass_callable": dataclass_callable,
+        }
+
+        current_transforms: _DataclassArguments
+
+        if hasattr(cls, "_sa_apply_dc_transforms"):
+            current = cls._sa_apply_dc_transforms
+
+            _ClassScanMapperConfig._assert_dc_arguments(current)
+
+            cls._sa_apply_dc_transforms = current_transforms = {  # type: ignore  # noqa: E501
+                k: current.get(k, _NoArg.NO_ARG) if v is _NoArg.NO_ARG else v
+                for k, v in apply_dc_transforms.items()
+            }
+        else:
+            cls._sa_apply_dc_transforms = current_transforms = (
+                apply_dc_transforms
+            )
+
+        super().__init_subclass__(**kw)
+
+        if not _is_mapped_class(cls):
+            new_anno = (
+                _ClassScanMapperConfig._update_annotations_for_non_mapped_class
+            )(cls)
+            _ClassScanMapperConfig._apply_dataclasses_to_any_class(
+                current_transforms, cls, new_anno
+            )
+
+
+class DeclarativeBase(
+    # Inspectable is used only by the mypy plugin
+    inspection.Inspectable[InstanceState[Any]],
+    metaclass=DeclarativeAttributeIntercept,
+):
+    """Base class used for declarative class definitions.
+
+    The :class:`_orm.DeclarativeBase` allows for the creation of new
+    declarative bases in such a way that is compatible with type checkers::
+
+
+        from sqlalchemy.orm import DeclarativeBase
+
+
+        class Base(DeclarativeBase):
+            pass
+
+    The above ``Base`` class is now usable as the base for new declarative
+    mappings.  The superclass makes use of the ``__init_subclass__()``
+    method to set up new classes; metaclasses are not used.
+
+    When first used, the :class:`_orm.DeclarativeBase` class instantiates a new
+    :class:`_orm.registry` to be used with the base, assuming one was not
+    provided explicitly. The :class:`_orm.DeclarativeBase` class supports
+    class-level attributes which act as parameters for the construction of this
+    registry; such as to indicate a specific :class:`_schema.MetaData`
+    collection as well as a specific value for
+    :paramref:`_orm.registry.type_annotation_map`::
+
+        from typing_extensions import Annotated
+
+        from sqlalchemy import BigInteger
+        from sqlalchemy import MetaData
+        from sqlalchemy import String
+        from sqlalchemy.orm import DeclarativeBase
+
+        bigint = Annotated[int, "bigint"]
+        my_metadata = MetaData()
+
+
+        class Base(DeclarativeBase):
+            metadata = my_metadata
+            type_annotation_map = {
+                str: String().with_variant(String(255), "mysql", "mariadb"),
+                bigint: BigInteger(),
+            }
+
+    Class-level attributes which may be specified include:
+
+    :param metadata: optional :class:`_schema.MetaData` collection.
+     If a :class:`_orm.registry` is constructed automatically, this
+     :class:`_schema.MetaData` collection will be used to construct it.
+     Otherwise, the local :class:`_schema.MetaData` collection will supersede
+     that used by an existing :class:`_orm.registry` passed using the
+     :paramref:`_orm.DeclarativeBase.registry` parameter.
+    :param type_annotation_map: optional type annotation map that will be
+     passed to the :class:`_orm.registry` as
+     :paramref:`_orm.registry.type_annotation_map`.
+    :param registry: supply a pre-existing :class:`_orm.registry` directly.
+
+    .. versionadded:: 2.0  Added :class:`.DeclarativeBase`, so that declarative
+       base classes may be constructed in such a way that is also recognized
+       by :pep:`484` type checkers.   As a result, :class:`.DeclarativeBase`
+       and other subclassing-oriented APIs should be seen as
+       superseding previous "class returned by a function" APIs, namely
+       :func:`_orm.declarative_base` and :meth:`_orm.registry.generate_base`,
+       where the base class returned cannot be recognized by type checkers
+       without using plugins.
+
+    **__init__ behavior**
+
+    In a plain Python class, the base-most ``__init__()`` method in the class
+    hierarchy is ``object.__init__()``, which accepts no arguments. However,
+    when the :class:`_orm.DeclarativeBase` subclass is first declared, the
+    class is given an ``__init__()`` method that links to the
+    :paramref:`_orm.registry.constructor` constructor function, if no
+    ``__init__()`` method is already present; this is the usual declarative
+    constructor that will assign keyword arguments as attributes on the
+    instance, assuming those attributes are established at the class level
+    (i.e. are mapped, or are linked to a descriptor). This constructor is
+    **never accessed by a mapped class without being called explicitly via
+    super()**, as mapped classes are themselves given an ``__init__()`` method
+    directly which calls :paramref:`_orm.registry.constructor`, so in the
+    default case works independently of what the base-most ``__init__()``
+    method does.
+
+    .. versionchanged:: 2.0.1  :class:`_orm.DeclarativeBase` has a default
+       constructor that links to :paramref:`_orm.registry.constructor` by
+       default, so that calls to ``super().__init__()`` can access this
+       constructor. Previously, due to an implementation mistake, this default
+       constructor was missing, and calling ``super().__init__()`` would invoke
+       ``object.__init__()``.
+
+    The :class:`_orm.DeclarativeBase` subclass may also declare an explicit
+    ``__init__()`` method which will replace the use of the
+    :paramref:`_orm.registry.constructor` function at this level::
+
+        class Base(DeclarativeBase):
+            def __init__(self, id=None):
+                self.id = id
+
+    Mapped classes still will not invoke this constructor implicitly; it
+    remains only accessible by calling ``super().__init__()``::
+
+        class MyClass(Base):
+            def __init__(self, id=None, name=None):
+                self.name = name
+                super().__init__(id=id)
+
+    Note that this is a different behavior from what functions like the legacy
+    :func:`_orm.declarative_base` would do; the base created by those functions
+    would always install :paramref:`_orm.registry.constructor` for
+    ``__init__()``.
+
+
+    """
+
+    if typing.TYPE_CHECKING:
+
+        def _sa_inspect_type(self) -> Mapper[Self]: ...
+
+        def _sa_inspect_instance(self) -> InstanceState[Self]: ...
+
+        _sa_registry: ClassVar[_RegistryType]
+
+        registry: ClassVar[_RegistryType]
+        """Refers to the :class:`_orm.registry` in use where new
+        :class:`_orm.Mapper` objects will be associated."""
+
+        metadata: ClassVar[MetaData]
+        """Refers to the :class:`_schema.MetaData` collection that will be used
+        for new :class:`_schema.Table` objects.
+
+        .. seealso::
+
+            :ref:`orm_declarative_metadata`
+
+        """
+
+        __name__: ClassVar[str]
+
+        # this ideally should be Mapper[Self], but mypy as of 1.4.1 does not
+        # like it, and breaks the declared_attr_one test. Pyright/pylance is
+        # ok with it.
+        __mapper__: ClassVar[Mapper[Any]]
+        """The :class:`_orm.Mapper` object to which a particular class is
+        mapped.
+
+        May also be acquired using :func:`_sa.inspect`, e.g.
+        ``inspect(klass)``.
+
+        """
+
+        __table__: ClassVar[FromClause]
+        """The :class:`_sql.FromClause` to which a particular subclass is
+        mapped.
+
+        This is usually an instance of :class:`_schema.Table` but may also
+        refer to other kinds of :class:`_sql.FromClause` such as
+        :class:`_sql.Subquery`, depending on how the class is mapped.
+
+        .. seealso::
+
+            :ref:`orm_declarative_metadata`
+
+        """
+
+        # pyright/pylance do not consider a classmethod a ClassVar so use Any
+        # https://github.com/microsoft/pylance-release/issues/3484
+        __tablename__: Any
+        """String name to assign to the generated
+        :class:`_schema.Table` object, if not specified directly via
+        :attr:`_orm.DeclarativeBase.__table__`.
+
+        .. seealso::
+
+            :ref:`orm_declarative_table`
+
+        """
+
+        __mapper_args__: Any
+        """Dictionary of arguments which will be passed to the
+        :class:`_orm.Mapper` constructor.
+
+        .. seealso::
+
+            :ref:`orm_declarative_mapper_options`
+
+        """
+
+        __table_args__: Any
+        """A dictionary or tuple of arguments that will be passed to the
+        :class:`_schema.Table` constructor.  See
+        :ref:`orm_declarative_table_configuration`
+        for background on the specific structure of this collection.
+
+        .. seealso::
+
+            :ref:`orm_declarative_table_configuration`
+
+        """
+
+        def __init__(self, **kw: Any): ...
+
+    def __init_subclass__(cls, **kw: Any) -> None:
+        if DeclarativeBase in cls.__bases__:
+            _check_not_declarative(cls, DeclarativeBase)
+            _setup_declarative_base(cls)
+        else:
+            _as_declarative(cls._sa_registry, cls, cls.__dict__)
+        super().__init_subclass__(**kw)
+
+
+def _check_not_declarative(cls: Type[Any], base: Type[Any]) -> None:
+    cls_dict = cls.__dict__
+    if (
+        "__table__" in cls_dict
+        and not (
+            callable(cls_dict["__table__"])
+            or hasattr(cls_dict["__table__"], "__get__")
+        )
+    ) or isinstance(cls_dict.get("__tablename__", None), str):
+        raise exc.InvalidRequestError(
+            f"Cannot use {base.__name__!r} directly as a declarative base "
+            "class. Create a Base by creating a subclass of it."
+        )
+
+
+class DeclarativeBaseNoMeta(
+    # Inspectable is used only by the mypy plugin
+    inspection.Inspectable[InstanceState[Any]]
+):
+    """Same as :class:`_orm.DeclarativeBase`, but does not use a metaclass
+    to intercept new attributes.
+
+    The :class:`_orm.DeclarativeBaseNoMeta` base may be used when use of
+    custom metaclasses is desirable.
+
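+    E.g., a minimal sketch, assuming a custom metaclass ``MyMeta`` derived
+    from ``type``::
+
+        class Base(DeclarativeBaseNoMeta, metaclass=MyMeta):
+            pass
+
+    Note that without the metaclass intercept, attributes assigned to a
+    mapped class after the fact are not automatically instrumented; see
+    :func:`_orm.add_mapped_attribute` for that use case.
+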
+    .. versionadded:: 2.0
+
+
+    """
+
+    _sa_registry: ClassVar[_RegistryType]
+
+    registry: ClassVar[_RegistryType]
+    """Refers to the :class:`_orm.registry` in use where new
+    :class:`_orm.Mapper` objects will be associated."""
+
+    metadata: ClassVar[MetaData]
+    """Refers to the :class:`_schema.MetaData` collection that will be used
+    for new :class:`_schema.Table` objects.
+
+    .. seealso::
+
+        :ref:`orm_declarative_metadata`
+
+    """
+
+    # this ideally should be Mapper[Self], but mypy as of 1.4.1 does not
+    # like it, and breaks the declared_attr_one test. Pyright/pylance is
+    # ok with it.
+    __mapper__: ClassVar[Mapper[Any]]
+    """The :class:`_orm.Mapper` object to which a particular class is
+    mapped.
+
+    May also be acquired using :func:`_sa.inspect`, e.g.
+    ``inspect(klass)``.
+
+    """
+
+    __table__: Optional[FromClause]
+    """The :class:`_sql.FromClause` to which a particular subclass is
+    mapped.
+
+    This is usually an instance of :class:`_schema.Table` but may also
+    refer to other kinds of :class:`_sql.FromClause` such as
+    :class:`_sql.Subquery`, depending on how the class is mapped.
+
+    .. seealso::
+
+        :ref:`orm_declarative_metadata`
+
+    """
+
+    if typing.TYPE_CHECKING:
+
+        def _sa_inspect_type(self) -> Mapper[Self]: ...
+
+        def _sa_inspect_instance(self) -> InstanceState[Self]: ...
+
+        __tablename__: Any
+        """String name to assign to the generated
+        :class:`_schema.Table` object, if not specified directly via
+        :attr:`_orm.DeclarativeBase.__table__`.
+
+        .. seealso::
+
+            :ref:`orm_declarative_table`
+
+        """
+
+        __mapper_args__: Any
+        """Dictionary of arguments which will be passed to the
+        :class:`_orm.Mapper` constructor.
+
+        .. seealso::
+
+            :ref:`orm_declarative_mapper_options`
+
+        """
+
+        __table_args__: Any
+        """A dictionary or tuple of arguments that will be passed to the
+        :class:`_schema.Table` constructor.  See
+        :ref:`orm_declarative_table_configuration`
+        for background on the specific structure of this collection.
+
+        .. seealso::
+
+            :ref:`orm_declarative_table_configuration`
+
+        """
+
+        def __init__(self, **kw: Any): ...
+
+    def __init_subclass__(cls, **kw: Any) -> None:
+        if DeclarativeBaseNoMeta in cls.__bases__:
+            _check_not_declarative(cls, DeclarativeBaseNoMeta)
+            _setup_declarative_base(cls)
+        else:
+            _as_declarative(cls._sa_registry, cls, cls.__dict__)
+        super().__init_subclass__(**kw)
+
+
+def add_mapped_attribute(
+    target: Type[_O], key: str, attr: MapperProperty[Any]
+) -> None:
+    """Add a new mapped attribute to an ORM mapped class.
+
+    E.g.::
+
+        add_mapped_attribute(User, "addresses", relationship(Address))
+
+    This may be used for ORM mappings that aren't using a declarative
+    metaclass that intercepts attribute set operations.
+
+    .. versionadded:: 2.0
+
+
+    """
+    _add_attribute(target, key, attr)
+
+
+def declarative_base(
+    *,
+    metadata: Optional[MetaData] = None,
+    mapper: Optional[Callable[..., Mapper[Any]]] = None,
+    cls: Type[Any] = object,
+    name: str = "Base",
+    class_registry: Optional[clsregistry._ClsRegistryType] = None,
+    type_annotation_map: Optional[_TypeAnnotationMapType] = None,
+    constructor: Callable[..., None] = _declarative_constructor,
+    metaclass: Type[Any] = DeclarativeMeta,
+) -> Any:
+    r"""Construct a base class for declarative class definitions.
+
+    The new base class will be given a metaclass that produces
+    appropriate :class:`~sqlalchemy.schema.Table` objects and makes
+    the appropriate :class:`_orm.Mapper` calls based on the
+    information provided declaratively in the class and any subclasses
+    of the class.
+
+    .. versionchanged:: 2.0 Note that the :func:`_orm.declarative_base`
+       function is superseded by the new :class:`_orm.DeclarativeBase` class,
+       which generates a new "base" class using subclassing, rather than
+       return value of a function.  This allows an approach that is compatible
+       with :pep:`484` typing tools.
+
+    The :func:`_orm.declarative_base` function is a shorthand version
+    of using the :meth:`_orm.registry.generate_base`
+    method.  That is, the following::
+
+        from sqlalchemy.orm import declarative_base
+
+        Base = declarative_base()
+
+    Is equivalent to::
+
+        from sqlalchemy.orm import registry
+
+        mapper_registry = registry()
+        Base = mapper_registry.generate_base()
+
+    See the docstring for :class:`_orm.registry`
+    and :meth:`_orm.registry.generate_base`
+    for more details.
+
+    .. versionchanged:: 1.4  The :func:`_orm.declarative_base`
+       function is now a specialization of the more generic
+       :class:`_orm.registry` class.  The function also moves to the
+       ``sqlalchemy.orm`` package from the ``sqlalchemy.ext.declarative``
+       package.
+
+
+    :param metadata:
+      An optional :class:`~sqlalchemy.schema.MetaData` instance.  All
+      :class:`~sqlalchemy.schema.Table` objects implicitly declared by
+      subclasses of the base will share this MetaData.  A MetaData instance
+      will be created if none is provided.  The
+      :class:`~sqlalchemy.schema.MetaData` instance will be available via the
+      ``metadata`` attribute of the generated declarative base class.
+
+    :param mapper:
+      An optional callable, defaults to :class:`_orm.Mapper`. Will
+      be used to map subclasses to their Tables.
+
+    :param cls:
+      Defaults to :class:`object`. A type to use as the base for the generated
+      declarative base class. May be a class or tuple of classes.
+
+    :param name:
+      Defaults to ``Base``.  The display name for the generated
+      class.  Customizing this is not required, but can improve clarity in
+      tracebacks and debugging.
+
+    :param constructor:
+      Specify the implementation for the ``__init__`` function on a mapped
+      class that has no ``__init__`` of its own.  Defaults to an
+      implementation that assigns \**kwargs for declared
+      fields and relationships to an instance.  If ``None`` is supplied,
+      no __init__ will be provided and construction will fall back to
+      cls.__init__ by way of the normal Python semantics.
+
+    :param class_registry: optional dictionary that will serve as the
+      registry of class names-> mapped classes when string names
+      are used to identify classes inside of :func:`_orm.relationship`
+      and others.  Allows two or more declarative base classes
+      to share the same registry of class names for simplified
+      inter-base relationships.
+
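+      E.g., a sketch of two bases sharing one class registry
+      (``shared_registry`` is an illustrative name)::
+
+          shared_registry = {}
+
+          BaseA = declarative_base(class_registry=shared_registry)
+          BaseB = declarative_base(class_registry=shared_registry)
+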
+    :param type_annotation_map: optional dictionary of Python types to
+        SQLAlchemy :class:`_types.TypeEngine` classes or instances.  This
+        is used exclusively by the :class:`_orm.MappedColumn` construct
+        to produce column types based on annotations within the
+        :class:`_orm.Mapped` type.
+
+
+        .. versionadded:: 2.0
+
+        .. seealso::
+
+            :ref:`orm_declarative_mapped_column_type_map`
+
+    :param metaclass:
+      Defaults to :class:`.DeclarativeMeta`.  A metaclass or __metaclass__
+      compatible callable to use as the meta type of the generated
+      declarative base class.
+
+    .. seealso::
+
+        :class:`_orm.registry`
+
+    """
+
+    return registry(
+        metadata=metadata,
+        class_registry=class_registry,
+        constructor=constructor,
+        type_annotation_map=type_annotation_map,
+    ).generate_base(
+        mapper=mapper,
+        cls=cls,
+        name=name,
+        metaclass=metaclass,
+    )
+
+
+class registry:
+    """Generalized registry for mapping classes.
+
+    The :class:`_orm.registry` serves as the basis for maintaining a collection
+    of mappings, and provides configurational hooks used to map classes.
+
+    The three general kinds of mappings supported are Declarative Base,
+    Declarative Decorator, and Imperative Mapping.   All of these mapping
+    styles may be used interchangeably (a sketch of the decorator style
+    follows the list below):
+
+    * :meth:`_orm.registry.generate_base` returns a new declarative base
+      class, and is the underlying implementation of the
+      :func:`_orm.declarative_base` function.
+
+    * :meth:`_orm.registry.mapped` provides a class decorator that will
+      apply declarative mapping to a class without the use of a declarative
+      base class.
+
+    * :meth:`_orm.registry.map_imperatively` will produce a
+      :class:`_orm.Mapper` for a class without scanning the class for
+      declarative class attributes. This method suits the use case historically
+      provided by the ``sqlalchemy.orm.mapper()`` classical mapping function,
+      which is removed as of SQLAlchemy 2.0.
+
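+    E.g., a sketch of the decorator style (names are illustrative)::
+
+        from sqlalchemy import Column, Integer
+        from sqlalchemy.orm import registry
+
+        mapper_registry = registry()
+
+
+        @mapper_registry.mapped
+        class MyMappedClass:
+            __tablename__ = "my_table"
+
+            id = Column(Integer, primary_key=True)
+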
+    .. versionadded:: 1.4
+
+    .. seealso::
+
+        :ref:`orm_mapping_classes_toplevel` - overview of class mapping
+        styles.
+
+    """
+
+    _class_registry: clsregistry._ClsRegistryType
+    _managers: weakref.WeakKeyDictionary[ClassManager[Any], Literal[True]]
+    _non_primary_mappers: weakref.WeakKeyDictionary[Mapper[Any], Literal[True]]
+    metadata: MetaData
+    constructor: CallableReference[Callable[..., None]]
+    type_annotation_map: _MutableTypeAnnotationMapType
+    _dependents: Set[_RegistryType]
+    _dependencies: Set[_RegistryType]
+    _new_mappers: bool
+
+    def __init__(
+        self,
+        *,
+        metadata: Optional[MetaData] = None,
+        class_registry: Optional[clsregistry._ClsRegistryType] = None,
+        type_annotation_map: Optional[_TypeAnnotationMapType] = None,
+        constructor: Callable[..., None] = _declarative_constructor,
+    ):
+        r"""Construct a new :class:`_orm.registry`
+
+        :param metadata:
+          An optional :class:`_schema.MetaData` instance.  All
+          :class:`_schema.Table` objects generated using declarative
+          table mapping will make use of this :class:`_schema.MetaData`
+          collection.  If this argument is left at its default of ``None``,
+          a blank :class:`_schema.MetaData` collection is created.
+
+        :param constructor:
+          Specify the implementation for the ``__init__`` function on a mapped
+          class that has no ``__init__`` of its own.  Defaults to an
+          implementation that assigns \**kwargs for declared
+          fields and relationships to an instance.  If ``None`` is supplied,
+          no __init__ will be provided and construction will fall back to
+          cls.__init__ by way of the normal Python semantics.
+
+        :param class_registry: optional dictionary that will serve as the
+          registry of class names-> mapped classes when string names
+          are used to identify classes inside of :func:`_orm.relationship`
+          and others.  Allows two or more declarative base classes
+          to share the same registry of class names for simplified
+          inter-base relationships.
+
+        :param type_annotation_map: optional dictionary of Python types to
+          SQLAlchemy :class:`_types.TypeEngine` classes or instances.
+          The provided dict will update the default type mapping.  This
+          is used exclusively by the :class:`_orm.MappedColumn` construct
+          to produce column types based on annotations within the
+          :class:`_orm.Mapped` type.
+
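+          E.g., a sketch mapping ``str`` to a sized ``String``::
+
+              from sqlalchemy import String
+
+              reg = registry(type_annotation_map={str: String(50)})
+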
+          .. versionadded:: 2.0
+
+          .. seealso::
+
+              :ref:`orm_declarative_mapped_column_type_map`
+
+
+        """
+        lcl_metadata = metadata or MetaData()
+
+        if class_registry is None:
+            class_registry = weakref.WeakValueDictionary()
+
+        self._class_registry = class_registry
+        self._managers = weakref.WeakKeyDictionary()
+        self._non_primary_mappers = weakref.WeakKeyDictionary()
+        self.metadata = lcl_metadata
+        self.constructor = constructor
+        self.type_annotation_map = {}
+        if type_annotation_map is not None:
+            self.update_type_annotation_map(type_annotation_map)
+        self._dependents = set()
+        self._dependencies = set()
+
+        self._new_mappers = False
+
+        with mapperlib._CONFIGURE_MUTEX:
+            mapperlib._mapper_registries[self] = True
+
+    def update_type_annotation_map(
+        self,
+        type_annotation_map: _TypeAnnotationMapType,
+    ) -> None:
+        """update the :paramref:`_orm.registry.type_annotation_map` with new
+        values."""
+
+        self.type_annotation_map.update(
+            {
+                de_optionalize_union_types(typ): sqltype
+                for typ, sqltype in type_annotation_map.items()
+            }
+        )
+
+    def _resolve_type(
+        self, python_type: _MatchedOnType, _do_fallbacks: bool = True
+    ) -> Optional[sqltypes.TypeEngine[Any]]:
+        python_type_type: Type[Any]
+        search: Iterable[Tuple[_MatchedOnType, Type[Any]]]
+
+        if is_generic(python_type):
+            if is_literal(python_type):
+                python_type_type = python_type  # type: ignore[assignment]
+
+                search = (
+                    (python_type, python_type_type),
+                    *((lt, python_type_type) for lt in LITERAL_TYPES),  # type: ignore[arg-type] # noqa: E501
+                )
+            else:
+                python_type_type = python_type.__origin__
+                search = ((python_type, python_type_type),)
+        elif isinstance(python_type, type):
+            python_type_type = python_type
+            search = ((pt, pt) for pt in python_type_type.__mro__)
+        else:
+            python_type_type = python_type  # type: ignore[assignment]
+            search = ((python_type, python_type_type),)
+
+        for pt, flattened in search:
+            # we search through full __mro__ for types.  however...
+            sql_type = self.type_annotation_map.get(pt)
+            if sql_type is None:
+                sql_type = sqltypes._type_map_get(pt)  # type: ignore  # noqa: E501
+
+            if sql_type is not None:
+                sql_type_inst = sqltypes.to_instance(sql_type)
+
+                # ... this additional step will reject most
+                # type -> supertype matches, such as if we had
+                # a MyInt(int) subclass.  note also we pass NewType()
+                # here directly; these always have to be in the
+                # type_annotation_map to be useful
+                resolved_sql_type = sql_type_inst._resolve_for_python_type(
+                    python_type_type,
+                    pt,
+                    flattened,
+                )
+                if resolved_sql_type is not None:
+                    return resolved_sql_type
+
+        # 2.0 fallbacks
+        if _do_fallbacks:
+            python_type_to_check: Any = None
+            kind = None
+            if is_pep695(python_type):
+                # NOTE: assume there aren't type alias types of new types.
+                python_type_to_check = python_type
+                while is_pep695(python_type_to_check):
+                    python_type_to_check = python_type_to_check.__value__
+                python_type_to_check = de_optionalize_union_types(
+                    python_type_to_check
+                )
+                kind = "TypeAliasType"
+            if is_newtype(python_type):
+                python_type_to_check = flatten_newtype(python_type)
+                kind = "NewType"
+
+            if python_type_to_check is not None:
+                res_after_fallback = self._resolve_type(
+                    python_type_to_check, False
+                )
+                if res_after_fallback is not None:
+                    assert kind is not None
+                    warn_deprecated(
+                        f"Matching the provided {kind} '{python_type}' on "
+                        "its resolved value without matching it in the "
+                        "type_annotation_map is deprecated; add this type to "
+                        "the type_annotation_map to allow it to match "
+                        "explicitly.",
+                        "2.0",
+                    )
+                    return res_after_fallback
+
+        return None
+
+    @property
+    def mappers(self) -> FrozenSet[Mapper[Any]]:
+        """read only collection of all :class:`_orm.Mapper` objects."""
+
+        return frozenset(manager.mapper for manager in self._managers).union(
+            self._non_primary_mappers
+        )
+
+    def _set_depends_on(self, registry: RegistryType) -> None:
+        if registry is self:
+            return
+        registry._dependents.add(self)
+        self._dependencies.add(registry)
+
+    def _flag_new_mapper(self, mapper: Mapper[Any]) -> None:
+        mapper._ready_for_configure = True
+        if self._new_mappers:
+            return
+
+        for reg in self._recurse_with_dependents({self}):
+            reg._new_mappers = True
+
+    @classmethod
+    def _recurse_with_dependents(
+        cls, registries: Set[RegistryType]
+    ) -> Iterator[RegistryType]:
+        todo = registries
+        done = set()
+        while todo:
+            reg = todo.pop()
+            done.add(reg)
+
+            # if yielding would remove dependents, make sure we have
+            # them before
+            todo.update(reg._dependents.difference(done))
+            yield reg
+
+            # if yielding would add dependents, make sure we have them
+            # after
+            todo.update(reg._dependents.difference(done))
+
+    @classmethod
+    def _recurse_with_dependencies(
+        cls, registries: Set[RegistryType]
+    ) -> Iterator[RegistryType]:
+        todo = registries
+        done = set()
+        while todo:
+            reg = todo.pop()
+            done.add(reg)
+
+            # if yielding would remove dependencies, make sure we have
+            # them before
+            todo.update(reg._dependencies.difference(done))
+
+            yield reg
+
+            # if yielding would add dependencies, make sure we have
+            # them after
+            todo.update(reg._dependencies.difference(done))
+
+    def _mappers_to_configure(self) -> Iterator[Mapper[Any]]:
+        return itertools.chain(
+            (
+                manager.mapper
+                for manager in list(self._managers)
+                if manager.is_mapped
+                and not manager.mapper.configured
+                and manager.mapper._ready_for_configure
+            ),
+            (
+                npm
+                for npm in list(self._non_primary_mappers)
+                if not npm.configured and npm._ready_for_configure
+            ),
+        )
+
+    def _add_non_primary_mapper(self, np_mapper: Mapper[Any]) -> None:
+        self._non_primary_mappers[np_mapper] = True
+
+    def _dispose_cls(self, cls: Type[_O]) -> None:
+        clsregistry.remove_class(cls.__name__, cls, self._class_registry)
+
+    def _add_manager(self, manager: ClassManager[Any]) -> None:
+        self._managers[manager] = True
+        if manager.is_mapped:
+            raise exc.ArgumentError(
+                "Class '%s' already has a primary mapper defined. "
+                % manager.class_
+            )
+        assert manager.registry is None
+        manager.registry = self
+
+    def configure(self, cascade: bool = False) -> None:
+        """Configure all as-yet unconfigured mappers in this
+        :class:`_orm.registry`.
+
+        The configure step is used to reconcile and initialize the
+        :func:`_orm.relationship` linkages between mapped classes, as well as
+        to invoke configuration events such as the
+        :meth:`_orm.MapperEvents.before_configured` and
+        :meth:`_orm.MapperEvents.after_configured`, which may be used by ORM
+        extensions or user-defined extension hooks.
+
+        If one or more mappers in this registry contain
+        :func:`_orm.relationship` constructs that refer to mapped classes in
+        other registries, this registry is said to be *dependent* on those
+        registries. In order to configure those dependent registries
+        automatically, the :paramref:`_orm.registry.configure.cascade` flag
+        should be set to ``True``. Otherwise, if they are not configured, an
+        exception will be raised.  The rationale behind this behavior is to
+        allow an application to programmatically invoke configuration of
+        registries while controlling whether or not the process implicitly
+        reaches other registries.
+
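+        E.g., a sketch, assuming mappers in ``reg_a`` have
+        :func:`_orm.relationship` links to classes mapped in ``reg_b``::
+
+            # configures reg_a and, because of the dependency, reg_b
+            reg_a.configure(cascade=True)
+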
+        As an alternative to invoking :meth:`_orm.registry.configure`, the ORM
+        function :func:`_orm.configure_mappers` function may be used to ensure
+        configuration is complete for all :class:`_orm.registry` objects in
+        memory. This is generally simpler to use and also predates the usage of
+        :class:`_orm.registry` objects overall. However, this function will
+        impact all mappings throughout the running Python process and may be
+        more memory/time consuming for an application that has many registries
+        in use for different purposes that may not be needed immediately.
+
+        .. seealso::
+
+            :func:`_orm.configure_mappers`
+
+
+        .. versionadded:: 1.4.0b2
+
+        """
+        mapperlib._configure_registries({self}, cascade=cascade)
+
+    def dispose(self, cascade: bool = False) -> None:
+        """Dispose of all mappers in this :class:`_orm.registry`.
+
+        After invocation, all the classes that were mapped within this registry
+        will no longer have class instrumentation associated with them. This
+        method is the per-:class:`_orm.registry` analogue to the
+        application-wide :func:`_orm.clear_mappers` function.
+
+        If this registry contains mappers that are dependencies of other
+        registries, typically via :func:`_orm.relationship` links, then those
+        registries must be disposed as well. When such registries exist in
+        relation to this one, their :meth:`_orm.registry.dispose` method will
+        also be called, if the :paramref:`_orm.registry.dispose.cascade` flag
+        is set to ``True``; otherwise, an error is raised if those registries
+        were not already disposed.
+
+        .. versionadded:: 1.4.0b2
+
+        .. seealso::
+
+            :func:`_orm.clear_mappers`
+
+        """
+
+        mapperlib._dispose_registries({self}, cascade=cascade)
+
+    def _dispose_manager_and_mapper(self, manager: ClassManager[Any]) -> None:
+        if "mapper" in manager.__dict__:
+            mapper = manager.mapper
+
+            mapper._set_dispose_flags()
+
+        class_ = manager.class_
+        self._dispose_cls(class_)
+        instrumentation._instrumentation_factory.unregister(class_)
+
+    def generate_base(
+        self,
+        mapper: Optional[Callable[..., Mapper[Any]]] = None,
+        cls: Type[Any] = object,
+        name: str = "Base",
+        metaclass: Type[Any] = DeclarativeMeta,
+    ) -> Any:
+        """Generate a declarative base class.
+
+        Classes that inherit from the returned class object will be
+        automatically mapped using declarative mapping.
+
+        E.g.::
+
+            from sqlalchemy import Column, Integer
+            from sqlalchemy.orm import registry
+
+            mapper_registry = registry()
+
+            Base = mapper_registry.generate_base()
+
+
+            class MyClass(Base):
+                __tablename__ = "my_table"
+                id = Column(Integer, primary_key=True)
+
+        The above dynamically generated class is equivalent to the
+        non-dynamic example below::
+
+            from sqlalchemy.orm import registry
+            from sqlalchemy.orm.decl_api import DeclarativeMeta
+
+            mapper_registry = registry()
+
+
+            class Base(metaclass=DeclarativeMeta):
+                __abstract__ = True
+                registry = mapper_registry
+                metadata = mapper_registry.metadata
+
+                __init__ = mapper_registry.constructor
+
+        .. versionchanged:: 2.0 Note that the
+           :meth:`_orm.registry.generate_base` method is superseded by the new
+           :class:`_orm.DeclarativeBase` class, which generates a new "base"
+           class using subclassing, rather than the return value of a
+           function.
+           This allows an approach that is compatible with :pep:`484` typing
+           tools.
+
+        The :meth:`_orm.registry.generate_base` method provides the
+        implementation for the :func:`_orm.declarative_base` function, which
+        creates the :class:`_orm.registry` and base class all at once.
+
+        See the section :ref:`orm_declarative_mapping` for background and
+        examples.
+
+        :param mapper:
+          An optional callable, defaults to :class:`_orm.Mapper`.
+          This function is used to generate new :class:`_orm.Mapper` objects.
+
+        :param cls:
+          Defaults to :class:`object`. A type to use as the base for the
+          generated declarative base class. May be a class or tuple of classes.
+
+        :param name:
+          Defaults to ``Base``.  The display name for the generated
+          class.  Customizing this is not required, but can improve clarity in
+          tracebacks and debugging.
+
+        :param metaclass:
+          Defaults to :class:`.DeclarativeMeta`.  A metaclass or
+          ``__metaclass__``-compatible callable to use as the meta type of
+          the generated declarative base class.
+
+        .. seealso::
+
+            :ref:`orm_declarative_mapping`
+
+            :func:`_orm.declarative_base`
+
+        """
+        metadata = self.metadata
+
+        bases = (cls,) if not isinstance(cls, tuple) else cls
+
+        class_dict: Dict[str, Any] = dict(registry=self, metadata=metadata)
+        if isinstance(cls, type):
+            class_dict["__doc__"] = cls.__doc__
+
+        if self.constructor is not None:
+            class_dict["__init__"] = self.constructor
+
+        class_dict["__abstract__"] = True
+        if mapper:
+            class_dict["__mapper_cls__"] = mapper
+
+        if hasattr(cls, "__class_getitem__"):
+
+            def __class_getitem__(cls: Type[_T], key: Any) -> Type[_T]:
+                # allow generic classes in py3.9+
+                return cls
+
+            class_dict["__class_getitem__"] = __class_getitem__
+
+        return metaclass(name, bases, class_dict)
+
+    @compat_typing.dataclass_transform(
+        field_specifiers=(
+            MappedColumn,
+            RelationshipProperty,
+            Composite,
+            Synonym,
+            mapped_column,
+            relationship,
+            composite,
+            synonym,
+            deferred,
+        ),
+    )
+    @overload
+    def mapped_as_dataclass(self, __cls: Type[_O]) -> Type[_O]: ...
+
+    @overload
+    def mapped_as_dataclass(
+        self,
+        __cls: Literal[None] = ...,
+        *,
+        init: Union[_NoArg, bool] = ...,
+        repr: Union[_NoArg, bool] = ...,  # noqa: A002
+        eq: Union[_NoArg, bool] = ...,
+        order: Union[_NoArg, bool] = ...,
+        unsafe_hash: Union[_NoArg, bool] = ...,
+        match_args: Union[_NoArg, bool] = ...,
+        kw_only: Union[_NoArg, bool] = ...,
+        dataclass_callable: Union[_NoArg, Callable[..., Type[Any]]] = ...,
+    ) -> Callable[[Type[_O]], Type[_O]]: ...
+
+    def mapped_as_dataclass(
+        self,
+        __cls: Optional[Type[_O]] = None,
+        *,
+        init: Union[_NoArg, bool] = _NoArg.NO_ARG,
+        repr: Union[_NoArg, bool] = _NoArg.NO_ARG,  # noqa: A002
+        eq: Union[_NoArg, bool] = _NoArg.NO_ARG,
+        order: Union[_NoArg, bool] = _NoArg.NO_ARG,
+        unsafe_hash: Union[_NoArg, bool] = _NoArg.NO_ARG,
+        match_args: Union[_NoArg, bool] = _NoArg.NO_ARG,
+        kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+        dataclass_callable: Union[
+            _NoArg, Callable[..., Type[Any]]
+        ] = _NoArg.NO_ARG,
+    ) -> Union[Type[_O], Callable[[Type[_O]], Type[_O]]]:
+        """Class decorator that will apply the Declarative mapping process
+        to a given class, and additionally convert the class into a
+        Python dataclass.
+
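+        E.g., a minimal sketch using the annotated Declarative form::
+
+            from sqlalchemy.orm import Mapped, mapped_column, registry
+
+            mapper_registry = registry()
+
+
+            @mapper_registry.mapped_as_dataclass
+            class User:
+                __tablename__ = "user"
+
+                id: Mapped[int] = mapped_column(init=False, primary_key=True)
+                name: Mapped[str]
+
+        Above, ``User`` is a Python dataclass; ``User(name="some name")``
+        invokes the generated ``__init__`` method.
+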
+        .. seealso::
+
+            :ref:`orm_declarative_native_dataclasses` - complete background
+            on SQLAlchemy native dataclass mapping
+
+
+        .. versionadded:: 2.0
+
+
+        """
+
+        def decorate(cls: Type[_O]) -> Type[_O]:
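+            # stash the dataclass arguments on the class; the declarative
+            # scan reads these back as "dataclass_setup_arguments"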
+            setattr(
+                cls,
+                "_sa_apply_dc_transforms",
+                {
+                    "init": init,
+                    "repr": repr,
+                    "eq": eq,
+                    "order": order,
+                    "unsafe_hash": unsafe_hash,
+                    "match_args": match_args,
+                    "kw_only": kw_only,
+                    "dataclass_callable": dataclass_callable,
+                },
+            )
+            _as_declarative(self, cls, cls.__dict__)
+            return cls
+
+        if __cls:
+            return decorate(__cls)
+        else:
+            return decorate
+
+    def mapped(self, cls: Type[_O]) -> Type[_O]:
+        """Class decorator that will apply the Declarative mapping process
+        to a given class.
+
+        E.g.::
+
+            from sqlalchemy import Column, Integer, String
+            from sqlalchemy.orm import registry
+
+            mapper_registry = registry()
+
+
+            @mapper_registry.mapped
+            class Foo:
+                __tablename__ = "some_table"
+
+                id = Column(Integer, primary_key=True)
+                name = Column(String)
+
+        See the section :ref:`orm_declarative_mapping` for complete
+        details and examples.
+
+        :param cls: class to be mapped.
+
+        :return: the class that was passed.
+
+        .. seealso::
+
+            :ref:`orm_declarative_mapping`
+
+            :meth:`_orm.registry.generate_base` - generates a base class
+            that will apply Declarative mapping to subclasses automatically
+            using a Python metaclass.
+
+            :meth:`_orm.registry.mapped_as_dataclass`
+
+        """
+        _as_declarative(self, cls, cls.__dict__)
+        return cls
+
+    def as_declarative_base(self, **kw: Any) -> Callable[[Type[_T]], Type[_T]]:
+        """
+        Class decorator which will invoke
+        :meth:`_orm.registry.generate_base`
+        for a given base class.
+
+        E.g.::
+
+            from sqlalchemy import Column, Integer
+            from sqlalchemy.orm import declared_attr, registry
+
+            mapper_registry = registry()
+
+
+            @mapper_registry.as_declarative_base()
+            class Base:
+                @declared_attr
+                def __tablename__(cls):
+                    return cls.__name__.lower()
+
+                id = Column(Integer, primary_key=True)
+
+
+            class MyMappedClass(Base): ...
+
+        All keyword arguments passed to
+        :meth:`_orm.registry.as_declarative_base` are passed
+        along to :meth:`_orm.registry.generate_base`.
+
+        """
+
+        def decorate(cls: Type[_T]) -> Type[_T]:
+            kw["cls"] = cls
+            kw["name"] = cls.__name__
+            return self.generate_base(**kw)  # type: ignore
+
+        return decorate
+
+    def map_declaratively(self, cls: Type[_O]) -> Mapper[_O]:
+        """Map a class declaratively.
+
+        In this form of mapping, the class is scanned for mapping information,
+        including for columns to be associated with a table, and/or an
+        actual table object.
+
+        Returns the :class:`_orm.Mapper` object.
+
+        E.g.::
+
+            from sqlalchemy import Column, Integer, String
+            from sqlalchemy.orm import registry
+
+            mapper_registry = registry()
+
+
+            class Foo:
+                __tablename__ = "some_table"
+
+                id = Column(Integer, primary_key=True)
+                name = Column(String)
+
+
+            mapper = mapper_registry.map_declaratively(Foo)
+
+        This function is more conveniently invoked indirectly via either the
+        :meth:`_orm.registry.mapped` class decorator or by subclassing a
+        declarative metaclass generated from
+        :meth:`_orm.registry.generate_base`.
+
+        See the section :ref:`orm_declarative_mapping` for complete
+        details and examples.
+
+        :param cls: class to be mapped.
+
+        :return: a :class:`_orm.Mapper` object.
+
+        .. seealso::
+
+            :ref:`orm_declarative_mapping`
+
+            :meth:`_orm.registry.mapped` - more common decorator interface
+            to this function.
+
+            :meth:`_orm.registry.map_imperatively`
+
+        """
+        _as_declarative(self, cls, cls.__dict__)
+        return cls.__mapper__  # type: ignore
+
+    def map_imperatively(
+        self,
+        class_: Type[_O],
+        local_table: Optional[FromClause] = None,
+        **kw: Any,
+    ) -> Mapper[_O]:
+        r"""Map a class imperatively.
+
+        In this form of mapping, the class is not scanned for any mapping
+        information.  Instead, all mapping constructs are passed as
+        arguments.
+
+        This method is intended to be fully equivalent to the now-removed
+        SQLAlchemy ``mapper()`` function, except that it's in terms of
+        a particular registry.
+
+        E.g.::
+
+            from sqlalchemy import Column, Integer, Table
+            from sqlalchemy.orm import registry
+
+            mapper_registry = registry()
+
+            my_table = Table(
+                "my_table",
+                mapper_registry.metadata,
+                Column("id", Integer, primary_key=True),
+            )
+
+
+            class MyClass:
+                pass
+
+
+            mapper_registry.map_imperatively(MyClass, my_table)
+
+        See the section :ref:`orm_imperative_mapping` for complete background
+        and usage examples.
+
+        :param class\_: The class to be mapped.  Corresponds to the
+         :paramref:`_orm.Mapper.class_` parameter.
+
+        :param local_table: the :class:`_schema.Table` or other
+         :class:`_sql.FromClause` object that is the subject of the mapping.
+         Corresponds to the
+         :paramref:`_orm.Mapper.local_table` parameter.
+
+        :param \**kw: all other keyword arguments are passed to the
+         :class:`_orm.Mapper` constructor directly.
+
+        .. seealso::
+
+            :ref:`orm_imperative_mapping`
+
+            :ref:`orm_declarative_mapping`
+
+        """
+        return _mapper(self, class_, local_table, kw)
+
+
+RegistryType = registry
+
+if not TYPE_CHECKING:
+    # allow for runtime type resolution of ``ClassVar[_RegistryType]``
+    _RegistryType = registry  # noqa
+
+
+def as_declarative(**kw: Any) -> Callable[[Type[_T]], Type[_T]]:
+    """
+    Class decorator which will adapt a given class into a
+    :func:`_orm.declarative_base`.
+
+    This function makes use of the :meth:`_orm.registry.as_declarative_base`
+    method, by first creating a :class:`_orm.registry` automatically
+    and then invoking the decorator.
+
+    E.g.::
+
+        from sqlalchemy import Column, Integer
+        from sqlalchemy.orm import as_declarative, declared_attr
+
+
+        @as_declarative()
+        class Base:
+            @declared_attr
+            def __tablename__(cls):
+                return cls.__name__.lower()
+
+            id = Column(Integer, primary_key=True)
+
+
+        class MyMappedClass(Base): ...
+
+    .. seealso::
+
+        :meth:`_orm.registry.as_declarative_base`
+
+    """
+    metadata, class_registry = (
+        kw.pop("metadata", None),
+        kw.pop("class_registry", None),
+    )
+
+    return registry(
+        metadata=metadata, class_registry=class_registry
+    ).as_declarative_base(**kw)
+
+
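+# register the declarative class/metaclass types with sqlalchemy.inspect():
+# a mapped class resolves to its Mapper, while a deferred, not-yet-mapped
+# configuration raises an informative error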
+@inspection._inspects(
+    DeclarativeMeta, DeclarativeBase, DeclarativeAttributeIntercept
+)
+def _inspect_decl_meta(cls: Type[Any]) -> Optional[Mapper[Any]]:
+    mp: Optional[Mapper[Any]] = _inspect_mapped_class(cls)
+    if mp is None:
+        if _DeferredMapperConfig.has_cls(cls):
+            _DeferredMapperConfig.raise_unmapped_for_cls(cls)
+    return mp
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/decl_base.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/decl_base.py
new file mode 100644
index 00000000..c480994d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/decl_base.py
@@ -0,0 +1,2188 @@
+# orm/decl_base.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Internal implementation for declarative."""
+
+from __future__ import annotations
+
+import collections
+import dataclasses
+import re
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import Iterable
+from typing import List
+from typing import Mapping
+from typing import NamedTuple
+from typing import NoReturn
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+import weakref
+
+from . import attributes
+from . import clsregistry
+from . import exc as orm_exc
+from . import instrumentation
+from . import mapperlib
+from ._typing import _O
+from ._typing import attr_is_internal_proxy
+from .attributes import InstrumentedAttribute
+from .attributes import QueryableAttribute
+from .base import _is_mapped_class
+from .base import InspectionAttr
+from .descriptor_props import CompositeProperty
+from .descriptor_props import SynonymProperty
+from .interfaces import _AttributeOptions
+from .interfaces import _DCAttributeOptions
+from .interfaces import _IntrospectsAnnotations
+from .interfaces import _MappedAttribute
+from .interfaces import _MapsColumns
+from .interfaces import MapperProperty
+from .mapper import Mapper
+from .properties import ColumnProperty
+from .properties import MappedColumn
+from .util import _extract_mapped_subtype
+from .util import _is_mapped_annotation
+from .util import class_mapper
+from .util import de_stringify_annotation
+from .. import event
+from .. import exc
+from .. import util
+from ..sql import expression
+from ..sql.base import _NoArg
+from ..sql.schema import Column
+from ..sql.schema import Table
+from ..util import topological
+from ..util.typing import _AnnotationScanType
+from ..util.typing import get_args
+from ..util.typing import is_fwd_ref
+from ..util.typing import is_literal
+from ..util.typing import Protocol
+from ..util.typing import TypedDict
+
+if TYPE_CHECKING:
+    from ._typing import _ClassDict
+    from ._typing import _RegistryType
+    from .base import Mapped
+    from .decl_api import declared_attr
+    from .instrumentation import ClassManager
+    from ..sql.elements import NamedColumn
+    from ..sql.schema import MetaData
+    from ..sql.selectable import FromClause
+
+_T = TypeVar("_T", bound=Any)
+
+_MapperKwArgs = Mapping[str, Any]
+_TableArgsType = Union[Tuple[Any, ...], Dict[str, Any]]
+
+
+class MappedClassProtocol(Protocol[_O]):
+    """A protocol representing a SQLAlchemy mapped class.
+
+    The protocol is generic on the type of class; use
+    ``MappedClassProtocol[Any]`` to allow any mapped class.
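+
+    E.g., a minimal sketch of a helper typed against any mapped class::
+
+        def mapped_table(cls: MappedClassProtocol[Any]) -> FromClause:
+            return cls.__table__
+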
+    """
+
+    __name__: str
+    __mapper__: Mapper[_O]
+    __table__: FromClause
+
+    def __call__(self, **kw: Any) -> _O: ...
+
+
+class _DeclMappedClassProtocol(MappedClassProtocol[_O], Protocol):
+    "Internal more detailed version of ``MappedClassProtocol``."
+    metadata: MetaData
+    __tablename__: str
+    __mapper_args__: _MapperKwArgs
+    __table_args__: Optional[_TableArgsType]
+
+    _sa_apply_dc_transforms: Optional[_DataclassArguments]
+
+    def __declare_first__(self) -> None: ...
+
+    def __declare_last__(self) -> None: ...
+
+
+class _DataclassArguments(TypedDict):
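+    """Arguments mirroring those of the Python ``dataclasses.dataclass()``
+    callable, plus ``dataclass_callable``; ``_NoArg`` marks parameters that
+    were not passed."""
+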
+    init: Union[_NoArg, bool]
+    repr: Union[_NoArg, bool]
+    eq: Union[_NoArg, bool]
+    order: Union[_NoArg, bool]
+    unsafe_hash: Union[_NoArg, bool]
+    match_args: Union[_NoArg, bool]
+    kw_only: Union[_NoArg, bool]
+    dataclass_callable: Union[_NoArg, Callable[..., Type[Any]]]
+
+
+def _declared_mapping_info(
+    cls: Type[Any],
+) -> Optional[Union[_DeferredMapperConfig, Mapper[Any]]]:
+    # deferred mapping
+    if _DeferredMapperConfig.has_cls(cls):
+        return _DeferredMapperConfig.config_for_cls(cls)
+    # regular mapping
+    elif _is_mapped_class(cls):
+        return class_mapper(cls, configure=False)
+    else:
+        return None
+
+
+def _is_supercls_for_inherits(cls: Type[Any]) -> bool:
+    """return True if this class will be used as a superclass to set in
+    'inherits'.
+
+    This includes deferred mapper configs that aren't mapped yet; however, it
+    does not include classes with ``_sa_decl_prepare_nocascade`` (e.g.
+    ``AbstractConcreteBase``); these concrete-only classes are not set up as
+    "inherits" until after mappers are configured using
+    ``mapper._set_concrete_base()``.
+
+    """
+    if _DeferredMapperConfig.has_cls(cls):
+        return not _get_immediate_cls_attr(
+            cls, "_sa_decl_prepare_nocascade", strict=True
+        )
+    # regular mapping
+    elif _is_mapped_class(cls):
+        return True
+    else:
+        return False
+
+
+def _resolve_for_abstract_or_classical(cls: Type[Any]) -> Optional[Type[Any]]:
+    if cls is object:
+        return None
+
+    sup: Optional[Type[Any]]
+
+    if cls.__dict__.get("__abstract__", False):
+        for base_ in cls.__bases__:
+            sup = _resolve_for_abstract_or_classical(base_)
+            if sup is not None:
+                return sup
+        else:
+            return None
+    else:
+        clsmanager = _dive_for_cls_manager(cls)
+
+        if clsmanager:
+            return clsmanager.class_
+        else:
+            return cls
+
+
+def _get_immediate_cls_attr(
+    cls: Type[Any], attrname: str, strict: bool = False
+) -> Optional[Any]:
+    """return an attribute of the class that is either present directly
+    on the class, e.g. not on a superclass, or is from a superclass but
+    this superclass is a non-mapped mixin, that is, not a descendant of
+    the declarative base and is also not classically mapped.
+
+    This is used to detect attributes that indicate something about
+    a mapped class independently from any mapped classes that it may
+    inherit from.
+
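+    E.g., a minimal sketch (``Base`` is a hypothetical declarative base;
+    ``Mixin`` is a plain, non-mapped class)::
+
+        class Mixin:
+            __sa_dataclass_metadata_key__ = "sa"
+
+
+        class MyClass(Mixin, Base): ...
+
+
+        # the attribute comes from a non-mapped mixin, so it is treated
+        # as "immediate" and "sa" is returned
+        _get_immediate_cls_attr(MyClass, "__sa_dataclass_metadata_key__")
+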
+    """
+
+    # the rules are different for this name than others,
+    # make sure we've moved it out.  transitional
+    assert attrname != "__abstract__"
+
+    if not issubclass(cls, object):
+        return None
+
+    if attrname in cls.__dict__:
+        return getattr(cls, attrname)
+
+    for base in cls.__mro__[1:]:
+        _is_classical_inherits = _dive_for_cls_manager(base) is not None
+
+        if attrname in base.__dict__ and (
+            base is cls
+            or (
+                (base in cls.__bases__ if strict else True)
+                and not _is_classical_inherits
+            )
+        ):
+            return getattr(base, attrname)
+    else:
+        return None
+
+
+def _dive_for_cls_manager(cls: Type[_O]) -> Optional[ClassManager[_O]]:
+    # because the class manager registration is pluggable,
+    # we need to do the search for every class in the hierarchy,
+    # rather than just a simple "cls._sa_class_manager"
+
+    for base in cls.__mro__:
+        manager: Optional[ClassManager[_O]] = attributes.opt_manager_of_class(
+            base
+        )
+        if manager:
+            return manager
+    return None
+
+
+def _as_declarative(
+    registry: _RegistryType, cls: Type[Any], dict_: _ClassDict
+) -> Optional[_MapperConfig]:
+    # declarative scans the class for attributes.  no table or mapper
+    # args passed separately.
+    return _MapperConfig.setup_mapping(registry, cls, dict_, None, {})
+
+
+def _mapper(
+    registry: _RegistryType,
+    cls: Type[_O],
+    table: Optional[FromClause],
+    mapper_kw: _MapperKwArgs,
+) -> Mapper[_O]:
+    _ImperativeMapperConfig(registry, cls, table, mapper_kw)
+    return cast("MappedClassProtocol[_O]", cls).__mapper__
+
+
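+# decl_api imports from this module, so the reference back to decl_api is
+# resolved lazily via util.preloaded to avoid a circular import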
+@util.preload_module("sqlalchemy.orm.decl_api")
+def _is_declarative_props(obj: Any) -> bool:
+    _declared_attr_common = util.preloaded.orm_decl_api._declared_attr_common
+
+    return isinstance(obj, (_declared_attr_common, util.classproperty))
+
+
+def _check_declared_props_nocascade(
+    obj: Any, name: str, cls: Type[_O]
+) -> bool:
+    if _is_declarative_props(obj):
+        if getattr(obj, "_cascading", False):
+            util.warn(
+                "@declared_attr.cascading is not supported on the %s "
+                "attribute on class %s.  This attribute invokes for "
+                "subclasses in any case." % (name, cls)
+            )
+        return True
+    else:
+        return False
+
+
+class _MapperConfig:
+    __slots__ = (
+        "cls",
+        "classname",
+        "properties",
+        "declared_attr_reg",
+        "__weakref__",
+    )
+
+    cls: Type[Any]
+    classname: str
+    properties: util.OrderedDict[
+        str,
+        Union[
+            Sequence[NamedColumn[Any]], NamedColumn[Any], MapperProperty[Any]
+        ],
+    ]
+    declared_attr_reg: Dict[declared_attr[Any], Any]
+
+    @classmethod
+    def setup_mapping(
+        cls,
+        registry: _RegistryType,
+        cls_: Type[_O],
+        dict_: _ClassDict,
+        table: Optional[FromClause],
+        mapper_kw: _MapperKwArgs,
+    ) -> Optional[_MapperConfig]:
+        manager = attributes.opt_manager_of_class(cls_)
+        if manager and manager.class_ is cls_:
+            raise exc.InvalidRequestError(
+                f"Class {cls_!r} already has been instrumented declaratively"
+            )
+
+        if cls_.__dict__.get("__abstract__", False):
+            return None
+
+        defer_map = _get_immediate_cls_attr(
+            cls_, "_sa_decl_prepare_nocascade", strict=True
+        ) or hasattr(cls_, "_sa_decl_prepare")
+
+        if defer_map:
+            return _DeferredMapperConfig(
+                registry, cls_, dict_, table, mapper_kw
+            )
+        else:
+            return _ClassScanMapperConfig(
+                registry, cls_, dict_, table, mapper_kw
+            )
+
+    def __init__(
+        self,
+        registry: _RegistryType,
+        cls_: Type[Any],
+        mapper_kw: _MapperKwArgs,
+    ):
+        self.cls = util.assert_arg_type(cls_, type, "cls_")
+        self.classname = cls_.__name__
+        self.properties = util.OrderedDict()
+        self.declared_attr_reg = {}
+
+        if not mapper_kw.get("non_primary", False):
+            instrumentation.register_class(
+                self.cls,
+                finalize=False,
+                registry=registry,
+                declarative_scan=self,
+                init_method=registry.constructor,
+            )
+        else:
+            manager = attributes.opt_manager_of_class(self.cls)
+            if not manager or not manager.is_mapped:
+                raise exc.InvalidRequestError(
+                    "Class %s has no primary mapper configured.  Configure "
+                    "a primary mapper first before setting up a non primary "
+                    "Mapper." % self.cls
+                )
+
+    def set_cls_attribute(self, attrname: str, value: _T) -> _T:
+        manager = instrumentation.manager_of_class(self.cls)
+        manager.install_member(attrname, value)
+        return value
+
+    def map(self, mapper_kw: _MapperKwArgs = util.EMPTY_DICT) -> Mapper[Any]:
+        raise NotImplementedError()
+
+    def _early_mapping(self, mapper_kw: _MapperKwArgs) -> None:
+        self.map(mapper_kw)
+
+
+class _ImperativeMapperConfig(_MapperConfig):
+    __slots__ = ("local_table", "inherits")
+
+    def __init__(
+        self,
+        registry: _RegistryType,
+        cls_: Type[_O],
+        table: Optional[FromClause],
+        mapper_kw: _MapperKwArgs,
+    ):
+        super().__init__(registry, cls_, mapper_kw)
+
+        self.local_table = self.set_cls_attribute("__table__", table)
+
+        with mapperlib._CONFIGURE_MUTEX:
+            if not mapper_kw.get("non_primary", False):
+                clsregistry.add_class(
+                    self.classname, self.cls, registry._class_registry
+                )
+
+            self._setup_inheritance(mapper_kw)
+
+            self._early_mapping(mapper_kw)
+
+    def map(self, mapper_kw: _MapperKwArgs = util.EMPTY_DICT) -> Mapper[Any]:
+        mapper_cls = Mapper
+
+        return self.set_cls_attribute(
+            "__mapper__",
+            mapper_cls(self.cls, self.local_table, **mapper_kw),
+        )
+
+    def _setup_inheritance(self, mapper_kw: _MapperKwArgs) -> None:
+        cls = self.cls
+
+        inherits = mapper_kw.get("inherits", None)
+
+        if inherits is None:
+            # since we search for classical mappings now, search for
+            # multiple mapped bases as well and raise an error.
+            inherits_search = []
+            for base_ in cls.__bases__:
+                c = _resolve_for_abstract_or_classical(base_)
+                if c is None:
+                    continue
+
+                if _is_supercls_for_inherits(c) and c not in inherits_search:
+                    inherits_search.append(c)
+
+            if inherits_search:
+                if len(inherits_search) > 1:
+                    raise exc.InvalidRequestError(
+                        "Class %s has multiple mapped bases: %r"
+                        % (cls, inherits_search)
+                    )
+                inherits = inherits_search[0]
+        elif isinstance(inherits, Mapper):
+            inherits = inherits.class_
+
+        self.inherits = inherits
+
+
+class _CollectedAnnotation(NamedTuple):
+    raw_annotation: _AnnotationScanType
+    mapped_container: Optional[Type[Mapped[Any]]]
+    extracted_mapped_annotation: Union[_AnnotationScanType, str]
+    is_dataclass: bool
+    attr_value: Any
+    originating_module: str
+    originating_class: Type[Any]
+
+
+class _ClassScanMapperConfig(_MapperConfig):
+    __slots__ = (
+        "registry",
+        "clsdict_view",
+        "collected_attributes",
+        "collected_annotations",
+        "local_table",
+        "persist_selectable",
+        "declared_columns",
+        "column_ordering",
+        "column_copies",
+        "table_args",
+        "tablename",
+        "mapper_args",
+        "mapper_args_fn",
+        "table_fn",
+        "inherits",
+        "single",
+        "allow_dataclass_fields",
+        "dataclass_setup_arguments",
+        "is_dataclass_prior_to_mapping",
+        "allow_unmapped_annotations",
+    )
+
+    is_deferred = False
+    registry: _RegistryType
+    clsdict_view: _ClassDict
+    collected_annotations: Dict[str, _CollectedAnnotation]
+    collected_attributes: Dict[str, Any]
+    local_table: Optional[FromClause]
+    persist_selectable: Optional[FromClause]
+    declared_columns: util.OrderedSet[Column[Any]]
+    column_ordering: Dict[Column[Any], int]
+    column_copies: Dict[
+        Union[MappedColumn[Any], Column[Any]],
+        Union[MappedColumn[Any], Column[Any]],
+    ]
+    tablename: Optional[str]
+    mapper_args: Mapping[str, Any]
+    table_args: Optional[_TableArgsType]
+    mapper_args_fn: Optional[Callable[[], Dict[str, Any]]]
+    inherits: Optional[Type[Any]]
+    single: bool
+
+    is_dataclass_prior_to_mapping: bool
+    allow_unmapped_annotations: bool
+
+    dataclass_setup_arguments: Optional[_DataclassArguments]
+    """if the class has SQLAlchemy native dataclass parameters, where
+    we will turn the class into a dataclass within the declarative mapping
+    process.
+
+    """
+
+    allow_dataclass_fields: bool
+    """if true, look for dataclass-processed Field objects on the target
+    class as well as superclasses and extract ORM mapping directives from
+    the "metadata" attribute of each Field.
+
+    if False, dataclass fields can still be used; however, they won't be
+    mapped.
+
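+    E.g., the pre-2.0 pattern this flag supports, as a minimal sketch
+    (assuming a ``registry()`` named ``mapper_registry``)::
+
+        @mapper_registry.mapped
+        @dataclasses.dataclass
+        class User:
+            __tablename__ = "user"
+            __sa_dataclass_metadata_key__ = "sa"
+
+            id: int = dataclasses.field(
+                init=False,
+                metadata={"sa": Column(Integer, primary_key=True)},
+            )
+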
+    """
+
+    def __init__(
+        self,
+        registry: _RegistryType,
+        cls_: Type[_O],
+        dict_: _ClassDict,
+        table: Optional[FromClause],
+        mapper_kw: _MapperKwArgs,
+    ):
+        # grab class dict before the instrumentation manager has been added.
+        # reduces cycles
+        self.clsdict_view = (
+            util.immutabledict(dict_) if dict_ else util.EMPTY_DICT
+        )
+        super().__init__(registry, cls_, mapper_kw)
+        self.registry = registry
+        self.persist_selectable = None
+
+        self.collected_attributes = {}
+        self.collected_annotations = {}
+        self.declared_columns = util.OrderedSet()
+        self.column_ordering = {}
+        self.column_copies = {}
+        self.single = False
+        self.dataclass_setup_arguments = dca = getattr(
+            self.cls, "_sa_apply_dc_transforms", None
+        )
+
+        self.allow_unmapped_annotations = getattr(
+            self.cls, "__allow_unmapped__", False
+        ) or bool(self.dataclass_setup_arguments)
+
+        self.is_dataclass_prior_to_mapping = cld = dataclasses.is_dataclass(
+            cls_
+        )
+
+        sdk = _get_immediate_cls_attr(cls_, "__sa_dataclass_metadata_key__")
+
+        # we don't want to consume Field objects from a not-already-dataclass.
+        # the Field objects won't have their "name" or "type" populated,
+        # and while it seems like we could just set these on Field as we
+        # read them, Field is documented as "user read only" and we need to
+        # stay far away from any off-label use of dataclasses APIs.
+        if (not cld or dca) and sdk:
+            raise exc.InvalidRequestError(
+                "SQLAlchemy mapped dataclasses can't consume mapping "
+                "information from dataclass.Field() objects if the immediate "
+                "class is not already a dataclass."
+            )
+
+        # if already a dataclass, and __sa_dataclass_metadata_key__ present,
+        # then also look inside of dataclass.Field() objects yielded by
+        # dataclasses.get_fields(cls) when scanning for attributes
+        self.allow_dataclass_fields = bool(sdk and cld)
+
+        self._setup_declared_events()
+
+        self._scan_attributes()
+
+        self._setup_dataclasses_transforms()
+
+        with mapperlib._CONFIGURE_MUTEX:
+            clsregistry.add_class(
+                self.classname, self.cls, registry._class_registry
+            )
+
+            self._setup_inheriting_mapper(mapper_kw)
+
+            self._extract_mappable_attributes()
+
+            self._extract_declared_columns()
+
+            self._setup_table(table)
+
+            self._setup_inheriting_columns(mapper_kw)
+
+            self._early_mapping(mapper_kw)
+
+    def _setup_declared_events(self) -> None:
+        if _get_immediate_cls_attr(self.cls, "__declare_last__"):
+
+            @event.listens_for(Mapper, "after_configured")
+            def after_configured() -> None:
+                cast(
+                    "_DeclMappedClassProtocol[Any]", self.cls
+                ).__declare_last__()
+
+        if _get_immediate_cls_attr(self.cls, "__declare_first__"):
+
+            @event.listens_for(Mapper, "before_configured")
+            def before_configured() -> None:
+                cast(
+                    "_DeclMappedClassProtocol[Any]", self.cls
+                ).__declare_first__()
+
+    def _cls_attr_override_checker(
+        self, cls: Type[_O]
+    ) -> Callable[[str, Any], bool]:
+        """Produce a function that checks if a class has overridden an
+        attribute, taking SQLAlchemy-enabled dataclass fields into account.
+
+        """
+
+        if self.allow_dataclass_fields:
+            sa_dataclass_metadata_key = _get_immediate_cls_attr(
+                cls, "__sa_dataclass_metadata_key__"
+            )
+        else:
+            sa_dataclass_metadata_key = None
+
+        if not sa_dataclass_metadata_key:
+
+            def attribute_is_overridden(key: str, obj: Any) -> bool:
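+                # the attribute counts as overridden if normal class
+                # attribute lookup on the target class yields something
+                # other than the object collected from the scan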
+                return getattr(cls, key, obj) is not obj
+
+        else:
+            all_datacls_fields = {
+                f.name: f.metadata[sa_dataclass_metadata_key]
+                for f in util.dataclass_fields(cls)
+                if sa_dataclass_metadata_key in f.metadata
+            }
+            local_datacls_fields = {
+                f.name: f.metadata[sa_dataclass_metadata_key]
+                for f in util.local_dataclass_fields(cls)
+                if sa_dataclass_metadata_key in f.metadata
+            }
+
+            absent = object()
+
+            def attribute_is_overridden(key: str, obj: Any) -> bool:
+                if _is_declarative_props(obj):
+                    obj = obj.fget
+
+                # this function likely has some failure modes still if
+                # someone is doing a deep mixing of the same attribute
+                # name as plain Python attribute vs. dataclass field.
+
+                ret = local_datacls_fields.get(key, absent)
+                if _is_declarative_props(ret):
+                    ret = ret.fget
+
+                if ret is obj:
+                    return False
+                elif ret is not absent:
+                    return True
+
+                all_field = all_datacls_fields.get(key, absent)
+
+                ret = getattr(cls, key, obj)
+
+                if ret is obj:
+                    return False
+
+                # for dataclasses, this could be the
+                # 'default' of the field.  so filter more specifically
+                # for an already-mapped InstrumentedAttribute
+                if ret is not absent and isinstance(
+                    ret, InstrumentedAttribute
+                ):
+                    return True
+
+                if all_field is obj:
+                    return False
+                elif all_field is not absent:
+                    return True
+
+                # can't find another attribute
+                return False
+
+        return attribute_is_overridden
+
+    _include_dunders = {
+        "__table__",
+        "__mapper_args__",
+        "__tablename__",
+        "__table_args__",
+    }
+
+    _match_exclude_dunders = re.compile(r"^(?:_sa_|__)")
+
+    def _cls_attr_resolver(
+        self, cls: Type[Any]
+    ) -> Callable[[], Iterable[Tuple[str, Any, Any, bool]]]:
+        """produce a function to iterate the "attributes" of a class
+        which we want to consider for mapping, adjusting for SQLAlchemy fields
+        embedded in dataclass fields.
+
+        """
+        cls_annotations = util.get_annotations(cls)
+
+        cls_vars = vars(cls)
+
+        _include_dunders = self._include_dunders
+        _match_exclude_dunders = self._match_exclude_dunders
+
+        names = [
+            n
+            for n in util.merge_lists_w_ordering(
+                list(cls_vars), list(cls_annotations)
+            )
+            if not _match_exclude_dunders.match(n) or n in _include_dunders
+        ]
+
+        if self.allow_dataclass_fields:
+            sa_dataclass_metadata_key: Optional[str] = _get_immediate_cls_attr(
+                cls, "__sa_dataclass_metadata_key__"
+            )
+        else:
+            sa_dataclass_metadata_key = None
+
+        if not sa_dataclass_metadata_key:
+
+            def local_attributes_for_class() -> (
+                Iterable[Tuple[str, Any, Any, bool]]
+            ):
+                return (
+                    (
+                        name,
+                        cls_vars.get(name),
+                        cls_annotations.get(name),
+                        False,
+                    )
+                    for name in names
+                )
+
+        else:
+            dataclass_fields = {
+                field.name: field for field in util.local_dataclass_fields(cls)
+            }
+
+            fixed_sa_dataclass_metadata_key = sa_dataclass_metadata_key
+
+            def local_attributes_for_class() -> (
+                Iterable[Tuple[str, Any, Any, bool]]
+            ):
+                for name in names:
+                    field = dataclass_fields.get(name, None)
+                    if field and sa_dataclass_metadata_key in field.metadata:
+                        yield field.name, _as_dc_declaredattr(
+                            field.metadata, fixed_sa_dataclass_metadata_key
+                        ), cls_annotations.get(field.name), True
+                    else:
+                        yield name, cls_vars.get(name), cls_annotations.get(
+                            name
+                        ), False
+
+        return local_attributes_for_class
+
+    def _scan_attributes(self) -> None:
+        cls = self.cls
+
+        cls_as_Decl = cast("_DeclMappedClassProtocol[Any]", cls)
+
+        clsdict_view = self.clsdict_view
+        collected_attributes = self.collected_attributes
+        column_copies = self.column_copies
+        _include_dunders = self._include_dunders
+        mapper_args_fn = None
+        table_args = inherited_table_args = None
+        table_fn = None
+        tablename = None
+        fixed_table = "__table__" in clsdict_view
+
+        attribute_is_overridden = self._cls_attr_override_checker(self.cls)
+
+        bases = []
+
+        for base in cls.__mro__:
+            # collect bases and make sure standalone columns are copied
+            # to be the column they will ultimately be on the class,
+            # so that declared_attr functions use the right columns.
+            # need to do this all the way up the hierarchy first
+            # (see #8190)
+
+            class_mapped = base is not cls and _is_supercls_for_inherits(base)
+
+            local_attributes_for_class = self._cls_attr_resolver(base)
+
+            if not class_mapped and base is not cls:
+                locally_collected_columns = self._produce_column_copies(
+                    local_attributes_for_class,
+                    attribute_is_overridden,
+                    fixed_table,
+                    base,
+                )
+            else:
+                locally_collected_columns = {}
+
+            bases.append(
+                (
+                    base,
+                    class_mapped,
+                    local_attributes_for_class,
+                    locally_collected_columns,
+                )
+            )
+
+        for (
+            base,
+            class_mapped,
+            local_attributes_for_class,
+            locally_collected_columns,
+        ) in bases:
+            # this transfer can also take place as we scan each name
+            # for finer-grained control of how collected_attributes is
+            # populated, as this is what impacts column ordering.
+            # however it's simpler to get it out of the way here.
+            collected_attributes.update(locally_collected_columns)
+
+            for (
+                name,
+                obj,
+                annotation,
+                is_dataclass_field,
+            ) in local_attributes_for_class():
+                if name in _include_dunders:
+                    if name == "__mapper_args__":
+                        check_decl = _check_declared_props_nocascade(
+                            obj, name, cls
+                        )
+                        if not mapper_args_fn and (
+                            not class_mapped or check_decl
+                        ):
+                            # don't even invoke __mapper_args__ until
+                            # after we've determined everything about the
+                            # mapped table.
+                            # make a copy of it so a class-level dictionary
+                            # is not overwritten when we update column-based
+                            # arguments.
+                            def _mapper_args_fn() -> Dict[str, Any]:
+                                return dict(cls_as_Decl.__mapper_args__)
+
+                            mapper_args_fn = _mapper_args_fn
+
+                    elif name == "__tablename__":
+                        check_decl = _check_declared_props_nocascade(
+                            obj, name, cls
+                        )
+                        if not tablename and (not class_mapped or check_decl):
+                            tablename = cls_as_Decl.__tablename__
+                    elif name == "__table__":
+                        check_decl = _check_declared_props_nocascade(
+                            obj, name, cls
+                        )
+                        # if a @declared_attr using "__table__" is detected,
+                        # wrap up a callable to look for "__table__" from
+                        # the final concrete class when we set up a table.
+                        # this was fixed by
+                        # #11509, regression in 2.0 from version 1.4.
+                        if check_decl and not table_fn:
+                            # don't even invoke __table__ until we're ready
+                            def _table_fn() -> FromClause:
+                                return cls_as_Decl.__table__
+
+                            table_fn = _table_fn
+
+                    elif name == "__table_args__":
+                        check_decl = _check_declared_props_nocascade(
+                            obj, name, cls
+                        )
+                        if not table_args and (not class_mapped or check_decl):
+                            table_args = cls_as_Decl.__table_args__
+                            if not isinstance(
+                                table_args, (tuple, dict, type(None))
+                            ):
+                                raise exc.ArgumentError(
+                                    "__table_args__ value must be a tuple, "
+                                    "dict, or None"
+                                )
+                            if base is not cls:
+                                inherited_table_args = True
+                    else:
+                        # any other dunder names; should not be here
+                        # as we have tested for all four names in
+                        # _include_dunders
+                        assert False
+                elif class_mapped:
+                    if _is_declarative_props(obj) and not obj._quiet:
+                        util.warn(
+                            "Regular (i.e. not __special__) "
+                            "attribute '%s.%s' uses @declared_attr, "
+                            "but owning class %s is mapped - "
+                            "not applying to subclass %s."
+                            % (base.__name__, name, base, cls)
+                        )
+
+                    continue
+                elif base is not cls:
+                    # we're a mixin, abstract base, or something that is
+                    # acting like that for now.
+
+                    if isinstance(obj, (Column, MappedColumn)):
+                        # already copied columns to the mapped class.
+                        continue
+                    elif isinstance(obj, MapperProperty):
+                        raise exc.InvalidRequestError(
+                            "Mapper properties (i.e. deferred,"
+                            "column_property(), relationship(), etc.) must "
+                            "be declared as @declared_attr callables "
+                            "on declarative mixin classes.  For dataclass "
+                            "field() objects, use a lambda:"
+                        )
+                    elif _is_declarative_props(obj):
+                        # tried to get overloads to tell this to
+                        # pylance, no luck
+                        assert obj is not None
+
+                        if obj._cascading:
+                            if name in clsdict_view:
+                                # unfortunately, while we can use the user-
+                                # defined attribute here to allow a clean
+                                # override, if there's another
+                                # subclass below then it still tries to use
+                                # this.  not sure if there is enough
+                                # information here to add this as a feature
+                                # later on.
+                                util.warn(
+                                    "Attribute '%s' on class %s cannot be "
+                                    "processed due to "
+                                    "@declared_attr.cascading; "
+                                    "skipping" % (name, cls)
+                                )
+                            collected_attributes[name] = column_copies[obj] = (
+                                ret
+                            ) = obj.__get__(obj, cls)
+                            setattr(cls, name, ret)
+                        else:
+                            if is_dataclass_field:
+                                # access attribute using normal class access
+                                # first, to see if it's been mapped on a
+                                # superclass.   note if the dataclasses.field()
+                                # has "default", this value can be anything.
+                                ret = getattr(cls, name, None)
+
+                                # so, if it's anything that's not ORM
+                                # mapped, assume we should invoke the
+                                # declared_attr
+                                if not isinstance(ret, InspectionAttr):
+                                    ret = obj.fget()
+                            else:
+                                # access attribute using normal class access.
+                                # if the declared attr already took place
+                                # on a superclass that is mapped, then
+                                # this is no longer a declared_attr, it will
+                                # be the InstrumentedAttribute
+                                ret = getattr(cls, name)
+
+                            # correct for proxies created from hybrid_property
+                            # or similar.  note there is no known case that
+                            # produces nested proxies, so we are only
+                            # looking one level deep right now.
+
+                            if (
+                                isinstance(ret, InspectionAttr)
+                                and attr_is_internal_proxy(ret)
+                                and not isinstance(
+                                    ret.original_property, MapperProperty
+                                )
+                            ):
+                                ret = ret.descriptor
+
+                            collected_attributes[name] = column_copies[obj] = (
+                                ret
+                            )
+
+                        if (
+                            isinstance(ret, (Column, MapperProperty))
+                            and ret.doc is None
+                        ):
+                            ret.doc = obj.__doc__
+
+                        self._collect_annotation(
+                            name,
+                            obj._collect_return_annotation(),
+                            base,
+                            True,
+                            obj,
+                        )
+                    elif _is_mapped_annotation(annotation, cls, base):
+                        # Mapped annotation without any object.
+                        # _produce_column_copies should have handled this.
+                        # if future support for other MapperProperty,
+                        # then test if this name is already handled and
+                        # otherwise proceed to generate.
+                        if not fixed_table:
+                            assert (
+                                name in collected_attributes
+                                or attribute_is_overridden(name, None)
+                            )
+                        continue
+                    else:
+                        # here, the attribute is some other kind of
+                        # property that we assume is not part of the
+                        # declarative mapping.  however, check for some
+                        # more common mistakes
+                        self._warn_for_decl_attributes(base, name, obj)
+                elif is_dataclass_field and (
+                    name not in clsdict_view or clsdict_view[name] is not obj
+                ):
+                    # here, we are definitely looking at the target class
+                    # and not a superclass.   this is currently a
+                    # dataclass-only path.  if the name is only
+                    # a dataclass field and isn't in local cls.__dict__,
+                    # put the object there.
+                    # assert that the dataclass-enabled resolver agrees
+                    # with what we are seeing
+
+                    assert not attribute_is_overridden(name, obj)
+
+                    if _is_declarative_props(obj):
+                        obj = obj.fget()
+
+                    collected_attributes[name] = obj
+                    self._collect_annotation(
+                        name, annotation, base, False, obj
+                    )
+                else:
+                    collected_annotation = self._collect_annotation(
+                        name, annotation, base, None, obj
+                    )
+                    is_mapped = (
+                        collected_annotation is not None
+                        and collected_annotation.mapped_container is not None
+                    )
+                    generated_obj = (
+                        collected_annotation.attr_value
+                        if collected_annotation is not None
+                        else obj
+                    )
+                    if obj is None and not fixed_table and is_mapped:
+                        collected_attributes[name] = (
+                            generated_obj
+                            if generated_obj is not None
+                            else MappedColumn()
+                        )
+                    elif name in clsdict_view:
+                        collected_attributes[name] = obj
+                    # else if the name is not in the cls.__dict__,
+                    # don't collect it as an attribute.
+                    # we will see the annotation only, which is meaningful
+                    # both for mapping and dataclasses setup
+
+        if inherited_table_args and not tablename:
+            table_args = None
+
+        self.table_args = table_args
+        self.tablename = tablename
+        self.mapper_args_fn = mapper_args_fn
+        self.table_fn = table_fn
+
+    def _setup_dataclasses_transforms(self) -> None:
+        dataclass_setup_arguments = self.dataclass_setup_arguments
+        if not dataclass_setup_arguments:
+            return
+
+        # can't use is_dataclass since it uses hasattr
+        if "__dataclass_fields__" in self.cls.__dict__:
+            raise exc.InvalidRequestError(
+                f"Class {self.cls} is already a dataclass; ensure that "
+                "base classes / decorator styles of establishing dataclasses "
+                "are not being mixed. "
+                "This can happen if a class that inherits from "
+                "'MappedAsDataclass', even indirectly, is been mapped with "
+                "'@registry.mapped_as_dataclass'"
+            )
+
+        # can't create a dataclass if __table__ is already there. This would
+        # fail an assertion when calling _get_arguments_for_make_dataclass:
+        # assert False, "Mapped[] received without a mapping declaration"
+        if "__table__" in self.cls.__dict__:
+            raise exc.InvalidRequestError(
+                f"Class {self.cls} already defines a '__table__'. "
+                "ORM Annotated Dataclasses do not support a pre-existing "
+                "'__table__' element"
+            )
+
+        warn_for_non_dc_attrs = collections.defaultdict(list)
+
+        def _allow_dataclass_field(
+            key: str, originating_class: Type[Any]
+        ) -> bool:
+            if (
+                originating_class is not self.cls
+                and "__dataclass_fields__" not in originating_class.__dict__
+            ):
+                warn_for_non_dc_attrs[originating_class].append(key)
+
+            return True
+
+        manager = instrumentation.manager_of_class(self.cls)
+        assert manager is not None
+
+        field_list = [
+            _AttributeOptions._get_arguments_for_make_dataclass(
+                key,
+                anno,
+                mapped_container,
+                self.collected_attributes.get(key, _NoArg.NO_ARG),
+            )
+            for key, anno, mapped_container in (
+                (
+                    key,
+                    mapped_anno if mapped_anno else raw_anno,
+                    mapped_container,
+                )
+                for key, (
+                    raw_anno,
+                    mapped_container,
+                    mapped_anno,
+                    is_dc,
+                    attr_value,
+                    originating_module,
+                    originating_class,
+                ) in self.collected_annotations.items()
+                if _allow_dataclass_field(key, originating_class)
+                and (
+                    key not in self.collected_attributes
+                    # issue #9226; check for attributes that we've collected
+                    # which are already instrumented, which we would assume
+                    # mean we are in an ORM inheritance mapping and this
+                    # attribute is already mapped on the superclass.   Under
+                    # no circumstance should any QueryableAttribute be sent to
+                    # the dataclass() function; anything that's mapped should
+                    # be Field and that's it
+                    or not isinstance(
+                        self.collected_attributes[key], QueryableAttribute
+                    )
+                )
+            )
+        ]
+
+        if warn_for_non_dc_attrs:
+            for (
+                originating_class,
+                non_dc_attrs,
+            ) in warn_for_non_dc_attrs.items():
+                util.warn_deprecated(
+                    f"When transforming {self.cls} to a dataclass, "
+                    f"attribute(s) "
+                    f"{', '.join(repr(key) for key in non_dc_attrs)} "
+                    f"originates from superclass "
+                    f"{originating_class}, which is not a dataclass.  This "
+                    f"usage is deprecated and will raise an error in "
+                    f"SQLAlchemy 2.1.  When declaring SQLAlchemy Declarative "
+                    f"Dataclasses, ensure that all mixin classes and other "
+                    f"superclasses which include attributes are also a "
+                    f"subclass of MappedAsDataclass.",
+                    "2.0",
+                    code="dcmx",
+                )
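+        # sketch of the deprecated pattern warned about above, vs. the
+        # corrected form (names are illustrative only):
+        #
+        #     class HasId:  # plain superclass - deprecated
+        #         id: Mapped[int] = mapped_column(primary_key=True)
+        #
+        #     class HasId(MappedAsDataclass):  # corrected
+        #         id: Mapped[int] = mapped_column(primary_key=True)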
+
+        annotations = {}
+        defaults = {}
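+        # each field_list entry mirrors a dataclasses.make_dataclass()
+        # field spec: (name, type) or (name, type, default-or-Field),
+        # e.g. (illustrative) ("id", int, field(init=False))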
+        for item in field_list:
+            if len(item) == 2:
+                name, tp = item
+            elif len(item) == 3:
+                name, tp, spec = item
+                defaults[name] = spec
+            else:
+                assert False
+            annotations[name] = tp
+
+        for k, v in defaults.items():
+            setattr(self.cls, k, v)
+
+        self._apply_dataclasses_to_any_class(
+            dataclass_setup_arguments, self.cls, annotations
+        )
+
+    @classmethod
+    def _update_annotations_for_non_mapped_class(
+        cls, klass: Type[_O]
+    ) -> Mapping[str, _AnnotationScanType]:
+        cls_annotations = util.get_annotations(klass)
+
+        new_anno = {}
+        for name, annotation in cls_annotations.items():
+            if _is_mapped_annotation(annotation, klass, klass):
+                extracted = _extract_mapped_subtype(
+                    annotation,
+                    klass,
+                    klass.__module__,
+                    name,
+                    type(None),
+                    required=False,
+                    is_dataclass_field=False,
+                    expect_mapped=False,
+                )
+                if extracted:
+                    inner, _ = extracted
+                    new_anno[name] = inner
+            else:
+                new_anno[name] = annotation
+        return new_anno
+
+    @classmethod
+    def _apply_dataclasses_to_any_class(
+        cls,
+        dataclass_setup_arguments: _DataclassArguments,
+        klass: Type[_O],
+        use_annotations: Mapping[str, _AnnotationScanType],
+    ) -> None:
+        cls._assert_dc_arguments(dataclass_setup_arguments)
+
+        dataclass_callable = dataclass_setup_arguments["dataclass_callable"]
+        if dataclass_callable is _NoArg.NO_ARG:
+            dataclass_callable = dataclasses.dataclass
+
+        restored: Optional[Any]
+
+        if use_annotations:
+            # apply constructed annotations that should look "normal" to a
+            # dataclasses callable, based on the fields present.  This
+            # means remove the Mapped[] container and ensure all Field
+            # entries have an annotation
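+            #
+            # e.g. (illustrative) the __annotations__ presented to the
+            # dataclasses callable change from
+            #
+            #     {"id": Mapped[int]}  to  {"id": int}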
+            restored = getattr(klass, "__annotations__", None)
+            klass.__annotations__ = cast("Dict[str, Any]", use_annotations)
+        else:
+            restored = None
+
+        try:
+            dataclass_callable(
+                klass,
+                **{
+                    k: v
+                    for k, v in dataclass_setup_arguments.items()
+                    if v is not _NoArg.NO_ARG and k != "dataclass_callable"
+                },
+            )
+        except (TypeError, ValueError) as ex:
+            raise exc.InvalidRequestError(
+                f"Python dataclasses error encountered when creating "
+                f"dataclass for {klass.__name__!r}: "
+                f"{ex!r}. Please refer to Python dataclasses "
+                "documentation for additional information.",
+                code="dcte",
+            ) from ex
+        finally:
+            # restore original annotations outside of the dataclasses
+            # process; for mixins and __abstract__ superclasses, SQLAlchemy
+            # Declarative will need to see the Mapped[] container inside the
+            # annotations in order to map subclasses
+            if use_annotations:
+                if restored is None:
+                    del klass.__annotations__
+                else:
+                    klass.__annotations__ = restored
+
+    @classmethod
+    def _assert_dc_arguments(cls, arguments: _DataclassArguments) -> None:
+        allowed = {
+            "init",
+            "repr",
+            "order",
+            "eq",
+            "unsafe_hash",
+            "kw_only",
+            "match_args",
+            "dataclass_callable",
+        }
+        disallowed_args = set(arguments).difference(allowed)
+        if disallowed_args:
+            msg = ", ".join(f"{arg!r}" for arg in sorted(disallowed_args))
+            raise exc.ArgumentError(
+                f"Dataclass argument(s) {msg} are not accepted"
+            )
+
+    def _collect_annotation(
+        self,
+        name: str,
+        raw_annotation: _AnnotationScanType,
+        originating_class: Type[Any],
+        expect_mapped: Optional[bool],
+        attr_value: Any,
+    ) -> Optional[_CollectedAnnotation]:
+        if name in self.collected_annotations:
+            return self.collected_annotations[name]
+
+        if raw_annotation is None:
+            return None
+
+        is_dataclass = self.is_dataclass_prior_to_mapping
+        allow_unmapped = self.allow_unmapped_annotations
+
+        if expect_mapped is None:
+            is_dataclass_field = isinstance(attr_value, dataclasses.Field)
+            expect_mapped = (
+                not is_dataclass_field
+                and not allow_unmapped
+                and (
+                    attr_value is None
+                    or isinstance(attr_value, _MappedAttribute)
+                )
+            )
+        else:
+            is_dataclass_field = False
+
+        is_dataclass_field = False
+        extracted = _extract_mapped_subtype(
+            raw_annotation,
+            self.cls,
+            originating_class.__module__,
+            name,
+            type(attr_value),
+            required=False,
+            is_dataclass_field=is_dataclass_field,
+            expect_mapped=expect_mapped and not is_dataclass,
+        )
+        if extracted is None:
+            # a ClassVar annotation comes out as None here
+            return None
+
+        extracted_mapped_annotation, mapped_container = extracted
+
+        if attr_value is None and not is_literal(extracted_mapped_annotation):
+            for elem in get_args(extracted_mapped_annotation):
+                if is_fwd_ref(
+                    elem, check_generic=True, check_for_plain_string=True
+                ):
+                    elem = de_stringify_annotation(
+                        self.cls,
+                        elem,
+                        originating_class.__module__,
+                        include_generic=True,
+                    )
+                # look in Annotated[...] for an ORM construct,
+                # such as Annotated[int, mapped_column(primary_key=True)]
+                if isinstance(elem, _IntrospectsAnnotations):
+                    attr_value = elem.found_in_pep593_annotated()
+
+        self.collected_annotations[name] = ca = _CollectedAnnotation(
+            raw_annotation,
+            mapped_container,
+            extracted_mapped_annotation,
+            is_dataclass,
+            attr_value,
+            originating_class.__module__,
+            originating_class,
+        )
+        return ca
+
+    def _warn_for_decl_attributes(
+        self, cls: Type[Any], key: str, c: Any
+    ) -> None:
+        if isinstance(c, expression.ColumnElement):
+            util.warn(
+                f"Attribute '{key}' on class {cls} appears to "
+                "be a non-schema SQLAlchemy expression "
+                "object; this won't be part of the declarative mapping. "
+                "To map arbitrary expressions, use ``column_property()`` "
+                "or a similar function such as ``deferred()``, "
+                "``query_expression()`` etc. "
+            )
+
+    def _produce_column_copies(
+        self,
+        attributes_for_class: Callable[
+            [], Iterable[Tuple[str, Any, Any, bool]]
+        ],
+        attribute_is_overridden: Callable[[str, Any], bool],
+        fixed_table: bool,
+        originating_class: Type[Any],
+    ) -> Dict[str, Union[Column[Any], MappedColumn[Any]]]:
+        cls = self.cls
+        dict_ = self.clsdict_view
+        locally_collected_attributes = {}
+        column_copies = self.column_copies
+        # copy mixin columns to the mapped class
+
+        for name, obj, annotation, is_dataclass in attributes_for_class():
+            if (
+                not fixed_table
+                and obj is None
+                and _is_mapped_annotation(annotation, cls, originating_class)
+            ):
+                # obj is None means this is the annotation-only path
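+                # e.g. (illustrative) a mixin attribute declared by
+                # annotation alone, with no assigned value:
+                #
+                #     class HasName:
+                #         name: Mapped[str]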
+
+                if attribute_is_overridden(name, obj):
+                    # perform same "overridden" check as we do for
+                    # Column/MappedColumn, this is how a mixin col is not
+                    # applied to an inherited subclass that does not have
+                    # the mixin.   the anno-only path added here for
+                    # #9564
+                    continue
+
+                collected_annotation = self._collect_annotation(
+                    name, annotation, originating_class, True, obj
+                )
+                obj = (
+                    collected_annotation.attr_value
+                    if collected_annotation is not None
+                    else obj
+                )
+                if obj is None:
+                    obj = MappedColumn()
+
+                locally_collected_attributes[name] = obj
+                setattr(cls, name, obj)
+
+            elif isinstance(obj, (Column, MappedColumn)):
+                if attribute_is_overridden(name, obj):
+                    # if column has been overridden
+                    # (like by the InstrumentedAttribute of the
+                    # superclass), skip.  don't collect the annotation
+                    # either (issue #8718)
+                    continue
+
+                collected_annotation = self._collect_annotation(
+                    name, annotation, originating_class, True, obj
+                )
+                obj = (
+                    collected_annotation.attr_value
+                    if collected_annotation is not None
+                    else obj
+                )
+
+                if name not in dict_ and not (
+                    "__table__" in dict_
+                    and (getattr(obj, "name", None) or name)
+                    in dict_["__table__"].c
+                ):
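+                    # (illustrative) the raise below covers e.g. a mixin
+                    # column whose ForeignKey refers to a Column object not
+                    # yet bound to a Table:
+                    #
+                    #     some_col = Column(Integer)  # no Table yet
+                    #
+                    #     class RefMixin:
+                    #         ref = Column(ForeignKey(some_col))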
+                    if obj.foreign_keys:
+                        for fk in obj.foreign_keys:
+                            if (
+                                fk._table_column is not None
+                                and fk._table_column.table is None
+                            ):
+                                raise exc.InvalidRequestError(
+                                    "Columns with foreign keys to "
+                                    "non-table-bound "
+                                    "columns must be declared as "
+                                    "@declared_attr callables "
+                                    "on declarative mixin classes.  "
+                                    "For dataclass "
+                                    "field() objects, use a lambda:."
+                                )
+
+                    column_copies[obj] = copy_ = obj._copy()
+
+                    locally_collected_attributes[name] = copy_
+                    setattr(cls, name, copy_)
+
+        return locally_collected_attributes
+
+    def _extract_mappable_attributes(self) -> None:
+        cls = self.cls
+        collected_attributes = self.collected_attributes
+
+        our_stuff = self.properties
+
+        _include_dunders = self._include_dunders
+
+        late_mapped = _get_immediate_cls_attr(
+            cls, "_sa_decl_prepare_nocascade", strict=True
+        )
+
+        allow_unmapped_annotations = self.allow_unmapped_annotations
+        expect_annotations_wo_mapped = (
+            allow_unmapped_annotations or self.is_dataclass_prior_to_mapping
+        )
+
+        look_for_dataclass_things = bool(self.dataclass_setup_arguments)
+
+        for k in list(collected_attributes):
+            if k in _include_dunders:
+                continue
+
+            value = collected_attributes[k]
+
+            if _is_declarative_props(value):
+                # @declared_attr in collected_attributes only occurs here
+                # for a @declared_attr that's directly on the mapped class;
+                # for a mixin, these have already been evaluated
+                if value._cascading:
+                    util.warn(
+                        "Use of @declared_attr.cascading only applies to "
+                        "Declarative 'mixin' and 'abstract' classes.  "
+                        "Currently, this flag is ignored on mapped class "
+                        "%s" % self.cls
+                    )
+
+                value = getattr(cls, k)
+
+            elif (
+                isinstance(value, QueryableAttribute)
+                and value.class_ is not cls
+                and value.key != k
+            ):
+                # detect a QueryableAttribute that's already mapped being
+                # assigned elsewhere in userland, turn into a synonym()
+                value = SynonymProperty(value.key)
+                setattr(cls, k, value)
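+                # e.g. (illustrative):
+                #
+                #     class Manager(Employee):
+                #         alias = Employee.name
+                #
+                # maps "alias" as synonym("name")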
+
+            if (
+                isinstance(value, tuple)
+                and len(value) == 1
+                and isinstance(value[0], (Column, _MappedAttribute))
+            ):
+                util.warn(
+                    "Ignoring declarative-like tuple value of attribute "
+                    "'%s': possibly a copy-and-paste error with a comma "
+                    "accidentally placed at the end of the line?" % k
+                )
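+                # e.g. (illustrative) the trailing-comma mistake:
+                #
+                #     name: Mapped[str] = mapped_column(String(50)),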
+                continue
+            elif look_for_dataclass_things and isinstance(
+                value, dataclasses.Field
+            ):
+                # we collected a dataclass Field; dataclasses would have
+                # set up the correct state on the class
+                continue
+            elif not isinstance(value, (Column, _DCAttributeOptions)):
+                # using @declared_attr for some object that
+                # isn't Column/MapperProperty/_DCAttributeOptions; remove
+                # from the clsdict_view
+                # and place the evaluated value onto the class.
+                collected_attributes.pop(k)
+                self._warn_for_decl_attributes(cls, k, value)
+                if not late_mapped:
+                    setattr(cls, k, value)
+                continue
+            # we expect to see the name 'metadata' in some valid cases;
+            # however at this point we see it's assigned to something trying
+            # to be mapped, so raise for that.
+            # TODO: should "registry" here be also?   might be too late
+            # to change that now (2.0 betas)
+            elif k in ("metadata",):
+                raise exc.InvalidRequestError(
+                    f"Attribute name '{k}' is reserved when using the "
+                    "Declarative API."
+                )
+            elif isinstance(value, Column):
+                _undefer_column_name(
+                    k, self.column_copies.get(value, value)  # type: ignore
+                )
+            else:
+                if isinstance(value, _IntrospectsAnnotations):
+                    (
+                        annotation,
+                        mapped_container,
+                        extracted_mapped_annotation,
+                        is_dataclass,
+                        attr_value,
+                        originating_module,
+                        originating_class,
+                    ) = self.collected_annotations.get(
+                        k, (None, None, None, False, None, None, None)
+                    )
+
+                    # issue #8692 - don't do any annotation interpretation if
+                    # an annotation were present and a container such as
+                    # Mapped[] etc. were not used.  If annotation is None,
+                    # do declarative_scan so that the property can raise
+                    # for required
+                    if (
+                        mapped_container is not None
+                        or annotation is None
+                        # issue #10516: need to do declarative_scan even with
+                        # a non-Mapped annotation if we are doing
+                        # __allow_unmapped__, for things like col.name
+                        # assignment
+                        or allow_unmapped_annotations
+                    ):
+                        try:
+                            value.declarative_scan(
+                                self,
+                                self.registry,
+                                cls,
+                                originating_module,
+                                k,
+                                mapped_container,
+                                annotation,
+                                extracted_mapped_annotation,
+                                is_dataclass,
+                            )
+                        except NameError as ne:
+                            raise exc.ArgumentError(
+                                f"Could not resolve all types within mapped "
+                                f'annotation: "{annotation}".  Ensure all '
+                                f"types are written correctly and are "
+                                f"imported within the module in use."
+                            ) from ne
+                    else:
+                        # assert that we expected annotations without
+                        # Mapped[] to be passed here; otherwise an error
+                        # should have been raised by
+                        # util._extract_mapped_subtype before we got here.
+                        assert expect_annotations_wo_mapped
+
+                if isinstance(value, _DCAttributeOptions):
+                    if (
+                        value._has_dataclass_arguments
+                        and not look_for_dataclass_things
+                    ):
+                        if isinstance(value, MapperProperty):
+                            argnames = [
+                                "init",
+                                "default_factory",
+                                "repr",
+                                "default",
+                            ]
+                        else:
+                            argnames = ["init", "default_factory", "repr"]
+
+                        args = {
+                            a
+                            for a in argnames
+                            if getattr(
+                                value._attribute_options, f"dataclasses_{a}"
+                            )
+                            is not _NoArg.NO_ARG
+                        }
+
+                        raise exc.ArgumentError(
+                            f"Attribute '{k}' on class {cls} includes "
+                            f"dataclasses argument(s): "
+                            f"{', '.join(sorted(repr(a) for a in args))} but "
+                            f"class does not specify "
+                            "SQLAlchemy native dataclass configuration."
+                        )
+
+                    if not isinstance(value, (MapperProperty, _MapsColumns)):
+                        # filter for _DCAttributeOptions objects that aren't
+                        # MapperProperty / mapped_column().  Currently this
+                        # includes AssociationProxy.   pop it from the things
+                        # we're going to map and set it up as a descriptor
+                        # on the class.
+                        collected_attributes.pop(k)
+
+                        # the association proxy (or other descriptor object
+                        # that may use _DCAttributeOptions) is usually set
+                        # here, except if:
+                        # 1. we're a dataclass; dataclasses would have
+                        #    already removed the attr here, or
+                        # 2. the assoc proxy is coming from a superclass;
+                        #    we want it to be direct here so it tracks
+                        #    state, or
+                        # 3. the assoc proxy comes from declared_attr
+                        #    (uncommon case)
+                        setattr(cls, k, value)
+                        continue
+
+            our_stuff[k] = value
+
+    def _extract_declared_columns(self) -> None:
+        our_stuff = self.properties
+
+        # extract columns from the class dict
+        declared_columns = self.declared_columns
+        column_ordering = self.column_ordering
+        name_to_prop_key = collections.defaultdict(set)
+
+        for key, c in list(our_stuff.items()):
+            if isinstance(c, _MapsColumns):
+                mp_to_assign = c.mapper_property_to_assign
+                if mp_to_assign:
+                    our_stuff[key] = mp_to_assign
+                else:
+                    # if no mapper property to assign, this currently means
+                    # this is a MappedColumn that will produce a Column for us
+                    del our_stuff[key]
+
+                for col, sort_order in c.columns_to_assign:
+                    if not isinstance(c, CompositeProperty):
+                        name_to_prop_key[col.name].add(key)
+                    declared_columns.add(col)
+
+                    # we would assert this, however we want the below
+                    # warning to take effect instead.  See #9630
+                    # assert col not in column_ordering
+
+                    column_ordering[col] = sort_order
+
+                    # if this is a MappedColumn and the attribute key we
+                    # have is not what the column has for its key, map the
+                    # Column explicitly under the attribute key name.
+                    # otherwise, Mapper will map it under the column key.
+                    if mp_to_assign is None and key != col.key:
+                        our_stuff[key] = col
+            elif isinstance(c, Column):
+                # undefer previously occurred here, and now occurs earlier.
+                # ensure every column we get here has been named
+                assert c.name is not None
+                name_to_prop_key[c.name].add(key)
+                declared_columns.add(c)
+                # if the column is the same name as the key,
+                # remove it from the explicit properties dict.
+                # the normal rules for assigning column-based properties
+                # will take over, including precedence of columns
+                # in multi-column ColumnProperties.
+                if key == c.key:
+                    del our_stuff[key]
+
+        for name, keys in name_to_prop_key.items():
+            if len(keys) > 1:
+                util.warn(
+                    "On class %r, Column object %r named "
+                    "directly multiple times, "
+                    "only one will be used: %s. "
+                    "Consider using orm.synonym instead"
+                    % (self.classname, name, (", ".join(sorted(keys))))
+                )
+
+    def _setup_table(self, table: Optional[FromClause] = None) -> None:
+        cls = self.cls
+        cls_as_Decl = cast("MappedClassProtocol[Any]", cls)
+
+        tablename = self.tablename
+        table_args = self.table_args
+        clsdict_view = self.clsdict_view
+        declared_columns = self.declared_columns
+        column_ordering = self.column_ordering
+
+        manager = attributes.manager_of_class(cls)
+
+        if (
+            self.table_fn is None
+            and "__table__" not in clsdict_view
+            and table is None
+        ):
+            if hasattr(cls, "__table_cls__"):
+                table_cls = cast(
+                    Type[Table],
+                    util.unbound_method_to_callable(cls.__table_cls__),  # type: ignore  # noqa: E501
+                )
+            else:
+                table_cls = Table
+
+            if tablename is not None:
+                args: Tuple[Any, ...] = ()
+                table_kw: Dict[str, Any] = {}
+
+                if table_args:
+                    if isinstance(table_args, dict):
+                        table_kw = table_args
+                    elif isinstance(table_args, tuple):
+                        if isinstance(table_args[-1], dict):
+                            args, table_kw = table_args[0:-1], table_args[-1]
+                        else:
+                            args = table_args
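+                # accepted __table_args__ forms parsed above
+                # (illustrative):
+                #
+                #     __table_args__ = {"schema": "archive"}
+                #     __table_args__ = (UniqueConstraint("q"),)
+                #     __table_args__ = (
+                #         UniqueConstraint("q"),
+                #         {"schema": "archive"},
+                #     )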
+
+                autoload_with = clsdict_view.get("__autoload_with__")
+                if autoload_with:
+                    table_kw["autoload_with"] = autoload_with
+
+                autoload = clsdict_view.get("__autoload__")
+                if autoload:
+                    table_kw["autoload"] = True
+
+                sorted_columns = sorted(
+                    declared_columns,
+                    key=lambda c: column_ordering.get(c, 0),
+                )
+                table = self.set_cls_attribute(
+                    "__table__",
+                    table_cls(
+                        tablename,
+                        self._metadata_for_cls(manager),
+                        *sorted_columns,
+                        *args,
+                        **table_kw,
+                    ),
+                )
+        else:
+            if table is None:
+                if self.table_fn:
+                    table = self.set_cls_attribute(
+                        "__table__", self.table_fn()
+                    )
+                else:
+                    table = cls_as_Decl.__table__
+            if declared_columns:
+                for c in declared_columns:
+                    if not table.c.contains_column(c):
+                        raise exc.ArgumentError(
+                            "Can't add additional column %r when "
+                            "specifying __table__" % c.key
+                        )
+
+        self.local_table = table
+
+    def _metadata_for_cls(self, manager: ClassManager[Any]) -> MetaData:
+        meta: Optional[MetaData] = getattr(self.cls, "metadata", None)
+        if meta is not None:
+            return meta
+        else:
+            return manager.registry.metadata
+
+    def _setup_inheriting_mapper(self, mapper_kw: _MapperKwArgs) -> None:
+        cls = self.cls
+
+        inherits = mapper_kw.get("inherits", None)
+
+        if inherits is None:
+            # since we search for classical mappings now, search for
+            # multiple mapped bases as well and raise an error.
+            inherits_search = []
+            for base_ in cls.__bases__:
+                c = _resolve_for_abstract_or_classical(base_)
+                if c is None:
+                    continue
+
+                if _is_supercls_for_inherits(c) and c not in inherits_search:
+                    inherits_search.append(c)
+
+            if inherits_search:
+                if len(inherits_search) > 1:
+                    raise exc.InvalidRequestError(
+                        "Class %s has multiple mapped bases: %r"
+                        % (cls, inherits_search)
+                    )
+                inherits = inherits_search[0]
+        elif isinstance(inherits, Mapper):
+            inherits = inherits.class_
+
+        self.inherits = inherits
+
+        clsdict_view = self.clsdict_view
+        if "__table__" not in clsdict_view and self.tablename is None:
+            self.single = True
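+            # e.g. (illustrative) single table inheritance, no
+            # __tablename__ or __table__ on the subclass:
+            #
+            #     class Manager(Employee):
+            #         manager_name: Mapped[Optional[str]]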
+
+    def _setup_inheriting_columns(self, mapper_kw: _MapperKwArgs) -> None:
+        table = self.local_table
+        cls = self.cls
+        table_args = self.table_args
+        declared_columns = self.declared_columns
+
+        if (
+            table is None
+            and self.inherits is None
+            and not _get_immediate_cls_attr(cls, "__no_table__")
+        ):
+            raise exc.InvalidRequestError(
+                "Class %r does not have a __table__ or __tablename__ "
+                "specified and does not inherit from an existing "
+                "table-mapped class." % cls
+            )
+        elif self.inherits:
+            inherited_mapper_or_config = _declared_mapping_info(self.inherits)
+            assert inherited_mapper_or_config is not None
+            inherited_table = inherited_mapper_or_config.local_table
+            inherited_persist_selectable = (
+                inherited_mapper_or_config.persist_selectable
+            )
+
+            if table is None:
+                # single table inheritance.
+                # ensure no table args
+                if table_args:
+                    raise exc.ArgumentError(
+                        "Can't place __table_args__ on an inherited class "
+                        "with no table."
+                    )
+
+                # add any columns declared here to the inherited table.
+                if declared_columns and not isinstance(inherited_table, Table):
+                    raise exc.ArgumentError(
+                        f"Can't declare columns on single-table-inherited "
+                        f"subclass {self.cls}; superclass {self.inherits} "
+                        "is not mapped to a Table"
+                    )
+
+                for col in declared_columns:
+                    assert inherited_table is not None
+                    if col.name in inherited_table.c:
+                        if inherited_table.c[col.name] is col:
+                            continue
+                        raise exc.ArgumentError(
+                            f"Column '{col}' on class {cls.__name__} "
+                            f"conflicts with existing column "
+                            f"'{inherited_table.c[col.name]}'.  If using "
+                            f"Declarative, consider using the "
+                            "use_existing_column parameter of mapped_column() "
+                            "to resolve conflicts."
+                        )
+                    if col.primary_key:
+                        raise exc.ArgumentError(
+                            "Can't place primary key columns on an inherited "
+                            "class with no table."
+                        )
+
+                    if TYPE_CHECKING:
+                        assert isinstance(inherited_table, Table)
+
+                    inherited_table.append_column(col)
+                    if (
+                        inherited_persist_selectable is not None
+                        and inherited_persist_selectable is not inherited_table
+                    ):
+                        inherited_persist_selectable._refresh_for_new_column(
+                            col
+                        )
+
+    def _prepare_mapper_arguments(self, mapper_kw: _MapperKwArgs) -> None:
+        properties = self.properties
+
+        if self.mapper_args_fn:
+            mapper_args = self.mapper_args_fn()
+        else:
+            mapper_args = {}
+
+        if mapper_kw:
+            mapper_args.update(mapper_kw)
+
+        if "properties" in mapper_args:
+            properties = dict(properties)
+            properties.update(mapper_args["properties"])
+
+        # make sure that column copies are used rather
+        # than the original columns from any mixins
+        for k in ("version_id_col", "polymorphic_on"):
+            if k in mapper_args:
+                v = mapper_args[k]
+                mapper_args[k] = self.column_copies.get(v, v)
+
+        if "primary_key" in mapper_args:
+            mapper_args["primary_key"] = [
+                self.column_copies.get(v, v)
+                for v in util.to_list(mapper_args["primary_key"])
+            ]
+
+        if "inherits" in mapper_args:
+            inherits_arg = mapper_args["inherits"]
+            if isinstance(inherits_arg, Mapper):
+                inherits_arg = inherits_arg.class_
+
+            if inherits_arg is not self.inherits:
+                raise exc.InvalidRequestError(
+                    "mapper inherits argument given for non-inheriting "
+                    "class %s" % (mapper_args["inherits"])
+                )
+
+        if self.inherits:
+            mapper_args["inherits"] = self.inherits
+
+        if self.inherits and not mapper_args.get("concrete", False):
+            # note the superclass is expected to have a Mapper assigned and
+            # not be a deferred config, as this is called within map()
+            inherited_mapper = class_mapper(self.inherits, False)
+            inherited_table = inherited_mapper.local_table
+
+            # single or joined inheritance
+            # exclude any cols on the inherited table which are
+            # not mapped on the parent class, to avoid
+            # mapping columns specific to sibling/nephew classes
+            if "exclude_properties" not in mapper_args:
+                mapper_args["exclude_properties"] = exclude_properties = {
+                    c.key
+                    for c in inherited_table.c
+                    if c not in inherited_mapper._columntoproperty
+                }.union(inherited_mapper.exclude_properties or ())
+                exclude_properties.difference_update(
+                    [c.key for c in self.declared_columns]
+                )
+
+            # look through columns in the current mapper that
+            # are keyed to a propname different than the colname
+            # (if names were the same, we'd have popped it out above,
+            # in which case the mapper makes this combination).
+            # See if the superclass has a similar column property.
+            # If so, join them together.
+            for k, col in list(properties.items()):
+                if not isinstance(col, expression.ColumnElement):
+                    continue
+                if k in inherited_mapper._props:
+                    p = inherited_mapper._props[k]
+                    if isinstance(p, ColumnProperty):
+                        # note here we place the subclass column
+                        # first.  See [ticket:1892] for background.
+                        properties[k] = [col] + p.columns
+        result_mapper_args = mapper_args.copy()
+        result_mapper_args["properties"] = properties
+        self.mapper_args = result_mapper_args
+
+    def map(self, mapper_kw: _MapperKwArgs = util.EMPTY_DICT) -> Mapper[Any]:
+        self._prepare_mapper_arguments(mapper_kw)
+        if hasattr(self.cls, "__mapper_cls__"):
+            mapper_cls = cast(
+                "Type[Mapper[Any]]",
+                util.unbound_method_to_callable(
+                    self.cls.__mapper_cls__  # type: ignore
+                ),
+            )
+        else:
+            mapper_cls = Mapper
+
+        return self.set_cls_attribute(
+            "__mapper__",
+            mapper_cls(self.cls, self.local_table, **self.mapper_args),
+        )
+
+
+@util.preload_module("sqlalchemy.orm.decl_api")
+def _as_dc_declaredattr(
+    field_metadata: Mapping[str, Any], sa_dataclass_metadata_key: str
+) -> Any:
+    # wrap lambdas inside dataclass fields inside an ad-hoc declared_attr.
+    # we can't write to it because field.metadata is immutable :( so we
+    # have to go through extra trouble to compare these
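+    #
+    # e.g. (illustrative, assuming __sa_dataclass_metadata_key__ = "sa")
+    # the lambda arrives via dataclass field metadata:
+    #
+    #     addresses: List["Address"] = dataclasses.field(
+    #         default_factory=list,
+    #         metadata={"sa": lambda: relationship("Address")},
+    #     )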
+    decl_api = util.preloaded.orm_decl_api
+    obj = field_metadata[sa_dataclass_metadata_key]
+    if callable(obj) and not isinstance(obj, decl_api.declared_attr):
+        return decl_api.declared_attr(obj)
+    else:
+        return obj
+
+
+class _DeferredMapperConfig(_ClassScanMapperConfig):
+    _cls: weakref.ref[Type[Any]]
+
+    is_deferred = True
+
+    _configs: util.OrderedDict[
+        weakref.ref[Type[Any]], _DeferredMapperConfig
+    ] = util.OrderedDict()
+
+    def _early_mapping(self, mapper_kw: _MapperKwArgs) -> None:
+        pass
+
+    # mypy disallows plain property override of variable
+    @property  # type: ignore
+    def cls(self) -> Type[Any]:
+        return self._cls()  # type: ignore
+
+    @cls.setter
+    def cls(self, class_: Type[Any]) -> None:
+        self._cls = weakref.ref(class_, self._remove_config_cls)
+        self._configs[self._cls] = self
+
+    @classmethod
+    def _remove_config_cls(cls, ref: weakref.ref[Type[Any]]) -> None:
+        cls._configs.pop(ref, None)
+
+    @classmethod
+    def has_cls(cls, class_: Type[Any]) -> bool:
+        # Python 2.6 fails on weakref if class_ is an old-style class
+        return isinstance(class_, type) and weakref.ref(class_) in cls._configs
+
+    @classmethod
+    def raise_unmapped_for_cls(cls, class_: Type[Any]) -> NoReturn:
+        if hasattr(class_, "_sa_raise_deferred_config"):
+            class_._sa_raise_deferred_config()
+
+        raise orm_exc.UnmappedClassError(
+            class_,
+            msg=(
+                f"Class {orm_exc._safe_cls_name(class_)} has a deferred "
+                "mapping on it.  It is not yet usable as a mapped class."
+            ),
+        )
+
+    @classmethod
+    def config_for_cls(cls, class_: Type[Any]) -> _DeferredMapperConfig:
+        return cls._configs[weakref.ref(class_)]
+
+    @classmethod
+    def classes_for_base(
+        cls, base_cls: Type[Any], sort: bool = True
+    ) -> List[_DeferredMapperConfig]:
+        classes_for_base = [
+            m
+            for m, cls_ in [(m, m.cls) for m in cls._configs.values()]
+            if cls_ is not None and issubclass(cls_, base_cls)
+        ]
+
+        if not sort:
+            return classes_for_base
+
+        all_m_by_cls = {m.cls: m for m in classes_for_base}
+
+        tuples: List[Tuple[_DeferredMapperConfig, _DeferredMapperConfig]] = []
+        for m_cls in all_m_by_cls:
+            tuples.extend(
+                (all_m_by_cls[base_cls], all_m_by_cls[m_cls])
+                for base_cls in m_cls.__bases__
+                if base_cls in all_m_by_cls
+            )
+        return list(topological.sort(tuples, classes_for_base))
+
+    def map(self, mapper_kw: _MapperKwArgs = util.EMPTY_DICT) -> Mapper[Any]:
+        self._configs.pop(self._cls, None)
+        return super().map(mapper_kw)
+
+
+def _add_attribute(
+    cls: Type[Any], key: str, value: MapperProperty[Any]
+) -> None:
+    """add an attribute to an existing declarative class.
+
+    This runs through the logic to determine MapperProperty,
+    adds it to the Mapper, adds a column to the mapped Table, etc.
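+
+    E.g. (illustrative sketch) assignment of a new mapped attribute on an
+    already-mapped class is routed here::
+
+        User.fullname = column_property(
+            User.firstname + " " + User.lastname
+        )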
+
+    """
+
+    if "__mapper__" in cls.__dict__:
+        mapped_cls = cast("MappedClassProtocol[Any]", cls)
+
+        def _table_or_raise(mc: MappedClassProtocol[Any]) -> Table:
+            if isinstance(mc.__table__, Table):
+                return mc.__table__
+            raise exc.InvalidRequestError(
+                f"Cannot add a new attribute to mapped class {mc.__name__!r} "
+                "because it's not mapped against a table."
+            )
+
+        if isinstance(value, Column):
+            _undefer_column_name(key, value)
+            _table_or_raise(mapped_cls).append_column(
+                value, replace_existing=True
+            )
+            mapped_cls.__mapper__.add_property(key, value)
+        elif isinstance(value, _MapsColumns):
+            mp = value.mapper_property_to_assign
+            for col, _ in value.columns_to_assign:
+                _undefer_column_name(key, col)
+                _table_or_raise(mapped_cls).append_column(
+                    col, replace_existing=True
+                )
+                if not mp:
+                    mapped_cls.__mapper__.add_property(key, col)
+            if mp:
+                mapped_cls.__mapper__.add_property(key, mp)
+        elif isinstance(value, MapperProperty):
+            mapped_cls.__mapper__.add_property(key, value)
+        elif isinstance(value, QueryableAttribute) and value.key != key:
+            # detect a QueryableAttribute that's already mapped being
+            # assigned elsewhere in userland, turn into a synonym()
+            value = SynonymProperty(value.key)
+            mapped_cls.__mapper__.add_property(key, value)
+        else:
+            type.__setattr__(cls, key, value)
+            mapped_cls.__mapper__._expire_memoizations()
+    else:
+        type.__setattr__(cls, key, value)
+
+
+def _del_attribute(cls: Type[Any], key: str) -> None:
+    if (
+        "__mapper__" in cls.__dict__
+        and key in cls.__dict__
+        and not cast(
+            "MappedClassProtocol[Any]", cls
+        ).__mapper__._dispose_called
+    ):
+        value = cls.__dict__[key]
+        if isinstance(
+            value, (Column, _MapsColumns, MapperProperty, QueryableAttribute)
+        ):
+            raise NotImplementedError(
+                "Can't un-map individual mapped attributes on a mapped class."
+            )
+        else:
+            type.__delattr__(cls, key)
+            cast(
+                "MappedClassProtocol[Any]", cls
+            ).__mapper__._expire_memoizations()
+    else:
+        type.__delattr__(cls, key)
+
+
+def _declarative_constructor(self: Any, **kwargs: Any) -> None:
+    """A simple constructor that allows initialization from kwargs.
+
+    Sets attributes on the constructed instance using the names and
+    values in ``kwargs``.
+
+    Only keys that are present as
+    attributes of the instance's class are allowed. These could be,
+    for example, any mapped columns or relationships.
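+
+    E.g. (illustrative)::
+
+        u1 = User(name="spongebob", fullname="Spongebob Squarepants")
+
+    An unknown key raises ``TypeError``::
+
+        u2 = User(nonexistent="x")  # TypeError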
+    """
+    cls_ = type(self)
+    for k in kwargs:
+        if not hasattr(cls_, k):
+            raise TypeError(
+                "%r is an invalid keyword argument for %s" % (k, cls_.__name__)
+            )
+        setattr(self, k, kwargs[k])
+
+
+_declarative_constructor.__name__ = "__init__"
+
+
+def _undefer_column_name(key: str, column: Column[Any]) -> None:
+    if column.key is None:
+        column.key = key
+    if column.name is None:
+        column.name = key
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/dependency.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/dependency.py
new file mode 100644
index 00000000..59530624
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/dependency.py
@@ -0,0 +1,1304 @@
+# orm/dependency.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+"""Relationship dependencies.
+
+"""
+
+from __future__ import annotations
+
+from . import attributes
+from . import exc
+from . import sync
+from . import unitofwork
+from . import util as mapperutil
+from .interfaces import MANYTOMANY
+from .interfaces import MANYTOONE
+from .interfaces import ONETOMANY
+from .. import exc as sa_exc
+from .. import sql
+from .. import util
+
+
+class DependencyProcessor:
+    def __init__(self, prop):
+        self.prop = prop
+        self.cascade = prop.cascade
+        self.mapper = prop.mapper
+        self.parent = prop.parent
+        self.secondary = prop.secondary
+        self.direction = prop.direction
+        self.post_update = prop.post_update
+        self.passive_deletes = prop.passive_deletes
+        self.passive_updates = prop.passive_updates
+        self.enable_typechecks = prop.enable_typechecks
+        if self.passive_deletes:
+            self._passive_delete_flag = attributes.PASSIVE_NO_INITIALIZE
+        else:
+            self._passive_delete_flag = attributes.PASSIVE_OFF
+        if self.passive_updates:
+            self._passive_update_flag = attributes.PASSIVE_NO_INITIALIZE
+        else:
+            self._passive_update_flag = attributes.PASSIVE_OFF
+
+        self.sort_key = "%s_%s" % (self.parent._sort_key, prop.key)
+        self.key = prop.key
+        if not self.prop.synchronize_pairs:
+            raise sa_exc.ArgumentError(
+                "Can't build a DependencyProcessor for relationship %s. "
+                "No target attributes to populate between parent and "
+                "child are present" % self.prop
+            )
+
+    @classmethod
+    def from_relationship(cls, prop):
+        return _direction_to_processor[prop.direction](prop)
+
+    def hasparent(self, state):
+        """return True if the given object instance has a parent,
+        according to the ``InstrumentedAttribute`` handled by this
+        ``DependencyProcessor``.
+
+        """
+        return self.parent.class_manager.get_impl(self.key).hasparent(state)
+
+    def per_property_preprocessors(self, uow):
+        """establish actions and dependencies related to a flush.
+
+        These actions will operate on all relevant states in
+        the aggregate.
+
+        """
+        uow.register_preprocessor(self, True)
+
+    def per_property_flush_actions(self, uow):
+        after_save = unitofwork.ProcessAll(uow, self, False, True)
+        before_delete = unitofwork.ProcessAll(uow, self, True, True)
+
+        parent_saves = unitofwork.SaveUpdateAll(
+            uow, self.parent.primary_base_mapper
+        )
+        child_saves = unitofwork.SaveUpdateAll(
+            uow, self.mapper.primary_base_mapper
+        )
+
+        parent_deletes = unitofwork.DeleteAll(
+            uow, self.parent.primary_base_mapper
+        )
+        child_deletes = unitofwork.DeleteAll(
+            uow, self.mapper.primary_base_mapper
+        )
+
+        self.per_property_dependencies(
+            uow,
+            parent_saves,
+            child_saves,
+            parent_deletes,
+            child_deletes,
+            after_save,
+            before_delete,
+        )
+
+    def per_state_flush_actions(self, uow, states, isdelete):
+        """establish actions and dependencies related to a flush.
+
+        These actions will operate on all relevant states
+        individually.  This occurs only if there are cycles
+        in the 'aggregated' version of events.
+
+        """
+
+        child_base_mapper = self.mapper.primary_base_mapper
+        child_saves = unitofwork.SaveUpdateAll(uow, child_base_mapper)
+        child_deletes = unitofwork.DeleteAll(uow, child_base_mapper)
+
+        # locate and disable the aggregate processors
+        # for this dependency
+
+        if isdelete:
+            before_delete = unitofwork.ProcessAll(uow, self, True, True)
+            before_delete.disabled = True
+        else:
+            after_save = unitofwork.ProcessAll(uow, self, False, True)
+            after_save.disabled = True
+
+        # check if the "child" side is part of the cycle
+
+        if child_saves not in uow.cycles:
+            # based on the current dependencies we use, the saves/
+            # deletes should always be in the 'cycles' collection
+            # together.   if this changes, we will have to break up
+            # this method a bit more.
+            assert child_deletes not in uow.cycles
+
+            # child side is not part of the cycle, so we will link per-state
+            # actions to the aggregate "saves", "deletes" actions
+            child_actions = [(child_saves, False), (child_deletes, True)]
+            child_in_cycles = False
+        else:
+            child_in_cycles = True
+
+        # check if the "parent" side is part of the cycle
+        if not isdelete:
+            parent_saves = unitofwork.SaveUpdateAll(
+                uow, self.parent.base_mapper
+            )
+            parent_deletes = before_delete = None
+            if parent_saves in uow.cycles:
+                parent_in_cycles = True
+        else:
+            parent_deletes = unitofwork.DeleteAll(uow, self.parent.base_mapper)
+            parent_saves = after_save = None
+            if parent_deletes in uow.cycles:
+                parent_in_cycles = True
+
+        # now create actions / dependencies for each state.
+
+        for state in states:
+            # detect if there's anything changed or loaded
+            # by a preprocessor on this state/attribute.   In the
+            # case of deletes we may try to load missing items here as well.
+            sum_ = state.manager[self.key].impl.get_all_pending(
+                state,
+                state.dict,
+                (
+                    self._passive_delete_flag
+                    if isdelete
+                    else attributes.PASSIVE_NO_INITIALIZE
+                ),
+            )
+
+            if not sum_:
+                continue
+
+            if isdelete:
+                before_delete = unitofwork.ProcessState(uow, self, True, state)
+                if parent_in_cycles:
+                    parent_deletes = unitofwork.DeleteState(uow, state)
+            else:
+                after_save = unitofwork.ProcessState(uow, self, False, state)
+                if parent_in_cycles:
+                    parent_saves = unitofwork.SaveUpdateState(uow, state)
+
+            if child_in_cycles:
+                child_actions = []
+                for child_state, child in sum_:
+                    if child_state not in uow.states:
+                        child_action = (None, None)
+                    else:
+                        (deleted, listonly) = uow.states[child_state]
+                        if deleted:
+                            child_action = (
+                                unitofwork.DeleteState(uow, child_state),
+                                True,
+                            )
+                        else:
+                            child_action = (
+                                unitofwork.SaveUpdateState(uow, child_state),
+                                False,
+                            )
+                    child_actions.append(child_action)
+
+            # establish dependencies between our possibly per-state
+            # parent action and our possibly per-state child action.
+            for child_action, childisdelete in child_actions:
+                self.per_state_dependencies(
+                    uow,
+                    parent_saves,
+                    parent_deletes,
+                    child_action,
+                    after_save,
+                    before_delete,
+                    isdelete,
+                    childisdelete,
+                )
+
+    def presort_deletes(self, uowcommit, states):
+        return False
+
+    def presort_saves(self, uowcommit, states):
+        return False
+
+    def process_deletes(self, uowcommit, states):
+        pass
+
+    def process_saves(self, uowcommit, states):
+        pass
+
+    def prop_has_changes(self, uowcommit, states, isdelete):
+        if not isdelete or self.passive_deletes:
+            passive = (
+                attributes.PASSIVE_NO_INITIALIZE
+                | attributes.INCLUDE_PENDING_MUTATIONS
+            )
+        elif self.direction is MANYTOONE:
+            # here, we were hoping to optimize having to fetch many-to-one
+            # for history and ignore it, if there are no further cascades
+            # to take place.  however, too many less-common conditions
+            # still occur, and tests in test_relationships /
+            # test_cascade etc. will still fail.
+            passive = attributes.PASSIVE_NO_FETCH_RELATED
+        else:
+            passive = (
+                attributes.PASSIVE_OFF | attributes.INCLUDE_PENDING_MUTATIONS
+            )
+
+        for s in states:
+            # TODO: add a high speed method to InstanceState which
+            # returns whether the attribute has a non-None value,
+            # or had one
+            history = uowcommit.get_attribute_history(s, self.key, passive)
+            if history and not history.empty():
+                return True
+        else:
+            return (
+                states
+                and not self.prop._is_self_referential
+                and self.mapper in uowcommit.mappers
+            )
+
+    def _verify_canload(self, state):
+        if self.prop.uselist and state is None:
+            raise exc.FlushError(
+                "Can't flush None value found in "
+                "collection %s" % (self.prop,)
+            )
+        elif state is not None and not self.mapper._canload(
+            state, allow_subtypes=not self.enable_typechecks
+        ):
+            if self.mapper._canload(state, allow_subtypes=True):
+                raise exc.FlushError(
+                    "Attempting to flush an item of type "
+                    "%(x)s as a member of collection "
+                    '"%(y)s". Expected an object of type '
+                    "%(z)s or a polymorphic subclass of "
+                    "this type. If %(x)s is a subclass of "
+                    '%(z)s, configure mapper "%(zm)s" to '
+                    "load this subtype polymorphically, or "
+                    "set enable_typechecks=False to allow "
+                    "any subtype to be accepted for flush. "
+                    % {
+                        "x": state.class_,
+                        "y": self.prop,
+                        "z": self.mapper.class_,
+                        "zm": self.mapper,
+                    }
+                )
+            else:
+                raise exc.FlushError(
+                    "Attempting to flush an item of type "
+                    "%(x)s as a member of collection "
+                    '"%(y)s". Expected an object of type '
+                    "%(z)s or a polymorphic subclass of "
+                    "this type."
+                    % {
+                        "x": state.class_,
+                        "y": self.prop,
+                        "z": self.mapper.class_,
+                    }
+                )
+
+    def _synchronize(self, state, child, associationrow, clearkeys, uowcommit):
+        raise NotImplementedError()
+
+    def _get_reversed_processed_set(self, uow):
+        if not self.prop._reverse_property:
+            return None
+
+        process_key = tuple(
+            sorted([self.key] + [p.key for p in self.prop._reverse_property])
+        )
+        return uow.memo(("reverse_key", process_key), set)
+
+    def _post_update(self, state, uowcommit, related, is_m2o_delete=False):
+        for x in related:
+            if not is_m2o_delete or x is not None:
+                uowcommit.register_post_update(
+                    state, [r for l, r in self.prop.synchronize_pairs]
+                )
+                break
+
+    def _pks_changed(self, uowcommit, state):
+        raise NotImplementedError()
+
+    def __repr__(self):
+        return "%s(%s)" % (self.__class__.__name__, self.prop)
+
+
+class OneToManyDP(DependencyProcessor):
+    def per_property_dependencies(
+        self,
+        uow,
+        parent_saves,
+        child_saves,
+        parent_deletes,
+        child_deletes,
+        after_save,
+        before_delete,
+    ):
+        if self.post_update:
+            child_post_updates = unitofwork.PostUpdateAll(
+                uow, self.mapper.primary_base_mapper, False
+            )
+            child_pre_updates = unitofwork.PostUpdateAll(
+                uow, self.mapper.primary_base_mapper, True
+            )
+
+            uow.dependencies.update(
+                [
+                    (child_saves, after_save),
+                    (parent_saves, after_save),
+                    (after_save, child_post_updates),
+                    (before_delete, child_pre_updates),
+                    (child_pre_updates, parent_deletes),
+                    (child_pre_updates, child_deletes),
+                ]
+            )
+        else:
+            uow.dependencies.update(
+                [
+                    (parent_saves, after_save),
+                    (after_save, child_saves),
+                    (after_save, child_deletes),
+                    (child_saves, parent_deletes),
+                    (child_deletes, parent_deletes),
+                    (before_delete, child_saves),
+                    (before_delete, child_deletes),
+                ]
+            )
+
+    def per_state_dependencies(
+        self,
+        uow,
+        save_parent,
+        delete_parent,
+        child_action,
+        after_save,
+        before_delete,
+        isdelete,
+        childisdelete,
+    ):
+        if self.post_update:
+            child_post_updates = unitofwork.PostUpdateAll(
+                uow, self.mapper.primary_base_mapper, False
+            )
+            child_pre_updates = unitofwork.PostUpdateAll(
+                uow, self.mapper.primary_base_mapper, True
+            )
+
+            # TODO: this whole block is not covered
+            # by any tests
+            if not isdelete:
+                if childisdelete:
+                    uow.dependencies.update(
+                        [
+                            (child_action, after_save),
+                            (after_save, child_post_updates),
+                        ]
+                    )
+                else:
+                    uow.dependencies.update(
+                        [
+                            (save_parent, after_save),
+                            (child_action, after_save),
+                            (after_save, child_post_updates),
+                        ]
+                    )
+            else:
+                uow.dependencies.update(
+                    [
+                        (before_delete, child_pre_updates),
+                        (child_pre_updates, delete_parent),
+                    ]
+                )
+        elif not isdelete:
+            uow.dependencies.update(
+                [
+                    (save_parent, after_save),
+                    (after_save, child_action),
+                    (save_parent, child_action),
+                ]
+            )
+        else:
+            uow.dependencies.update(
+                [(before_delete, child_action), (child_action, delete_parent)]
+            )
+
+    def presort_deletes(self, uowcommit, states):
+        # head object is being deleted, and we manage its list of
+        # child objects; the child objects have to have their
+        # foreign key to the parent set to NULL
+        should_null_fks = (
+            not self.cascade.delete and not self.passive_deletes == "all"
+        )
+
+        for state in states:
+            history = uowcommit.get_attribute_history(
+                state, self.key, self._passive_delete_flag
+            )
+            if history:
+                for child in history.deleted:
+                    if child is not None and self.hasparent(child) is False:
+                        if self.cascade.delete_orphan:
+                            uowcommit.register_object(child, isdelete=True)
+                        else:
+                            uowcommit.register_object(child)
+
+                if should_null_fks:
+                    for child in history.unchanged:
+                        if child is not None:
+                            uowcommit.register_object(
+                                child, operation="delete", prop=self.prop
+                            )
+
+    def presort_saves(self, uowcommit, states):
+        children_added = uowcommit.memo(("children_added", self), set)
+
+        should_null_fks = (
+            not self.cascade.delete_orphan
+            and not self.passive_deletes == "all"
+        )
+
+        for state in states:
+            pks_changed = self._pks_changed(uowcommit, state)
+
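+            # if the parent's PK changed and updates are not passive
+            # (the database will not cascade them for us), load the
+            # full collection so every child row receives the new
+            # foreign key value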
+            if not pks_changed or self.passive_updates:
+                passive = (
+                    attributes.PASSIVE_NO_INITIALIZE
+                    | attributes.INCLUDE_PENDING_MUTATIONS
+                )
+            else:
+                passive = (
+                    attributes.PASSIVE_OFF
+                    | attributes.INCLUDE_PENDING_MUTATIONS
+                )
+
+            history = uowcommit.get_attribute_history(state, self.key, passive)
+            if history:
+                for child in history.added:
+                    if child is not None:
+                        uowcommit.register_object(
+                            child,
+                            cancel_delete=True,
+                            operation="add",
+                            prop=self.prop,
+                        )
+
+                children_added.update(history.added)
+
+                for child in history.deleted:
+                    if not self.cascade.delete_orphan:
+                        if should_null_fks:
+                            uowcommit.register_object(
+                                child,
+                                isdelete=False,
+                                operation="delete",
+                                prop=self.prop,
+                            )
+                    elif self.hasparent(child) is False:
+                        uowcommit.register_object(
+                            child,
+                            isdelete=True,
+                            operation="delete",
+                            prop=self.prop,
+                        )
+                        for c, m, st_, dct_ in self.mapper.cascade_iterator(
+                            "delete", child
+                        ):
+                            uowcommit.register_object(st_, isdelete=True)
+
+            if pks_changed:
+                if history:
+                    for child in history.unchanged:
+                        if child is not None:
+                            uowcommit.register_object(
+                                child,
+                                False,
+                                self.passive_updates,
+                                operation="pk change",
+                                prop=self.prop,
+                            )
+
+    def process_deletes(self, uowcommit, states):
+        # head object is being deleted, and we manage its list of
+        # child objects; the child objects have to have their foreign
+        # key to the parent set to NULL.  this phase can be called
+        # safely for any cascade but is unnecessary if delete cascade
+        # is on.
+
+        if self.post_update or not self.passive_deletes == "all":
+            children_added = uowcommit.memo(("children_added", self), set)
+
+            for state in states:
+                history = uowcommit.get_attribute_history(
+                    state, self.key, self._passive_delete_flag
+                )
+                if history:
+                    for child in history.deleted:
+                        if (
+                            child is not None
+                            and self.hasparent(child) is False
+                        ):
+                            self._synchronize(
+                                state, child, None, True, uowcommit, False
+                            )
+                            if self.post_update and child:
+                                self._post_update(child, uowcommit, [state])
+
+                    if self.post_update or not self.cascade.delete:
+                        for child in set(history.unchanged).difference(
+                            children_added
+                        ):
+                            if child is not None:
+                                self._synchronize(
+                                    state, child, None, True, uowcommit, False
+                                )
+                                if self.post_update and child:
+                                    self._post_update(
+                                        child, uowcommit, [state]
+                                    )
+
+                    # technically, we can even remove each child from the
+                    # collection here too.  but this would be a somewhat
+                    # inconsistent behavior since it wouldn't happen
+                    # if the old parent wasn't deleted but child was moved.
+
+    def process_saves(self, uowcommit, states):
+        should_null_fks = (
+            not self.cascade.delete_orphan
+            and not self.passive_deletes == "all"
+        )
+
+        for state in states:
+            history = uowcommit.get_attribute_history(
+                state, self.key, attributes.PASSIVE_NO_INITIALIZE
+            )
+            if history:
+                for child in history.added:
+                    self._synchronize(
+                        state, child, None, False, uowcommit, False
+                    )
+                    if child is not None and self.post_update:
+                        self._post_update(child, uowcommit, [state])
+
+                for child in history.deleted:
+                    if (
+                        should_null_fks
+                        and not self.cascade.delete_orphan
+                        and not self.hasparent(child)
+                    ):
+                        self._synchronize(
+                            state, child, None, True, uowcommit, False
+                        )
+
+                if self._pks_changed(uowcommit, state):
+                    for child in history.unchanged:
+                        self._synchronize(
+                            state, child, None, False, uowcommit, True
+                        )
+
+    def _synchronize(
+        self, state, child, associationrow, clearkeys, uowcommit, pks_changed
+    ):
+        source = state
+        dest = child
+        self._verify_canload(child)
+        if dest is None or (
+            not self.post_update and uowcommit.is_deleted(dest)
+        ):
+            return
+        if clearkeys:
+            sync.clear(dest, self.mapper, self.prop.synchronize_pairs)
+        else:
+            sync.populate(
+                source,
+                self.parent,
+                dest,
+                self.mapper,
+                self.prop.synchronize_pairs,
+                uowcommit,
+                self.passive_updates and pks_changed,
+            )
+
+    def _pks_changed(self, uowcommit, state):
+        return sync.source_modified(
+            uowcommit, state, self.parent, self.prop.synchronize_pairs
+        )
+
+
+class ManyToOneDP(DependencyProcessor):
+    def __init__(self, prop):
+        DependencyProcessor.__init__(self, prop)
+        for mapper in self.mapper.self_and_descendants:
+            mapper._dependency_processors.append(DetectKeySwitch(prop))
+
+    def per_property_dependencies(
+        self,
+        uow,
+        parent_saves,
+        child_saves,
+        parent_deletes,
+        child_deletes,
+        after_save,
+        before_delete,
+    ):
+        if self.post_update:
+            parent_post_updates = unitofwork.PostUpdateAll(
+                uow, self.parent.primary_base_mapper, False
+            )
+            parent_pre_updates = unitofwork.PostUpdateAll(
+                uow, self.parent.primary_base_mapper, True
+            )
+
+            uow.dependencies.update(
+                [
+                    (child_saves, after_save),
+                    (parent_saves, after_save),
+                    (after_save, parent_post_updates),
+                    (after_save, parent_pre_updates),
+                    (before_delete, parent_pre_updates),
+                    (parent_pre_updates, child_deletes),
+                    (parent_pre_updates, parent_deletes),
+                ]
+            )
+        else:
+            uow.dependencies.update(
+                [
+                    (child_saves, after_save),
+                    (after_save, parent_saves),
+                    (parent_saves, child_deletes),
+                    (parent_deletes, child_deletes),
+                ]
+            )
+
+    def per_state_dependencies(
+        self,
+        uow,
+        save_parent,
+        delete_parent,
+        child_action,
+        after_save,
+        before_delete,
+        isdelete,
+        childisdelete,
+    ):
+        if self.post_update:
+            if not isdelete:
+                parent_post_updates = unitofwork.PostUpdateAll(
+                    uow, self.parent.primary_base_mapper, False
+                )
+                if childisdelete:
+                    uow.dependencies.update(
+                        [
+                            (after_save, parent_post_updates),
+                            (parent_post_updates, child_action),
+                        ]
+                    )
+                else:
+                    uow.dependencies.update(
+                        [
+                            (save_parent, after_save),
+                            (child_action, after_save),
+                            (after_save, parent_post_updates),
+                        ]
+                    )
+            else:
+                parent_pre_updates = unitofwork.PostUpdateAll(
+                    uow, self.parent.primary_base_mapper, True
+                )
+
+                uow.dependencies.update(
+                    [
+                        (before_delete, parent_pre_updates),
+                        (parent_pre_updates, delete_parent),
+                        (parent_pre_updates, child_action),
+                    ]
+                )
+
+        elif not isdelete:
+            if not childisdelete:
+                uow.dependencies.update(
+                    [(child_action, after_save), (after_save, save_parent)]
+                )
+            else:
+                uow.dependencies.update([(after_save, save_parent)])
+
+        else:
+            if childisdelete:
+                uow.dependencies.update([(delete_parent, child_action)])
+
+    def presort_deletes(self, uowcommit, states):
+        if self.cascade.delete or self.cascade.delete_orphan:
+            for state in states:
+                history = uowcommit.get_attribute_history(
+                    state, self.key, self._passive_delete_flag
+                )
+                if history:
+                    if self.cascade.delete_orphan:
+                        todelete = history.sum()
+                    else:
+                        todelete = history.non_deleted()
+                    for child in todelete:
+                        if child is None:
+                            continue
+                        uowcommit.register_object(
+                            child,
+                            isdelete=True,
+                            operation="delete",
+                            prop=self.prop,
+                        )
+                        t = self.mapper.cascade_iterator("delete", child)
+                        for c, m, st_, dct_ in t:
+                            uowcommit.register_object(st_, isdelete=True)
+
+    def presort_saves(self, uowcommit, states):
+        for state in states:
+            uowcommit.register_object(state, operation="add", prop=self.prop)
+            if self.cascade.delete_orphan:
+                history = uowcommit.get_attribute_history(
+                    state, self.key, self._passive_delete_flag
+                )
+                if history:
+                    for child in history.deleted:
+                        if self.hasparent(child) is False:
+                            uowcommit.register_object(
+                                child,
+                                isdelete=True,
+                                operation="delete",
+                                prop=self.prop,
+                            )
+
+                            t = self.mapper.cascade_iterator("delete", child)
+                            for c, m, st_, dct_ in t:
+                                uowcommit.register_object(st_, isdelete=True)
+
+    def process_deletes(self, uowcommit, states):
+        if (
+            self.post_update
+            and not self.cascade.delete_orphan
+            and not self.passive_deletes == "all"
+        ):
+            # post_update means we have to update our
+            # row to not reference the child object
+            # before we can DELETE the row
+            for state in states:
+                self._synchronize(state, None, None, True, uowcommit)
+                if state and self.post_update:
+                    history = uowcommit.get_attribute_history(
+                        state, self.key, self._passive_delete_flag
+                    )
+                    if history:
+                        self._post_update(
+                            state, uowcommit, history.sum(), is_m2o_delete=True
+                        )
+
+    def process_saves(self, uowcommit, states):
+        for state in states:
+            history = uowcommit.get_attribute_history(
+                state, self.key, attributes.PASSIVE_NO_INITIALIZE
+            )
+            if history:
+                if history.added:
+                    for child in history.added:
+                        self._synchronize(
+                            state, child, None, False, uowcommit, "add"
+                        )
+                elif history.deleted:
+                    self._synchronize(
+                        state, None, None, True, uowcommit, "delete"
+                    )
+                if self.post_update:
+                    self._post_update(state, uowcommit, history.sum())
+
+    def _synchronize(
+        self,
+        state,
+        child,
+        associationrow,
+        clearkeys,
+        uowcommit,
+        operation=None,
+    ):
+        if state is None or (
+            not self.post_update and uowcommit.is_deleted(state)
+        ):
+            return
+
+        if (
+            operation is not None
+            and child is not None
+            and not uowcommit.session._contains_state(child)
+        ):
+            util.warn(
+                "Object of type %s not in session, %s "
+                "operation along '%s' won't proceed"
+                % (mapperutil.state_class_str(child), operation, self.prop)
+            )
+            return
+
+        if clearkeys or child is None:
+            sync.clear(state, self.parent, self.prop.synchronize_pairs)
+        else:
+            self._verify_canload(child)
+            sync.populate(
+                child,
+                self.mapper,
+                state,
+                self.parent,
+                self.prop.synchronize_pairs,
+                uowcommit,
+                False,
+            )
+
+
+class DetectKeySwitch(DependencyProcessor):
+    """For many-to-one relationships with no one-to-many backref,
+    searches for parents through the unit of work when a primary
+    key has changed and updates them.
+
+    Theoretically, this approach could be expanded to support transparent
+    deletion of objects referenced via many-to-one as well, although
+    the current attribute system doesn't do enough bookkeeping for this
+    to be efficient.
+
+    """
+
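+    # an illustrative sketch (hypothetical mapping, names are not from
+    # this module): given
+    #
+    #     class Address(Base):
+    #         user_name = mapped_column(ForeignKey("user.name"))
+    #         user = relationship("User")  # no backref / back_populates
+    #
+    # a flushed change to User's "name" primary key is detected here and
+    # propagated by scanning the identity map for Address states whose
+    # "user" attribute refers to the switched User.
+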
+    def per_property_preprocessors(self, uow):
+        if self.prop._reverse_property:
+            if self.passive_updates:
+                return
+            else:
+                if False in (
+                    prop.passive_updates
+                    for prop in self.prop._reverse_property
+                ):
+                    return
+
+        uow.register_preprocessor(self, False)
+
+    def per_property_flush_actions(self, uow):
+        parent_saves = unitofwork.SaveUpdateAll(uow, self.parent.base_mapper)
+        after_save = unitofwork.ProcessAll(uow, self, False, False)
+        uow.dependencies.update([(parent_saves, after_save)])
+
+    def per_state_flush_actions(self, uow, states, isdelete):
+        pass
+
+    def presort_deletes(self, uowcommit, states):
+        pass
+
+    def presort_saves(self, uow, states):
+        if not self.passive_updates:
+            # for non-passive updates, register in the preprocess stage
+            # so that mapper save_obj() gets a hold of changes
+            self._process_key_switches(states, uow)
+
+    def prop_has_changes(self, uow, states, isdelete):
+        if not isdelete and self.passive_updates:
+            d = self._key_switchers(uow, states)
+            return bool(d)
+
+        return False
+
+    def process_deletes(self, uowcommit, states):
+        assert False
+
+    def process_saves(self, uowcommit, states):
+        # for passive updates, register objects in the process stage
+        # so that we avoid ManyToOneDP registering the object without
+        # the listonly flag in its own preprocess stage (which results
+        # in UPDATE statements being emitted)
+        assert self.passive_updates
+        self._process_key_switches(states, uowcommit)
+
+    def _key_switchers(self, uow, states):
+        switched, notswitched = uow.memo(
+            ("pk_switchers", self), lambda: (set(), set())
+        )
+
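+        # classify each state once per flush: states whose primary key
+        # columns were modified go into "switched", the rest into
+        # "notswitched"; both sets are memoized on the unit of work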
+        allstates = switched.union(notswitched)
+        for s in states:
+            if s not in allstates:
+                if self._pks_changed(uow, s):
+                    switched.add(s)
+                else:
+                    notswitched.add(s)
+        return switched
+
+    def _process_key_switches(self, deplist, uowcommit):
+        switchers = self._key_switchers(uowcommit, deplist)
+        if switchers:
+            # if primary key values have actually changed somewhere, perform
+            # a linear search through the UOW in search of a parent.
+            for state in uowcommit.session.identity_map.all_states():
+                if not issubclass(state.class_, self.parent.class_):
+                    continue
+                dict_ = state.dict
+                related = state.get_impl(self.key).get(
+                    state, dict_, passive=self._passive_update_flag
+                )
+                if (
+                    related is not attributes.PASSIVE_NO_RESULT
+                    and related is not None
+                ):
+                    if self.prop.uselist:
+                        if not related:
+                            continue
+                        related_obj = related[0]
+                    else:
+                        related_obj = related
+                    related_state = attributes.instance_state(related_obj)
+                    if related_state in switchers:
+                        uowcommit.register_object(
+                            state, False, self.passive_updates
+                        )
+                        sync.populate(
+                            related_state,
+                            self.mapper,
+                            state,
+                            self.parent,
+                            self.prop.synchronize_pairs,
+                            uowcommit,
+                            self.passive_updates,
+                        )
+
+    def _pks_changed(self, uowcommit, state):
+        return bool(state.key) and sync.source_modified(
+            uowcommit, state, self.mapper, self.prop.synchronize_pairs
+        )
+
+
+class ManyToManyDP(DependencyProcessor):
+    def per_property_dependencies(
+        self,
+        uow,
+        parent_saves,
+        child_saves,
+        parent_deletes,
+        child_deletes,
+        after_save,
+        before_delete,
+    ):
+        uow.dependencies.update(
+            [
+                (parent_saves, after_save),
+                (child_saves, after_save),
+                (after_save, child_deletes),
+                # a rowswitch on the parent from deleted to saved
+                # can make this one occur, as the "save" may remove
+                # an element from the "deleted" list before we have
+                # a chance to process its child rows
+                (before_delete, parent_saves),
+                (before_delete, parent_deletes),
+                (before_delete, child_deletes),
+                (before_delete, child_saves),
+            ]
+        )
+
+    def per_state_dependencies(
+        self,
+        uow,
+        save_parent,
+        delete_parent,
+        child_action,
+        after_save,
+        before_delete,
+        isdelete,
+        childisdelete,
+    ):
+        if not isdelete:
+            if childisdelete:
+                uow.dependencies.update(
+                    [(save_parent, after_save), (after_save, child_action)]
+                )
+            else:
+                uow.dependencies.update(
+                    [(save_parent, after_save), (child_action, after_save)]
+                )
+        else:
+            uow.dependencies.update(
+                [(before_delete, child_action), (before_delete, delete_parent)]
+            )
+
+    def presort_deletes(self, uowcommit, states):
+        # TODO: no tests fail if this whole
+        # thing is removed !!!!
+        if not self.passive_deletes:
+            # if no passive deletes, load history on
+            # the collection, so that prop_has_changes()
+            # returns True
+            for state in states:
+                uowcommit.get_attribute_history(
+                    state, self.key, self._passive_delete_flag
+                )
+
+    def presort_saves(self, uowcommit, states):
+        if not self.passive_updates:
+            # if no passive updates, load history on
+            # each collection where parent has changed PK,
+            # so that prop_has_changes() returns True
+            for state in states:
+                if self._pks_changed(uowcommit, state):
+                    history = uowcommit.get_attribute_history(
+                        state, self.key, attributes.PASSIVE_OFF
+                    )
+
+        if not self.cascade.delete_orphan:
+            return
+
+        # check for child items removed from the collection
+        # if delete_orphan check is turned on.
+        for state in states:
+            history = uowcommit.get_attribute_history(
+                state, self.key, attributes.PASSIVE_NO_INITIALIZE
+            )
+            if history:
+                for child in history.deleted:
+                    if self.hasparent(child) is False:
+                        uowcommit.register_object(
+                            child,
+                            isdelete=True,
+                            operation="delete",
+                            prop=self.prop,
+                        )
+                        for c, m, st_, dct_ in self.mapper.cascade_iterator(
+                            "delete", child
+                        ):
+                            uowcommit.register_object(st_, isdelete=True)
+
+    def process_deletes(self, uowcommit, states):
+        secondary_delete = []
+        secondary_insert = []
+        secondary_update = []
+
+        processed = self._get_reversed_processed_set(uowcommit)
+        tmp = set()
+        for state in states:
+            # this history should be cached already, as
+            # we loaded it in presort_deletes
+            history = uowcommit.get_attribute_history(
+                state, self.key, self._passive_delete_flag
+            )
+            if history:
+                for child in history.non_added():
+                    if child is None or (
+                        processed is not None and (state, child) in processed
+                    ):
+                        continue
+                    associationrow = {}
+                    if not self._synchronize(
+                        state,
+                        child,
+                        associationrow,
+                        False,
+                        uowcommit,
+                        "delete",
+                    ):
+                        continue
+                    secondary_delete.append(associationrow)
+
+                tmp.update((c, state) for c in history.non_added())
+
+        if processed is not None:
+            processed.update(tmp)
+
+        self._run_crud(
+            uowcommit, secondary_insert, secondary_update, secondary_delete
+        )
+
+    def process_saves(self, uowcommit, states):
+        secondary_delete = []
+        secondary_insert = []
+        secondary_update = []
+
+        processed = self._get_reversed_processed_set(uowcommit)
+        tmp = set()
+
+        for state in states:
+            need_cascade_pks = not self.passive_updates and self._pks_changed(
+                uowcommit, state
+            )
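+            # a non-passive PK change forces a full load of the
+            # collection so existing association rows can be re-keyed
+            # via UPDATE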
+            if need_cascade_pks:
+                passive = (
+                    attributes.PASSIVE_OFF
+                    | attributes.INCLUDE_PENDING_MUTATIONS
+                )
+            else:
+                passive = (
+                    attributes.PASSIVE_NO_INITIALIZE
+                    | attributes.INCLUDE_PENDING_MUTATIONS
+                )
+            history = uowcommit.get_attribute_history(state, self.key, passive)
+            if history:
+                for child in history.added:
+                    if processed is not None and (state, child) in processed:
+                        continue
+                    associationrow = {}
+                    if not self._synchronize(
+                        state, child, associationrow, False, uowcommit, "add"
+                    ):
+                        continue
+                    secondary_insert.append(associationrow)
+                for child in history.deleted:
+                    if processed is not None and (state, child) in processed:
+                        continue
+                    associationrow = {}
+                    if not self._synchronize(
+                        state,
+                        child,
+                        associationrow,
+                        False,
+                        uowcommit,
+                        "delete",
+                    ):
+                        continue
+                    secondary_delete.append(associationrow)
+
+                tmp.update((c, state) for c in history.added + history.deleted)
+
+                if need_cascade_pks:
+                    for child in history.unchanged:
+                        associationrow = {}
+                        sync.update(
+                            state,
+                            self.parent,
+                            associationrow,
+                            "old_",
+                            self.prop.synchronize_pairs,
+                        )
+                        sync.update(
+                            child,
+                            self.mapper,
+                            associationrow,
+                            "old_",
+                            self.prop.secondary_synchronize_pairs,
+                        )
+
+                        secondary_update.append(associationrow)
+
+        if processed is not None:
+            processed.update(tmp)
+
+        self._run_crud(
+            uowcommit, secondary_insert, secondary_update, secondary_delete
+        )
+
+    def _run_crud(
+        self, uowcommit, secondary_insert, secondary_update, secondary_delete
+    ):
+        connection = uowcommit.transaction.connection(self.mapper)
+
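+        # each statement below is built once, using the keys of the first
+        # association row for its criteria, then executed executemany-style
+        # against the full list of parameter dictionaries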
+        if secondary_delete:
+            associationrow = secondary_delete[0]
+            statement = self.secondary.delete().where(
+                sql.and_(
+                    *[
+                        c == sql.bindparam(c.key, type_=c.type)
+                        for c in self.secondary.c
+                        if c.key in associationrow
+                    ]
+                )
+            )
+            result = connection.execute(statement, secondary_delete)
+
+            if (
+                result.supports_sane_multi_rowcount()
+            ) and result.rowcount != len(secondary_delete):
+                raise exc.StaleDataError(
+                    "DELETE statement on table '%s' expected to delete "
+                    "%d row(s); Only %d were matched."
+                    % (
+                        self.secondary.description,
+                        len(secondary_delete),
+                        result.rowcount,
+                    )
+                )
+
+        if secondary_update:
+            associationrow = secondary_update[0]
+            statement = self.secondary.update().where(
+                sql.and_(
+                    *[
+                        c == sql.bindparam("old_" + c.key, type_=c.type)
+                        for c in self.secondary.c
+                        if c.key in associationrow
+                    ]
+                )
+            )
+            result = connection.execute(statement, secondary_update)
+
+            if (
+                result.supports_sane_multi_rowcount()
+            ) and result.rowcount != len(secondary_update):
+                raise exc.StaleDataError(
+                    "UPDATE statement on table '%s' expected to update "
+                    "%d row(s); Only %d were matched."
+                    % (
+                        self.secondary.description,
+                        len(secondary_update),
+                        result.rowcount,
+                    )
+                )
+
+        if secondary_insert:
+            statement = self.secondary.insert()
+            connection.execute(statement, secondary_insert)
+
+    def _synchronize(
+        self, state, child, associationrow, clearkeys, uowcommit, operation
+    ):
+        # this checks for None if uselist=True
+        self._verify_canload(child)
+
+        # but if uselist=False we get here.  If child is None,
+        # no association row can be generated, so return.
+        if child is None:
+            return False
+
+        if child is not None and not uowcommit.session._contains_state(child):
+            if not child.deleted:
+                util.warn(
+                    "Object of type %s not in session, %s "
+                    "operation along '%s' won't proceed"
+                    % (mapperutil.state_class_str(child), operation, self.prop)
+                )
+            return False
+
+        sync.populate_dict(
+            state, self.parent, associationrow, self.prop.synchronize_pairs
+        )
+        sync.populate_dict(
+            child,
+            self.mapper,
+            associationrow,
+            self.prop.secondary_synchronize_pairs,
+        )
+
+        return True
+
+    def _pks_changed(self, uowcommit, state):
+        return sync.source_modified(
+            uowcommit, state, self.parent, self.prop.synchronize_pairs
+        )
+
+
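+# maps each relationship direction symbol to the DependencyProcessor
+# subclass implementing its flush-time behavior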
+_direction_to_processor = {
+    ONETOMANY: OneToManyDP,
+    MANYTOONE: ManyToOneDP,
+    MANYTOMANY: ManyToManyDP,
+}
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/descriptor_props.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/descriptor_props.py
new file mode 100644
index 00000000..f01cc178
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/descriptor_props.py
@@ -0,0 +1,1077 @@
+# orm/descriptor_props.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Descriptor properties are more "auxiliary" properties
+that exist as configurational elements, but don't participate
+as actively in the load/persist ORM loop.
+
+"""
+from __future__ import annotations
+
+from dataclasses import is_dataclass
+import inspect
+import itertools
+import operator
+import typing
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import List
+from typing import NoReturn
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+import weakref
+
+from . import attributes
+from . import util as orm_util
+from .base import _DeclarativeMapped
+from .base import LoaderCallableStatus
+from .base import Mapped
+from .base import PassiveFlag
+from .base import SQLORMOperations
+from .interfaces import _AttributeOptions
+from .interfaces import _IntrospectsAnnotations
+from .interfaces import _MapsColumns
+from .interfaces import MapperProperty
+from .interfaces import PropComparator
+from .util import _none_set
+from .util import de_stringify_annotation
+from .. import event
+from .. import exc as sa_exc
+from .. import schema
+from .. import sql
+from .. import util
+from ..sql import expression
+from ..sql import operators
+from ..sql.elements import BindParameter
+from ..util.typing import get_args
+from ..util.typing import is_fwd_ref
+from ..util.typing import is_pep593
+
+
+if typing.TYPE_CHECKING:
+    from ._typing import _InstanceDict
+    from ._typing import _RegistryType
+    from .attributes import History
+    from .attributes import InstrumentedAttribute
+    from .attributes import QueryableAttribute
+    from .context import ORMCompileState
+    from .decl_base import _ClassScanMapperConfig
+    from .mapper import Mapper
+    from .properties import ColumnProperty
+    from .properties import MappedColumn
+    from .state import InstanceState
+    from ..engine.base import Connection
+    from ..engine.row import Row
+    from ..sql._typing import _DMLColumnArgument
+    from ..sql._typing import _InfoType
+    from ..sql.elements import ClauseList
+    from ..sql.elements import ColumnElement
+    from ..sql.operators import OperatorType
+    from ..sql.schema import Column
+    from ..sql.selectable import Select
+    from ..util.typing import _AnnotationScanType
+    from ..util.typing import CallableReference
+    from ..util.typing import DescriptorReference
+    from ..util.typing import RODescriptorReference
+
+_T = TypeVar("_T", bound=Any)
+_PT = TypeVar("_PT", bound=Any)
+
+
+class DescriptorProperty(MapperProperty[_T]):
+    """:class:`.MapperProperty` which proxies access to a
+    user-defined descriptor."""
+
+    doc: Optional[str] = None
+
+    uses_objects = False
+    _links_to_entity = False
+
+    descriptor: DescriptorReference[Any]
+
+    def get_history(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
+    ) -> History:
+        raise NotImplementedError()
+
+    def instrument_class(self, mapper: Mapper[Any]) -> None:
+        prop = self
+
+        class _ProxyImpl(attributes.AttributeImpl):
+            accepts_scalar_loader = False
+            load_on_unexpire = True
+            collection = False
+
+            @property
+            def uses_objects(self) -> bool:  # type: ignore
+                return prop.uses_objects
+
+            def __init__(self, key: str):
+                self.key = key
+
+            def get_history(
+                self,
+                state: InstanceState[Any],
+                dict_: _InstanceDict,
+                passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
+            ) -> History:
+                return prop.get_history(state, dict_, passive)
+
+        if self.descriptor is None:
+            desc = getattr(mapper.class_, self.key, None)
+            if mapper._is_userland_descriptor(self.key, desc):
+                self.descriptor = desc
+
+        if self.descriptor is None:
+
+            def fset(obj: Any, value: Any) -> None:
+                setattr(obj, self.name, value)
+
+            def fdel(obj: Any) -> None:
+                delattr(obj, self.name)
+
+            def fget(obj: Any) -> Any:
+                return getattr(obj, self.name)
+
+            self.descriptor = property(fget=fget, fset=fset, fdel=fdel)
+
+        proxy_attr = attributes.create_proxied_attribute(self.descriptor)(
+            self.parent.class_,
+            self.key,
+            self.descriptor,
+            lambda: self._comparator_factory(mapper),
+            doc=self.doc,
+            original_property=self,
+        )
+        proxy_attr.impl = _ProxyImpl(self.key)
+        mapper.class_manager.instrument_attribute(self.key, proxy_attr)
+
+
+_CompositeAttrType = Union[
+    str,
+    "Column[_T]",
+    "MappedColumn[_T]",
+    "InstrumentedAttribute[_T]",
+    "Mapped[_T]",
+]
+
+
+_CC = TypeVar("_CC", bound=Any)
+
+
+_composite_getters: weakref.WeakKeyDictionary[
+    Type[Any], Callable[[Any], Tuple[Any, ...]]
+] = weakref.WeakKeyDictionary()
+
+
+class CompositeProperty(
+    _MapsColumns[_CC], _IntrospectsAnnotations, DescriptorProperty[_CC]
+):
+    """Defines a "composite" mapped attribute, representing a collection
+    of columns as one attribute.
+
+    :class:`.CompositeProperty` is constructed using the :func:`.composite`
+    function.
+
+    .. seealso::
+
+        :ref:`mapper_composite`
+
+    """
+
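+    # a minimal usage sketch (class and column names are illustrative):
+    #
+    #     import dataclasses
+    #
+    #     @dataclasses.dataclass
+    #     class Point:
+    #         x: int
+    #         y: int
+    #
+    #     class Vertex(Base):
+    #         __tablename__ = "vertices"
+    #         id: Mapped[int] = mapped_column(primary_key=True)
+    #         start: Mapped[Point] = composite(
+    #             mapped_column("x1"), mapped_column("y1")
+    #         )
+    #
+    # for a dataclass, the per-attribute accessor is generated
+    # automatically; any other composite_class must define
+    # __composite_values__() returning its column values as a tuple.
+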
+    composite_class: Union[Type[_CC], Callable[..., _CC]]
+    attrs: Tuple[_CompositeAttrType[Any], ...]
+
+    _generated_composite_accessor: CallableReference[
+        Optional[Callable[[_CC], Tuple[Any, ...]]]
+    ]
+
+    comparator_factory: Type[Comparator[_CC]]
+
+    def __init__(
+        self,
+        _class_or_attr: Union[
+            None, Type[_CC], Callable[..., _CC], _CompositeAttrType[Any]
+        ] = None,
+        *attrs: _CompositeAttrType[Any],
+        attribute_options: Optional[_AttributeOptions] = None,
+        active_history: bool = False,
+        deferred: bool = False,
+        group: Optional[str] = None,
+        comparator_factory: Optional[Type[Comparator[_CC]]] = None,
+        info: Optional[_InfoType] = None,
+        **kwargs: Any,
+    ):
+        super().__init__(attribute_options=attribute_options)
+
+        if isinstance(_class_or_attr, (Mapped, str, sql.ColumnElement)):
+            self.attrs = (_class_or_attr,) + attrs
+            # will initialize within declarative_scan
+            self.composite_class = None  # type: ignore
+        else:
+            self.composite_class = _class_or_attr  # type: ignore
+            self.attrs = attrs
+
+        self.active_history = active_history
+        self.deferred = deferred
+        self.group = group
+        self.comparator_factory = (
+            comparator_factory
+            if comparator_factory is not None
+            else self.__class__.Comparator
+        )
+        self._generated_composite_accessor = None
+        if info is not None:
+            self.info.update(info)
+
+        util.set_creation_order(self)
+        self._create_descriptor()
+        self._init_accessor()
+
+    def instrument_class(self, mapper: Mapper[Any]) -> None:
+        super().instrument_class(mapper)
+        self._setup_event_handlers()
+
+    def _composite_values_from_instance(self, value: _CC) -> Tuple[Any, ...]:
+        if self._generated_composite_accessor:
+            return self._generated_composite_accessor(value)
+        else:
+            try:
+                accessor = value.__composite_values__
+            except AttributeError as ae:
+                raise sa_exc.InvalidRequestError(
+                    f"Composite class {self.composite_class.__name__} is not "
+                    f"a dataclass and does not define a __composite_values__()"
+                    " method; can't get state"
+                ) from ae
+            else:
+                return accessor()  # type: ignore
+
+    def do_init(self) -> None:
+        """Initialization which occurs after the :class:`.Composite`
+        has been associated with its parent mapper.
+
+        """
+        self._setup_arguments_on_columns()
+
+    _COMPOSITE_FGET = object()
+
+    def _create_descriptor(self) -> None:
+        """Create the Python descriptor that will serve as
+        the access point on instances of the mapped class.
+
+        """
+
+        def fget(instance: Any) -> Any:
+            dict_ = attributes.instance_dict(instance)
+            state = attributes.instance_state(instance)
+
+            if self.key not in dict_:
+                # key not present.  Iterate through related
+                # attributes, retrieve their values.  This
+                # ensures they all load.
+                values = [
+                    getattr(instance, key) for key in self._attribute_keys
+                ]
+
+                # current expected behavior here is that the composite is
+                # created on access if the object is persistent or if
+                # col attributes have non-None values.  This would be better
+                # if the composite were created unconditionally,
+                # but that would be a behavioral change.
+                if self.key not in dict_ and (
+                    state.key is not None or not _none_set.issuperset(values)
+                ):
+                    dict_[self.key] = self.composite_class(*values)
+                    state.manager.dispatch.refresh(
+                        state, self._COMPOSITE_FGET, [self.key]
+                    )
+
+            return dict_.get(self.key, None)
+
+        def fset(instance: Any, value: Any) -> None:
+            dict_ = attributes.instance_dict(instance)
+            state = attributes.instance_state(instance)
+            attr = state.manager[self.key]
+
+            if attr.dispatch._active_history:
+                previous = fget(instance)
+            else:
+                previous = dict_.get(self.key, LoaderCallableStatus.NO_VALUE)
+
+            for fn in attr.dispatch.set:
+                value = fn(state, value, previous, attr.impl)
+            dict_[self.key] = value
+            if value is None:
+                for key in self._attribute_keys:
+                    setattr(instance, key, None)
+            else:
+                for key, value in zip(
+                    self._attribute_keys,
+                    self._composite_values_from_instance(value),
+                ):
+                    setattr(instance, key, value)
+
+        def fdel(instance: Any) -> None:
+            state = attributes.instance_state(instance)
+            dict_ = attributes.instance_dict(instance)
+            attr = state.manager[self.key]
+
+            if attr.dispatch._active_history:
+                previous = fget(instance)
+                dict_.pop(self.key, None)
+            else:
+                previous = dict_.pop(self.key, LoaderCallableStatus.NO_VALUE)
+
+            attr.dispatch.remove(state, previous, attr.impl)
+            for key in self._attribute_keys:
+                setattr(instance, key, None)
+
+        self.descriptor = property(fget, fset, fdel)
+
+    @util.preload_module("sqlalchemy.orm.properties")
+    def declarative_scan(
+        self,
+        decl_scan: _ClassScanMapperConfig,
+        registry: _RegistryType,
+        cls: Type[Any],
+        originating_module: Optional[str],
+        key: str,
+        mapped_container: Optional[Type[Mapped[Any]]],
+        annotation: Optional[_AnnotationScanType],
+        extracted_mapped_annotation: Optional[_AnnotationScanType],
+        is_dataclass_field: bool,
+    ) -> None:
+        MappedColumn = util.preloaded.orm_properties.MappedColumn
+        if (
+            self.composite_class is None
+            and extracted_mapped_annotation is None
+        ):
+            self._raise_for_required(key, cls)
+        argument = extracted_mapped_annotation
+
+        if is_pep593(argument):
+            argument = get_args(argument)[0]
+
+        if argument and self.composite_class is None:
+            if isinstance(argument, str) or is_fwd_ref(
+                argument, check_generic=True
+            ):
+                if originating_module is None:
+                    str_arg = (
+                        argument.__forward_arg__
+                        if hasattr(argument, "__forward_arg__")
+                        else str(argument)
+                    )
+                    raise sa_exc.ArgumentError(
+                        f"Can't use forward ref {argument} for composite "
+                        f"class argument; set up the type as Mapped[{str_arg}]"
+                    )
+                argument = de_stringify_annotation(
+                    cls, argument, originating_module, include_generic=True
+                )
+
+            self.composite_class = argument
+
+        if is_dataclass(self.composite_class):
+            self._setup_for_dataclass(registry, cls, originating_module, key)
+        else:
+            for attr in self.attrs:
+                if (
+                    isinstance(attr, (MappedColumn, schema.Column))
+                    and attr.name is None
+                ):
+                    raise sa_exc.ArgumentError(
+                        "Composite class column arguments must be named "
+                        "unless a dataclass is used"
+                    )
+        self._init_accessor()
+
+    def _init_accessor(self) -> None:
+        if is_dataclass(self.composite_class) and not hasattr(
+            self.composite_class, "__composite_values__"
+        ):
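+            # derive a getter from the dataclass __init__ signature;
+            # attrgetter with several names returns a tuple, while a
+            # single name returns a scalar and needs tuple wrapping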
+            insp = inspect.signature(self.composite_class)
+            getter = operator.attrgetter(
+                *[p.name for p in insp.parameters.values()]
+            )
+            if len(insp.parameters) == 1:
+                self._generated_composite_accessor = lambda obj: (getter(obj),)
+            else:
+                self._generated_composite_accessor = getter
+
+        if (
+            self.composite_class is not None
+            and isinstance(self.composite_class, type)
+            and self.composite_class not in _composite_getters
+        ):
+            if self._generated_composite_accessor is not None:
+                _composite_getters[self.composite_class] = (
+                    self._generated_composite_accessor
+                )
+            elif hasattr(self.composite_class, "__composite_values__"):
+                _composite_getters[self.composite_class] = (
+                    lambda obj: obj.__composite_values__()
+                )
+
+    @util.preload_module("sqlalchemy.orm.properties")
+    @util.preload_module("sqlalchemy.orm.decl_base")
+    def _setup_for_dataclass(
+        self,
+        registry: _RegistryType,
+        cls: Type[Any],
+        originating_module: Optional[str],
+        key: str,
+    ) -> None:
+        MappedColumn = util.preloaded.orm_properties.MappedColumn
+
+        decl_base = util.preloaded.orm_decl_base
+
+        insp = inspect.signature(self.composite_class)
+        for param, attr in itertools.zip_longest(
+            insp.parameters.values(), self.attrs
+        ):
+            if param is None:
+                raise sa_exc.ArgumentError(
+                    f"number of composite attributes "
+                    f"{len(self.attrs)} exceeds the number of "
+                    f"attributes in class "
+                    f"{self.composite_class.__name__} ({len(insp.parameters)})"
+                )
+            if attr is None:
+                # fill in missing attr spots with empty MappedColumn
+                attr = MappedColumn()
+                self.attrs += (attr,)
+
+            if isinstance(attr, MappedColumn):
+                attr.declarative_scan_for_composite(
+                    registry,
+                    cls,
+                    originating_module,
+                    key,
+                    param.name,
+                    param.annotation,
+                )
+            elif isinstance(attr, schema.Column):
+                decl_base._undefer_column_name(param.name, attr)
+
+    @util.memoized_property
+    def _comparable_elements(self) -> Sequence[QueryableAttribute[Any]]:
+        return [getattr(self.parent.class_, prop.key) for prop in self.props]
+
+    @util.memoized_property
+    @util.preload_module("orm.properties")
+    def props(self) -> Sequence[MapperProperty[Any]]:
+        props = []
+        MappedColumn = util.preloaded.orm_properties.MappedColumn
+
+        for attr in self.attrs:
+            if isinstance(attr, str):
+                prop = self.parent.get_property(attr, _configure_mappers=False)
+            elif isinstance(attr, schema.Column):
+                prop = self.parent._columntoproperty[attr]
+            elif isinstance(attr, MappedColumn):
+                prop = self.parent._columntoproperty[attr.column]
+            elif isinstance(attr, attributes.InstrumentedAttribute):
+                prop = attr.property
+            else:
+                prop = None
+
+            if not isinstance(prop, MapperProperty):
+                raise sa_exc.ArgumentError(
+                    "Composite expects Column objects or mapped "
+                    f"attributes/attribute names as arguments, got: {attr!r}"
+                )
+
+            props.append(prop)
+        return props
+
+    @util.non_memoized_property
+    @util.preload_module("orm.properties")
+    def columns(self) -> Sequence[Column[Any]]:
+        MappedColumn = util.preloaded.orm_properties.MappedColumn
+        return [
+            a.column if isinstance(a, MappedColumn) else a
+            for a in self.attrs
+            if isinstance(a, (schema.Column, MappedColumn))
+        ]
+
+    @property
+    def mapper_property_to_assign(self) -> Optional[MapperProperty[_CC]]:
+        return self
+
+    @property
+    def columns_to_assign(self) -> List[Tuple[schema.Column[Any], int]]:
+        return [(c, 0) for c in self.columns if c.table is None]
+
+    @util.preload_module("orm.properties")
+    def _setup_arguments_on_columns(self) -> None:
+        """Propagate configuration arguments made on this composite
+        to the target columns, for those that apply.
+
+        """
+        ColumnProperty = util.preloaded.orm_properties.ColumnProperty
+
+        for prop in self.props:
+            if not isinstance(prop, ColumnProperty):
+                continue
+            else:
+                cprop = prop
+
+            cprop.active_history = self.active_history
+            if self.deferred:
+                cprop.deferred = self.deferred
+                cprop.strategy_key = (("deferred", True), ("instrument", True))
+            cprop.group = self.group
+
+    def _setup_event_handlers(self) -> None:
+        """Establish events that populate/expire the composite attribute."""
+
+        def load_handler(
+            state: InstanceState[Any], context: ORMCompileState
+        ) -> None:
+            _load_refresh_handler(state, context, None, is_refresh=False)
+
+        def refresh_handler(
+            state: InstanceState[Any],
+            context: ORMCompileState,
+            to_load: Optional[Sequence[str]],
+        ) -> None:
+            # note this corresponds to sqlalchemy.ext.mutable load_attrs()
+
+            if not to_load or (
+                {self.key}.union(self._attribute_keys)
+            ).intersection(to_load):
+                _load_refresh_handler(state, context, to_load, is_refresh=True)
+
+        def _load_refresh_handler(
+            state: InstanceState[Any],
+            context: ORMCompileState,
+            to_load: Optional[Sequence[str]],
+            is_refresh: bool,
+        ) -> None:
+            dict_ = state.dict
+
+            # if context indicates we are coming from the
+            # fget() handler, the value has already been set; skip the
+            # handler here. (other handlers like mutablecomposite will
+            # still want to catch it)
+            # there's an insufficiency here in that the fget() handler
+            # really should not be using the refresh event and there
+            # should be some other event that mutablecomposite can
+            # subscribe to for this.
+
+            if (
+                not is_refresh or context is self._COMPOSITE_FGET
+            ) and self.key in dict_:
+                return
+
+            # if column elements aren't loaded, skip.
+            # __get__() will initiate a load for those
+            # columns
+            for k in self._attribute_keys:
+                if k not in dict_:
+                    return
+
+            dict_[self.key] = self.composite_class(
+                *[state.dict[key] for key in self._attribute_keys]
+            )
+
+        def expire_handler(
+            state: InstanceState[Any], keys: Optional[Sequence[str]]
+        ) -> None:
+            if keys is None or set(self._attribute_keys).intersection(keys):
+                state.dict.pop(self.key, None)
+
+        def insert_update_handler(
+            mapper: Mapper[Any],
+            connection: Connection,
+            state: InstanceState[Any],
+        ) -> None:
+            """After an insert or update, some columns may be expired due
+            to server side defaults, or re-populated due to client side
+            defaults.  Pop out the composite value here so that it
+            recreates.
+
+            """
+
+            state.dict.pop(self.key, None)
+
+        event.listen(
+            self.parent, "after_insert", insert_update_handler, raw=True
+        )
+        event.listen(
+            self.parent, "after_update", insert_update_handler, raw=True
+        )
+        event.listen(
+            self.parent, "load", load_handler, raw=True, propagate=True
+        )
+        event.listen(
+            self.parent, "refresh", refresh_handler, raw=True, propagate=True
+        )
+        event.listen(
+            self.parent, "expire", expire_handler, raw=True, propagate=True
+        )
+
+        proxy_attr = self.parent.class_manager[self.key]
+        proxy_attr.impl.dispatch = proxy_attr.dispatch  # type: ignore
+        proxy_attr.impl.dispatch._active_history = self.active_history
+
+        # TODO: need a deserialize hook here
+
+    @util.memoized_property
+    def _attribute_keys(self) -> Sequence[str]:
+        return [prop.key for prop in self.props]
+
+    def _populate_composite_bulk_save_mappings_fn(
+        self,
+    ) -> Callable[[Dict[str, Any]], None]:
+        if self._generated_composite_accessor:
+            get_values = self._generated_composite_accessor
+        else:
+
+            def get_values(val: Any) -> Tuple[Any, ...]:
+                return val.__composite_values__()  # type: ignore
+
+        attrs = [prop.key for prop in self.props]
+
+        def populate(dest_dict: Dict[str, Any]) -> None:
+            dest_dict.update(
+                zip(attrs, get_values(dest_dict.pop(self.key)))
+            )
+
+        return populate
+
+    def get_history(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
+    ) -> History:
+        """Provided for userland code that uses attributes.get_history()."""
+
+        added: List[Any] = []
+        deleted: List[Any] = []
+
+        has_history = False
+        for prop in self.props:
+            key = prop.key
+            hist = state.manager[key].impl.get_history(state, dict_)
+            if hist.has_changes():
+                has_history = True
+
+            non_deleted = hist.non_deleted()
+            if non_deleted:
+                added.extend(non_deleted)
+            else:
+                added.append(None)
+            if hist.deleted:
+                deleted.extend(hist.deleted)
+            else:
+                deleted.append(None)
+
+        if has_history:
+            return attributes.History(
+                [self.composite_class(*added)],
+                (),
+                [self.composite_class(*deleted)],
+            )
+        else:
+            return attributes.History((), [self.composite_class(*added)], ())
+
+    def _comparator_factory(
+        self, mapper: Mapper[Any]
+    ) -> Composite.Comparator[_CC]:
+        return self.comparator_factory(self, mapper)
+
+    class CompositeBundle(orm_util.Bundle[_T]):
+        def __init__(
+            self,
+            property_: Composite[_T],
+            expr: ClauseList,
+        ):
+            self.property = property_
+            super().__init__(property_.key, *expr)
+
+        def create_row_processor(
+            self,
+            query: Select[Any],
+            procs: Sequence[Callable[[Row[Any]], Any]],
+            labels: Sequence[str],
+        ) -> Callable[[Row[Any]], Any]:
+            def proc(row: Row[Any]) -> Any:
+                return self.property.composite_class(
+                    *[p(row) for p in procs]
+                )
+
+            return proc
+
+    class Comparator(PropComparator[_PT]):
+        """Produce boolean, comparison, and other operators for
+        :class:`.Composite` attributes.
+
+        See the example in :ref:`composite_operations` for an overview
+        of usage, as well as the documentation for :class:`.PropComparator`.
+
+        .. seealso::
+
+            :class:`.PropComparator`
+
+            :class:`.ColumnOperators`
+
+            :ref:`types_operators`
+
+            :attr:`.TypeEngine.comparator_factory`
+
+        """
+
+        # https://github.com/python/mypy/issues/4266
+        __hash__ = None  # type: ignore
+
+        prop: RODescriptorReference[Composite[_PT]]
+
+        @util.memoized_property
+        def clauses(self) -> ClauseList:
+            return expression.ClauseList(
+                group=False, *self._comparable_elements
+            )
+
+        def __clause_element__(self) -> CompositeProperty.CompositeBundle[_PT]:
+            return self.expression
+
+        @util.memoized_property
+        def expression(self) -> CompositeProperty.CompositeBundle[_PT]:
+            clauses = self.clauses._annotate(
+                {
+                    "parententity": self._parententity,
+                    "parentmapper": self._parententity,
+                    "proxy_key": self.prop.key,
+                }
+            )
+            return CompositeProperty.CompositeBundle(self.prop, clauses)
+
+        def _bulk_update_tuples(
+            self, value: Any
+        ) -> Sequence[Tuple[_DMLColumnArgument, Any]]:
+            if isinstance(value, BindParameter):
+                value = value.value
+
+            values: Sequence[Any]
+
+            if value is None:
+                values = [None] * len(self.prop._attribute_keys)
+            elif isinstance(self.prop.composite_class, type) and isinstance(
+                value, self.prop.composite_class
+            ):
+                values = self.prop._composite_values_from_instance(
+                    value  # type: ignore[arg-type]
+                )
+            else:
+                raise sa_exc.ArgumentError(
+                    "Can't UPDATE composite attribute %s to %r"
+                    % (self.prop, value)
+                )
+
+            return list(zip(self._comparable_elements, values))
+
+        @util.memoized_property
+        def _comparable_elements(self) -> Sequence[QueryableAttribute[Any]]:
+            if self._adapt_to_entity:
+                return [
+                    getattr(self._adapt_to_entity.entity, prop.key)
+                    for prop in self.prop._comparable_elements
+                ]
+            else:
+                return self.prop._comparable_elements
+
+        def __eq__(self, other: Any) -> ColumnElement[bool]:  # type: ignore[override]  # noqa: E501
+            return self._compare(operators.eq, other)
+
+        def __ne__(self, other: Any) -> ColumnElement[bool]:  # type: ignore[override]  # noqa: E501
+            return self._compare(operators.ne, other)
+
+        def __lt__(self, other: Any) -> ColumnElement[bool]:
+            return self._compare(operators.lt, other)
+
+        def __gt__(self, other: Any) -> ColumnElement[bool]:
+            return self._compare(operators.gt, other)
+
+        def __le__(self, other: Any) -> ColumnElement[bool]:
+            return self._compare(operators.le, other)
+
+        def __ge__(self, other: Any) -> ColumnElement[bool]:
+            return self._compare(operators.ge, other)
+
+        # what might be interesting would be if we created
+        # an instance of the composite class itself with
+        # the columns as data members, then used "hybrid style" comparison
+        # to create these comparisons.  then your Point.__eq__() method
+        # could be where comparison behavior is defined for SQL also.
+        # Likely not a good choice for default behavior though; not clear
+        # how it would work with dataclasses, etc., and there's no demand
+        # for any of this anyway.
+        def _compare(
+            self, operator: OperatorType, other: Any
+        ) -> ColumnElement[bool]:
+            values: Sequence[Any]
+            if other is None:
+                values = [None] * len(self.prop._comparable_elements)
+            else:
+                values = self.prop._composite_values_from_instance(other)
+            comparisons = [
+                operator(a, b)
+                for a, b in zip(self.prop._comparable_elements, values)
+            ]
+            if self._adapt_to_entity:
+                assert self.adapter is not None
+                comparisons = [self.adapter(x) for x in comparisons]
+            return sql.and_(*comparisons)
+
+    def __str__(self) -> str:
+        return str(self.parent.class_.__name__) + "." + self.key
+
+
+class Composite(CompositeProperty[_T], _DeclarativeMapped[_T]):
+    """Declarative-compatible front-end for the :class:`.CompositeProperty`
+    class.
+
+    Public constructor is the :func:`_orm.composite` function.
+
+    .. versionchanged:: 2.0 Added :class:`_orm.Composite` as a Declarative
+       compatible subclass of :class:`_orm.CompositeProperty`.
+
+    .. seealso::
+
+        :ref:`mapper_composite`
+
+    """
+
+    inherit_cache = True
+    """:meta private:"""
+
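+
+# Editor's note: an illustrative sketch, not part of this module, of how
+# CompositeProperty surfaces through the public ``composite()`` function.
+# ``Point`` and ``Vertex`` are hypothetical example classes; imports are
+# kept local so the sketch stays self-contained.
+def _composite_usage_sketch() -> None:
+    import dataclasses
+
+    from sqlalchemy import Column, Integer, select
+    from sqlalchemy.orm import composite, registry
+
+    @dataclasses.dataclass
+    class Point:
+        x: int
+        y: int
+
+    reg = registry()
+
+    @reg.mapped
+    class Vertex:
+        __tablename__ = "vertices"
+
+        id = Column(Integer, primary_key=True)
+        x1 = Column(Integer)
+        y1 = Column(Integer)
+        x2 = Column(Integer)
+        y2 = Column(Integer)
+
+        # two columns per Point-valued attribute; as Point is a
+        # dataclass, no __composite_values__() method is required
+        start = composite(Point, x1, y1)
+        end = composite(Point, x2, y2)
+
+    # Comparator._compare() expands this to
+    # "x1 = :param_1 AND y1 = :param_2"
+    print(select(Vertex).where(Vertex.start == Point(3, 4)))
+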
+
+class ConcreteInheritedProperty(DescriptorProperty[_T]):
+    """A 'do nothing' :class:`.MapperProperty` that disables
+    an attribute on a concrete subclass that is only present
+    on the inherited mapper, not the concrete classes' mapper.
+
+    Cases where this occurs include:
+
+    * When the superclass mapper is mapped against a
+      "polymorphic union", which includes all attributes from
+      all subclasses.
+    * When a relationship() is configured on an inherited mapper,
+      but not on the subclass mapper.  Concrete mappers require
+      that relationship() is configured explicitly on each
+      subclass.
+
+    """
+
+    def _comparator_factory(
+        self, mapper: Mapper[Any]
+    ) -> Type[PropComparator[_T]]:
+        comparator_callable = None
+
+        for m in self.parent.iterate_to_root():
+            p = m._props[self.key]
+            if getattr(p, "comparator_factory", None) is not None:
+                comparator_callable = p.comparator_factory
+                break
+        assert comparator_callable is not None
+        return comparator_callable(p, mapper)  # type: ignore
+
+    def __init__(self) -> None:
+        super().__init__()
+
+        def warn() -> NoReturn:
+            raise AttributeError(
+                "Concrete %s does not implement "
+                "attribute %r at the instance level.  Add "
+                "this property explicitly to %s."
+                % (self.parent, self.key, self.parent)
+            )
+
+        class NoninheritedConcreteProp:
+            def __set__(s: Any, obj: Any, value: Any) -> NoReturn:
+                warn()
+
+            def __delete__(s: Any, obj: Any) -> NoReturn:
+                warn()
+
+            def __get__(s: Any, obj: Any, owner: Any) -> Any:
+                if obj is None:
+                    return self.descriptor
+                warn()
+
+        self.descriptor = NoninheritedConcreteProp()
+
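+
+# Editor's note: a hedged sketch, not part of this module, of the scenario
+# ConcreteInheritedProperty guards against: with concrete table
+# inheritance, a relationship() configured on the base mapper is not
+# inherited, and must be configured again on each concrete subclass.
+# ``Company``, ``Employee`` and ``Manager`` are hypothetical classes.
+def _concrete_inheritance_sketch() -> None:
+    from sqlalchemy import Column, ForeignKey, Integer
+    from sqlalchemy.orm import registry, relationship
+
+    reg = registry()
+
+    @reg.mapped
+    class Company:
+        __tablename__ = "company"
+        id = Column(Integer, primary_key=True)
+
+    @reg.mapped
+    class Employee:
+        __tablename__ = "employee"
+        id = Column(Integer, primary_key=True)
+        company_id = Column(ForeignKey("company.id"))
+        company = relationship("Company")
+
+    @reg.mapped
+    class Manager(Employee):
+        __tablename__ = "manager"
+        __mapper_args__ = {"concrete": True}
+
+        id = Column(Integer, primary_key=True)
+        company_id = Column(ForeignKey("company.id"))
+
+        # without this line, Manager.company would be a
+        # ConcreteInheritedProperty whose descriptor raises
+        # AttributeError on instance-level access
+        company = relationship("Company")
+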
+
+class SynonymProperty(DescriptorProperty[_T]):
+    """Denote an attribute name as a synonym to a mapped property,
+    in that the attribute will mirror the value and expression behavior
+    of another attribute.
+
+    :class:`.Synonym` is constructed using the :func:`_orm.synonym`
+    function.
+
+    .. seealso::
+
+        :ref:`synonyms` - Overview of synonyms
+
+    """
+
+    comparator_factory: Optional[Type[PropComparator[_T]]]
+
+    def __init__(
+        self,
+        name: str,
+        map_column: Optional[bool] = None,
+        descriptor: Optional[Any] = None,
+        comparator_factory: Optional[Type[PropComparator[_T]]] = None,
+        attribute_options: Optional[_AttributeOptions] = None,
+        info: Optional[_InfoType] = None,
+        doc: Optional[str] = None,
+    ):
+        super().__init__(attribute_options=attribute_options)
+
+        self.name = name
+        self.map_column = map_column
+        self.descriptor = descriptor
+        self.comparator_factory = comparator_factory
+        if doc:
+            self.doc = doc
+        elif descriptor and descriptor.__doc__:
+            self.doc = descriptor.__doc__
+        else:
+            self.doc = None
+        if info:
+            self.info.update(info)
+
+        util.set_creation_order(self)
+
+    if not TYPE_CHECKING:
+
+        @property
+        def uses_objects(self) -> bool:
+            return getattr(self.parent.class_, self.name).impl.uses_objects
+
+    # TODO: when initialized, check _proxied_object,
+    # emit a warning if it's not a column-based property
+
+    @util.memoized_property
+    def _proxied_object(
+        self,
+    ) -> Union[MapperProperty[_T], SQLORMOperations[_T]]:
+        attr = getattr(self.parent.class_, self.name)
+        if not hasattr(attr, "property") or not isinstance(
+            attr.property, MapperProperty
+        ):
+            # attribute is a non-MapperProperty proxy such as
+            # a hybrid or association proxy
+            if isinstance(attr, attributes.QueryableAttribute):
+                return attr.comparator
+            elif isinstance(attr, SQLORMOperations):
+                # association proxy comes here
+                return attr
+
+            raise sa_exc.InvalidRequestError(
+                """synonym() attribute "%s.%s" only supports """
+                """ORM mapped attributes, got %r"""
+                % (self.parent.class_.__name__, self.name, attr)
+            )
+        return attr.property
+
+    def _comparator_factory(self, mapper: Mapper[Any]) -> SQLORMOperations[_T]:
+        prop = self._proxied_object
+
+        if isinstance(prop, MapperProperty):
+            if self.comparator_factory:
+                comp = self.comparator_factory(prop, mapper)
+            else:
+                comp = prop.comparator_factory(prop, mapper)
+            return comp
+        else:
+            return prop
+
+    def get_history(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
+    ) -> History:
+        attr: QueryableAttribute[Any] = getattr(self.parent.class_, self.name)
+        return attr.impl.get_history(state, dict_, passive=passive)
+
+    @util.preload_module("sqlalchemy.orm.properties")
+    def set_parent(self, parent: Mapper[Any], init: bool) -> None:
+        properties = util.preloaded.orm_properties
+
+        if self.map_column:
+            # implement the 'map_column' option.
+            if self.key not in parent.persist_selectable.c:
+                raise sa_exc.ArgumentError(
+                    "Can't compile synonym '%s': no column on table "
+                    "'%s' named '%s'"
+                    % (
+                        self.name,
+                        parent.persist_selectable.description,
+                        self.key,
+                    )
+                )
+            elif (
+                parent.persist_selectable.c[self.key]
+                in parent._columntoproperty
+                and parent._columntoproperty[
+                    parent.persist_selectable.c[self.key]
+                ].key
+                == self.name
+            ):
+                raise sa_exc.ArgumentError(
+                    "Can't call map_column=True for synonym %r=%r, "
+                    "a ColumnProperty already exists keyed to the name "
+                    "%r for column %r"
+                    % (self.key, self.name, self.name, self.key)
+                )
+            p: ColumnProperty[Any] = properties.ColumnProperty(
+                parent.persist_selectable.c[self.key]
+            )
+            parent._configure_property(self.name, p, init=init, setparent=True)
+            p._mapped_by_synonym = self.key
+
+        self.parent = parent
+
+
+class Synonym(SynonymProperty[_T], _DeclarativeMapped[_T]):
+    """Declarative front-end for the :class:`.SynonymProperty` class.
+
+    Public constructor is the :func:`_orm.synonym` function.
+
+    .. versionchanged:: 2.0 Added :class:`_orm.Synonym` as a Declarative
+       compatible subclass for :class:`_orm.SynonymProperty`
+
+    .. seealso::
+
+        :ref:`synonyms` - Overview of synonyms
+
+    """
+
+    inherit_cache = True
+    """:meta private:"""
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/dynamic.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/dynamic.py
new file mode 100644
index 00000000..3c81c396
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/dynamic.py
@@ -0,0 +1,300 @@
+# orm/dynamic.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+
+"""Dynamic collection API.
+
+Dynamic collections act like Query() objects for read operations and support
+basic add/delete mutation.
+
+.. legacy:: the "dynamic" loader is a legacy feature, superseded by the
+ "write_only" loader.
+
+
+"""
+
+from __future__ import annotations
+
+from typing import Any
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Optional
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import attributes
+from . import exc as orm_exc
+from . import relationships
+from . import util as orm_util
+from .base import PassiveFlag
+from .query import Query
+from .session import object_session
+from .writeonly import AbstractCollectionWriter
+from .writeonly import WriteOnlyAttributeImpl
+from .writeonly import WriteOnlyHistory
+from .writeonly import WriteOnlyLoader
+from .. import util
+from ..engine import result
+
+
+if TYPE_CHECKING:
+    from . import QueryableAttribute
+    from .mapper import Mapper
+    from .relationships import _RelationshipOrderByArg
+    from .session import Session
+    from .state import InstanceState
+    from .util import AliasedClass
+    from ..event import _Dispatch
+    from ..sql.elements import ColumnElement
+
+_T = TypeVar("_T", bound=Any)
+
+
+class DynamicCollectionHistory(WriteOnlyHistory[_T]):
+    def __init__(
+        self,
+        attr: DynamicAttributeImpl,
+        state: InstanceState[_T],
+        passive: PassiveFlag,
+        apply_to: Optional[DynamicCollectionHistory[_T]] = None,
+    ) -> None:
+        if apply_to:
+            coll = AppenderQuery(attr, state).autoflush(False)
+            self.unchanged_items = util.OrderedIdentitySet(coll)
+            self.added_items = apply_to.added_items
+            self.deleted_items = apply_to.deleted_items
+            self._reconcile_collection = True
+        else:
+            self.deleted_items = util.OrderedIdentitySet()
+            self.added_items = util.OrderedIdentitySet()
+            self.unchanged_items = util.OrderedIdentitySet()
+            self._reconcile_collection = False
+
+
+class DynamicAttributeImpl(WriteOnlyAttributeImpl):
+    _supports_dynamic_iteration = True
+    collection_history_cls = DynamicCollectionHistory[Any]
+    query_class: Type[AppenderMixin[Any]]  # type: ignore[assignment]
+
+    def __init__(
+        self,
+        class_: Union[Type[Any], AliasedClass[Any]],
+        key: str,
+        dispatch: _Dispatch[QueryableAttribute[Any]],
+        target_mapper: Mapper[_T],
+        order_by: _RelationshipOrderByArg,
+        query_class: Optional[Type[AppenderMixin[_T]]] = None,
+        **kw: Any,
+    ) -> None:
+        attributes.AttributeImpl.__init__(
+            self, class_, key, None, dispatch, **kw
+        )
+        self.target_mapper = target_mapper
+        if order_by:
+            self.order_by = tuple(order_by)
+        if not query_class:
+            self.query_class = AppenderQuery
+        elif AppenderMixin in query_class.mro():
+            self.query_class = query_class
+        else:
+            self.query_class = mixin_user_query(query_class)
+
+
+@relationships.RelationshipProperty.strategy_for(lazy="dynamic")
+class DynaLoader(WriteOnlyLoader):
+    impl_class = DynamicAttributeImpl
+
+
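+# Editor's note: a hedged sketch, not part of this module, of the
+# "dynamic" loader strategy that DynaLoader / DynamicAttributeImpl
+# implement.  ``Parent`` and ``Child`` are hypothetical classes.
+def _dynamic_relationship_sketch() -> None:
+    from sqlalchemy import Column, ForeignKey, Integer
+    from sqlalchemy.orm import registry, relationship
+
+    reg = registry()
+
+    @reg.mapped
+    class Parent:
+        __tablename__ = "parent"
+        id = Column(Integer, primary_key=True)
+
+        # lazy="dynamic" selects the DynaLoader strategy; the attribute
+        # is an AppenderQuery rather than a plain list
+        children = relationship("Child", lazy="dynamic")
+
+    @reg.mapped
+    class Child:
+        __tablename__ = "child"
+        id = Column(Integer, primary_key=True)
+        parent_id = Column(ForeignKey("parent.id"))
+
+    p = Parent()
+    # mutation methods queue changes for the next flush ...
+    p.children.append(Child())
+    # ... while read operations behave like Query; with no Session
+    # present, pending items are drawn from the collection history
+    print(p.children.count())
+
+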
+class AppenderMixin(AbstractCollectionWriter[_T]):
+    """A mixin intended to be combined with a :class:`.Query` subclass,
+    layering the collection-writer methods of
+    :class:`.AbstractCollectionWriter` over query behavior.
+
+    """
+
+    query_class: Optional[Type[Query[_T]]] = None
+    _order_by_clauses: Tuple[ColumnElement[Any], ...]
+
+    def __init__(
+        self, attr: DynamicAttributeImpl, state: InstanceState[_T]
+    ) -> None:
+        Query.__init__(
+            self,  # type: ignore[arg-type]
+            attr.target_mapper,
+            None,
+        )
+        super().__init__(attr, state)
+
+    @property
+    def session(self) -> Optional[Session]:
+        sess = object_session(self.instance)
+        if sess is not None and sess.autoflush and self.instance in sess:
+            sess.flush()
+        if not orm_util.has_identity(self.instance):
+            return None
+        else:
+            return sess
+
+    @session.setter
+    def session(self, session: Session) -> None:
+        self.sess = session
+
+    def _iter(self) -> Union[result.ScalarResult[_T], result.Result[_T]]:
+        sess = self.session
+        if sess is None:
+            state = attributes.instance_state(self.instance)
+            if state.detached:
+                util.warn(
+                    "Instance %s is detached, dynamic relationship cannot "
+                    "return a correct result.   This warning will become "
+                    "a DetachedInstanceError in a future release."
+                    % (orm_util.state_str(state))
+                )
+
+            return result.IteratorResult(
+                result.SimpleResultMetaData([self.attr.class_.__name__]),
+                iter(
+                    self.attr._get_collection_history(
+                        attributes.instance_state(self.instance),
+                        PassiveFlag.PASSIVE_NO_INITIALIZE,
+                    ).added_items
+                ),
+                _source_supports_scalars=True,
+            ).scalars()
+        else:
+            return self._generate(sess)._iter()
+
+    if TYPE_CHECKING:
+
+        def __iter__(self) -> Iterator[_T]: ...
+
+    def __getitem__(self, index: Any) -> Union[_T, List[_T]]:
+        sess = self.session
+        if sess is None:
+            return self.attr._get_collection_history(
+                attributes.instance_state(self.instance),
+                PassiveFlag.PASSIVE_NO_INITIALIZE,
+            ).indexed(index)
+        else:
+            return self._generate(sess).__getitem__(index)  # type: ignore[no-any-return] # noqa: E501
+
+    def count(self) -> int:
+        sess = self.session
+        if sess is None:
+            return len(
+                self.attr._get_collection_history(
+                    attributes.instance_state(self.instance),
+                    PassiveFlag.PASSIVE_NO_INITIALIZE,
+                ).added_items
+            )
+        else:
+            return self._generate(sess).count()
+
+    def _generate(
+        self,
+        sess: Optional[Session] = None,
+    ) -> Query[_T]:
+        # note we're returning an entirely new Query class instance
+        # here without any assignment capabilities; the class of this
+        # query is determined by the session.
+        instance = self.instance
+        if sess is None:
+            sess = object_session(instance)
+            if sess is None:
+                raise orm_exc.DetachedInstanceError(
+                    "Parent instance %s is not bound to a Session, and no "
+                    "contextual session is established; lazy load operation "
+                    "of attribute '%s' cannot proceed"
+                    % (orm_util.instance_str(instance), self.attr.key)
+                )
+
+        if self.query_class:
+            query = self.query_class(self.attr.target_mapper, session=sess)
+        else:
+            query = sess.query(self.attr.target_mapper)
+
+        query._where_criteria = self._where_criteria
+        query._from_obj = self._from_obj
+        query._order_by_clauses = self._order_by_clauses
+
+        return query
+
+    def add_all(self, iterator: Iterable[_T]) -> None:
+        """Add an iterable of items to this :class:`_orm.AppenderQuery`.
+
+        The given items will be persisted to the database in terms of
+        the parent instance's collection on the next flush.
+
+        This method is provided to assist in delivering forwards-compatibility
+        with the :class:`_orm.WriteOnlyCollection` collection class.
+
+        .. versionadded:: 2.0
+
+        """
+        self._add_all_impl(iterator)
+
+    def add(self, item: _T) -> None:
+        """Add an item to this :class:`_orm.AppenderQuery`.
+
+        The given item will be persisted to the database in terms of
+        the parent instance's collection on the next flush.
+
+        This method is provided to assist in delivering forwards-compatibility
+        with the :class:`_orm.WriteOnlyCollection` collection class.
+
+        .. versionadded:: 2.0
+
+        """
+        self._add_all_impl([item])
+
+    def extend(self, iterator: Iterable[_T]) -> None:
+        """Add an iterable of items to this :class:`_orm.AppenderQuery`.
+
+        The given items will be persisted to the database in terms of
+        the parent instance's collection on the next flush.
+
+        """
+        self._add_all_impl(iterator)
+
+    def append(self, item: _T) -> None:
+        """Append an item to this :class:`_orm.AppenderQuery`.
+
+        The given item will be persisted to the database in terms of
+        the parent instance's collection on the next flush.
+
+        """
+        self._add_all_impl([item])
+
+    def remove(self, item: _T) -> None:
+        """Remove an item from this :class:`_orm.AppenderQuery`.
+
+        The given item will be removed from the parent instance's
+        collection on the next flush.
+
+        """
+        self._remove_impl(item)
+
+
+class AppenderQuery(AppenderMixin[_T], Query[_T]):  # type: ignore[misc]
+    """A dynamic query that supports basic collection storage operations.
+
+    Methods on :class:`.AppenderQuery` include all methods of
+    :class:`_orm.Query`, plus additional methods used for collection
+    persistence.
+
+
+    """
+
+
+def mixin_user_query(cls: Any) -> Type[AppenderMixin[Any]]:
+    """Return a new class with AppenderQuery functionality layered over
+    the given Query class."""
+    name = "Appender" + cls.__name__
+    return type(name, (AppenderMixin, cls), {"query_class": cls})
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/evaluator.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/evaluator.py
new file mode 100644
index 00000000..57aae5a3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/evaluator.py
@@ -0,0 +1,379 @@
+# orm/evaluator.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+"""Evaluation functions used **INTERNALLY** by ORM DML use cases.
+
+
+This module is **private, for internal use by SQLAlchemy**.
+
+.. versionchanged:: 2.0.4 renamed ``EvaluatorCompiler`` to
+   ``_EvaluatorCompiler``.
+
+"""
+
+
+from __future__ import annotations
+
+from typing import Type
+
+from . import exc as orm_exc
+from .base import LoaderCallableStatus
+from .base import PassiveFlag
+from .. import exc
+from .. import inspect
+from ..sql import and_
+from ..sql import operators
+from ..sql.sqltypes import Concatenable
+from ..sql.sqltypes import Integer
+from ..sql.sqltypes import Numeric
+from ..util import warn_deprecated
+
+
+class UnevaluatableError(exc.InvalidRequestError):
+    pass
+
+
+class _NoObject(operators.ColumnOperators):
+    def operate(self, *arg, **kw):
+        return None
+
+    def reverse_operate(self, *arg, **kw):
+        return None
+
+
+class _ExpiredObject(operators.ColumnOperators):
+    def operate(self, *arg, **kw):
+        return self
+
+    def reverse_operate(self, *arg, **kw):
+        return self
+
+
+_NO_OBJECT = _NoObject()
+_EXPIRED_OBJECT = _ExpiredObject()
+
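+
+# Editor's note: an illustrative sketch, not part of this module, of the
+# SQL-style three-valued logic that visit_and_clauselist_op() and
+# visit_or_clauselist_op() below implement, with None standing in for SQL
+# NULL (sentinel handling for _EXPIRED_OBJECT / _NO_OBJECT omitted).
+def _three_valued_logic_sketch() -> None:
+    def tv_and(*values):
+        # mirrors visit_and_clauselist_op(): short-circuits on the first
+        # falsy operand; None (NULL) propagates as "unknown"
+        for v in values:
+            if not v:
+                return None if v is None else False
+        return True
+
+    def tv_or(*values):
+        # mirrors visit_or_clauselist_op(): True wins; otherwise NULL if
+        # any operand was NULL
+        has_null = False
+        for v in values:
+            if v:
+                return True
+            has_null = has_null or v is None
+        return None if has_null else False
+
+    assert tv_and(True, None) is None
+    assert tv_and(None, False) is None  # short-circuits on the NULL
+    assert tv_or(False, None) is None
+    assert tv_or(None, True) is True
+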
+
+class _EvaluatorCompiler:
+    def __init__(self, target_cls=None):
+        self.target_cls = target_cls
+
+    def process(self, clause, *clauses):
+        if clauses:
+            clause = and_(clause, *clauses)
+
+        meth = getattr(self, f"visit_{clause.__visit_name__}", None)
+        if not meth:
+            raise UnevaluatableError(
+                f"Cannot evaluate {type(clause).__name__}"
+            )
+        return meth(clause)
+
+    def visit_grouping(self, clause):
+        return self.process(clause.element)
+
+    def visit_null(self, clause):
+        return lambda obj: None
+
+    def visit_false(self, clause):
+        return lambda obj: False
+
+    def visit_true(self, clause):
+        return lambda obj: True
+
+    def visit_column(self, clause):
+        try:
+            parentmapper = clause._annotations["parentmapper"]
+        except KeyError as ke:
+            raise UnevaluatableError(
+                f"Cannot evaluate column: {clause}"
+            ) from ke
+
+        if self.target_cls and not issubclass(
+            self.target_cls, parentmapper.class_
+        ):
+            raise UnevaluatableError(
+                "Can't evaluate criteria against "
+                f"alternate class {parentmapper.class_}"
+            )
+
+        parentmapper._check_configure()
+
+        # we'd like to use "proxy_key" annotation to get the "key", however
+        # in relationship primaryjoin cases proxy_key is sometimes deannotated
+        # and sometimes apparently not present in the first place (?).
+        # While I can stop it from being deannotated (though need to see if
+        # this breaks other things), not sure right now  about cases where it's
+        # not there in the first place.  can fix at some later point.
+        # key = clause._annotations["proxy_key"]
+
+        # for now, use the old way
+        try:
+            key = parentmapper._columntoproperty[clause].key
+        except orm_exc.UnmappedColumnError as err:
+            raise UnevaluatableError(
+                f"Cannot evaluate expression: {err}"
+            ) from err
+
+        # note this used to fall back to a simple `getattr(obj, key)` evaluator
+        # if impl was None; as of #8656, we ensure mappers are configured
+        # so that impl is available
+        impl = parentmapper.class_manager[key].impl
+
+        def get_corresponding_attr(obj):
+            if obj is None:
+                return _NO_OBJECT
+            state = inspect(obj)
+            dict_ = state.dict
+
+            value = impl.get(
+                state, dict_, passive=PassiveFlag.PASSIVE_NO_FETCH
+            )
+            if value is LoaderCallableStatus.PASSIVE_NO_RESULT:
+                return _EXPIRED_OBJECT
+            return value
+
+        return get_corresponding_attr
+
+    def visit_tuple(self, clause):
+        return self.visit_clauselist(clause)
+
+    def visit_expression_clauselist(self, clause):
+        return self.visit_clauselist(clause)
+
+    def visit_clauselist(self, clause):
+        evaluators = [
+            self.process(sub_clause) for sub_clause in clause.clauses
+        ]
+
+        dispatch = (
+            f"visit_{clause.operator.__name__.rstrip('_')}_clauselist_op"
+        )
+        meth = getattr(self, dispatch, None)
+        if meth:
+            return meth(clause.operator, evaluators, clause)
+        else:
+            raise UnevaluatableError(
+                f"Cannot evaluate clauselist with operator {clause.operator}"
+            )
+
+    def visit_binary(self, clause):
+        eval_left = self.process(clause.left)
+        eval_right = self.process(clause.right)
+
+        dispatch = f"visit_{clause.operator.__name__.rstrip('_')}_binary_op"
+        meth = getattr(self, dispatch, None)
+        if meth:
+            return meth(clause.operator, eval_left, eval_right, clause)
+        else:
+            raise UnevaluatableError(
+                f"Cannot evaluate {type(clause).__name__} with "
+                f"operator {clause.operator}"
+            )
+
+    def visit_or_clauselist_op(self, operator, evaluators, clause):
+        def evaluate(obj):
+            has_null = False
+            for sub_evaluate in evaluators:
+                value = sub_evaluate(obj)
+                if value is _EXPIRED_OBJECT:
+                    return _EXPIRED_OBJECT
+                elif value:
+                    return True
+                has_null = has_null or value is None
+            if has_null:
+                return None
+            return False
+
+        return evaluate
+
+    def visit_and_clauselist_op(self, operator, evaluators, clause):
+        def evaluate(obj):
+            for sub_evaluate in evaluators:
+                value = sub_evaluate(obj)
+                if value is _EXPIRED_OBJECT:
+                    return _EXPIRED_OBJECT
+
+                if not value:
+                    if value is None or value is _NO_OBJECT:
+                        return None
+                    return False
+            return True
+
+        return evaluate
+
+    def visit_comma_op_clauselist_op(self, operator, evaluators, clause):
+        def evaluate(obj):
+            values = []
+            for sub_evaluate in evaluators:
+                value = sub_evaluate(obj)
+                if value is _EXPIRED_OBJECT:
+                    return _EXPIRED_OBJECT
+                elif value is None or value is _NO_OBJECT:
+                    return None
+                values.append(value)
+            return tuple(values)
+
+        return evaluate
+
+    def visit_custom_op_binary_op(
+        self, operator, eval_left, eval_right, clause
+    ):
+        if operator.python_impl:
+            return self._straight_evaluate(
+                operator, eval_left, eval_right, clause
+            )
+        else:
+            raise UnevaluatableError(
+                f"Custom operator {operator.opstring!r} can't be evaluated "
+                "in Python unless it specifies a callable using "
+                "`.python_impl`."
+            )
+
+    def visit_is_binary_op(self, operator, eval_left, eval_right, clause):
+        def evaluate(obj):
+            left_val = eval_left(obj)
+            right_val = eval_right(obj)
+            if left_val is _EXPIRED_OBJECT or right_val is _EXPIRED_OBJECT:
+                return _EXPIRED_OBJECT
+            return left_val == right_val
+
+        return evaluate
+
+    def visit_is_not_binary_op(self, operator, eval_left, eval_right, clause):
+        def evaluate(obj):
+            left_val = eval_left(obj)
+            right_val = eval_right(obj)
+            if left_val is _EXPIRED_OBJECT or right_val is _EXPIRED_OBJECT:
+                return _EXPIRED_OBJECT
+            return left_val != right_val
+
+        return evaluate
+
+    def _straight_evaluate(self, operator, eval_left, eval_right, clause):
+        def evaluate(obj):
+            left_val = eval_left(obj)
+            right_val = eval_right(obj)
+            if left_val is _EXPIRED_OBJECT or right_val is _EXPIRED_OBJECT:
+                return _EXPIRED_OBJECT
+            elif left_val is None or right_val is None:
+                return None
+
+            return operator(left_val, right_val)
+
+        return evaluate
+
+    def _straight_evaluate_numeric_only(
+        self, operator, eval_left, eval_right, clause
+    ):
+        if clause.left.type._type_affinity not in (
+            Numeric,
+            Integer,
+        ) or clause.right.type._type_affinity not in (Numeric, Integer):
+            raise UnevaluatableError(
+                f'Cannot evaluate math operator "{operator.__name__}" for '
+                f"datatypes {clause.left.type}, {clause.right.type}"
+            )
+
+        return self._straight_evaluate(operator, eval_left, eval_right, clause)
+
+    visit_add_binary_op = _straight_evaluate_numeric_only
+    visit_mul_binary_op = _straight_evaluate_numeric_only
+    visit_sub_binary_op = _straight_evaluate_numeric_only
+    visit_mod_binary_op = _straight_evaluate_numeric_only
+    visit_truediv_binary_op = _straight_evaluate_numeric_only
+    visit_lt_binary_op = _straight_evaluate
+    visit_le_binary_op = _straight_evaluate
+    visit_ne_binary_op = _straight_evaluate
+    visit_gt_binary_op = _straight_evaluate
+    visit_ge_binary_op = _straight_evaluate
+    visit_eq_binary_op = _straight_evaluate
+
+    def visit_in_op_binary_op(self, operator, eval_left, eval_right, clause):
+        return self._straight_evaluate(
+            lambda a, b: a in b if a is not _NO_OBJECT else None,
+            eval_left,
+            eval_right,
+            clause,
+        )
+
+    def visit_not_in_op_binary_op(
+        self, operator, eval_left, eval_right, clause
+    ):
+        return self._straight_evaluate(
+            lambda a, b: a not in b if a is not _NO_OBJECT else None,
+            eval_left,
+            eval_right,
+            clause,
+        )
+
+    def visit_concat_op_binary_op(
+        self, operator, eval_left, eval_right, clause
+    ):
+
+        if not issubclass(
+            clause.left.type._type_affinity, Concatenable
+        ) or not issubclass(clause.right.type._type_affinity, Concatenable):
+            raise UnevaluatableError(
+                "Cannot evaluate concatenation operator "
+                f'"{operator.__name__}" for '
+                f"datatypes {clause.left.type}, {clause.right.type}"
+            )
+
+        return self._straight_evaluate(
+            lambda a, b: a + b, eval_left, eval_right, clause
+        )
+
+    def visit_startswith_op_binary_op(
+        self, operator, eval_left, eval_right, clause
+    ):
+        return self._straight_evaluate(
+            lambda a, b: a.startswith(b), eval_left, eval_right, clause
+        )
+
+    def visit_endswith_op_binary_op(
+        self, operator, eval_left, eval_right, clause
+    ):
+        return self._straight_evaluate(
+            lambda a, b: a.endswith(b), eval_left, eval_right, clause
+        )
+
+    def visit_unary(self, clause):
+        eval_inner = self.process(clause.element)
+        if clause.operator is operators.inv:
+
+            def evaluate(obj):
+                value = eval_inner(obj)
+                if value is _EXPIRED_OBJECT:
+                    return _EXPIRED_OBJECT
+                elif value is None:
+                    return None
+                return not value
+
+            return evaluate
+        raise UnevaluatableError(
+            f"Cannot evaluate {type(clause).__name__} "
+            f"with operator {clause.operator}"
+        )
+
+    def visit_bindparam(self, clause):
+        if clause.callable:
+            val = clause.callable()
+        else:
+            val = clause.value
+        return lambda obj: val
+
+
+def __getattr__(name: str) -> Type[_EvaluatorCompiler]:
+    if name == "EvaluatorCompiler":
+        warn_deprecated(
+            "Direct use of 'EvaluatorCompiler' is not supported, and this "
+            "name will be removed in a future release.  "
+            "'_EvaluatorCompiler' is for internal use only",
+            "2.0",
+        )
+        return _EvaluatorCompiler
+    else:
+        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/events.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/events.py
new file mode 100644
index 00000000..f161760e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/events.py
@@ -0,0 +1,3271 @@
+# orm/events.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""ORM event interfaces.
+
+"""
+from __future__ import annotations
+
+from typing import Any
+from typing import Callable
+from typing import Collection
+from typing import Dict
+from typing import Generic
+from typing import Iterable
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+import weakref
+
+from . import instrumentation
+from . import interfaces
+from . import mapperlib
+from .attributes import QueryableAttribute
+from .base import _mapper_or_none
+from .base import NO_KEY
+from .instrumentation import ClassManager
+from .instrumentation import InstrumentationFactory
+from .query import BulkDelete
+from .query import BulkUpdate
+from .query import Query
+from .scoping import scoped_session
+from .session import Session
+from .session import sessionmaker
+from .. import event
+from .. import exc
+from .. import util
+from ..event import EventTarget
+from ..event.registry import _ET
+from ..util.compat import inspect_getfullargspec
+
+if TYPE_CHECKING:
+    from weakref import ReferenceType
+
+    from ._typing import _InstanceDict
+    from ._typing import _InternalEntityType
+    from ._typing import _O
+    from ._typing import _T
+    from .attributes import Event
+    from .base import EventConstants
+    from .session import ORMExecuteState
+    from .session import SessionTransaction
+    from .unitofwork import UOWTransaction
+    from ..engine import Connection
+    from ..event.base import _Dispatch
+    from ..event.base import _HasEventsDispatch
+    from ..event.registry import _EventKey
+    from ..orm.collections import CollectionAdapter
+    from ..orm.context import QueryContext
+    from ..orm.decl_api import DeclarativeAttributeIntercept
+    from ..orm.decl_api import DeclarativeMeta
+    from ..orm.mapper import Mapper
+    from ..orm.state import InstanceState
+
+_KT = TypeVar("_KT", bound=Any)
+_ET2 = TypeVar("_ET2", bound=EventTarget)
+
+
+class InstrumentationEvents(event.Events[InstrumentationFactory]):
+    """Events related to class instrumentation events.
+
+    The listeners here support being established against
+    any new style class, that is any object that is a subclass
+    of 'type'.  Events will then be fired off for events
+    against that class.  If the "propagate=True" flag is passed
+    to event.listen(), the event will fire off for subclasses
+    of that class as well.
+
+    The Python ``type`` builtin is also accepted as a target,
+    which when used has the effect of events being emitted
+    for all classes.
+
+    Note the "propagate" flag here is defaulted to ``True``,
+    unlike the other class level events where it defaults
+    to ``False``.  This means that new subclasses will also
+    be the subject of these events, when a listener
+    is established on a superclass.
+
+    """
+
+    _target_class_doc = "SomeBaseClass"
+    _dispatch_target = InstrumentationFactory
+
+    @classmethod
+    def _accept_with(
+        cls,
+        target: Union[
+            InstrumentationFactory,
+            Type[InstrumentationFactory],
+        ],
+        identifier: str,
+    ) -> Optional[
+        Union[
+            InstrumentationFactory,
+            Type[InstrumentationFactory],
+        ]
+    ]:
+        if isinstance(target, type):
+            return _InstrumentationEventsHold(target)  # type: ignore [return-value] # noqa: E501
+        else:
+            return None
+
+    @classmethod
+    def _listen(
+        cls, event_key: _EventKey[_T], propagate: bool = True, **kw: Any
+    ) -> None:
+        target, identifier, fn = (
+            event_key.dispatch_target,
+            event_key.identifier,
+            event_key._listen_fn,
+        )
+
+        def listen(target_cls: type, *arg: Any) -> Optional[Any]:
+            listen_cls = target()
+
+            # the weakref to the target class may have been collected;
+            # this is not something that normally happens, but was
+            # occurring during test teardown between mapper/registry/
+            # instrumentation_manager, until that interaction was changed
+            # to not rely upon the event system.
+            if listen_cls is None:
+                return None
+
+            if propagate and issubclass(target_cls, listen_cls):
+                return fn(target_cls, *arg)
+            elif not propagate and target_cls is listen_cls:
+                return fn(target_cls, *arg)
+            else:
+                return None
+
+        def remove(ref: ReferenceType[_T]) -> None:
+            key = event.registry._EventKey(  # type: ignore [type-var]
+                None,
+                identifier,
+                listen,
+                instrumentation._instrumentation_factory,
+            )
+            getattr(
+                instrumentation._instrumentation_factory.dispatch, identifier
+            ).remove(key)
+
+        target = weakref.ref(target.class_, remove)
+
+        event_key.with_dispatch_target(
+            instrumentation._instrumentation_factory
+        ).with_wrapper(listen).base_listen(**kw)
+
+    @classmethod
+    def _clear(cls) -> None:
+        super()._clear()
+        instrumentation._instrumentation_factory.dispatch._clear()
+
+    def class_instrument(self, cls: ClassManager[_O]) -> None:
+        """Called after the given class is instrumented.
+
+        To get at the :class:`.ClassManager`, use
+        :func:`.manager_of_class`.
+
+        """
+
+    def class_uninstrument(self, cls: ClassManager[_O]) -> None:
+        """Called before the given class is uninstrumented.
+
+        To get at the :class:`.ClassManager`, use
+        :func:`.manager_of_class`.
+
+        """
+
+    def attribute_instrument(
+        self, cls: ClassManager[_O], key: _KT, inst: _O
+    ) -> None:
+        """Called when an attribute is instrumented."""
+
+
+class _InstrumentationEventsHold:
+    """temporary marker object used to transfer from _accept_with() to
+    _listen() on the InstrumentationEvents class.
+
+    """
+
+    def __init__(self, class_: type) -> None:
+        self.class_ = class_
+
+    dispatch = event.dispatcher(InstrumentationEvents)
+
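+
+# Editor's note: an illustrative sketch, not part of this module, of
+# establishing an InstrumentationEvents listener against a base class;
+# with the default propagate=True, subclasses of the hypothetical
+# ``SomeBaseClass`` are matched as well.
+def _instrumentation_events_sketch(SomeBaseClass) -> None:
+    def on_class_instrument(class_):
+        # the listener receives the class; per the docstring above, the
+        # ClassManager is obtained via manager_of_class()
+        print("instrumented:", class_)
+
+    event.listen(SomeBaseClass, "class_instrument", on_class_instrument)
+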
+
+class InstanceEvents(event.Events[ClassManager[Any]]):
+    """Define events specific to object lifecycle.
+
+    e.g.::
+
+        from sqlalchemy import event
+
+
+        def my_load_listener(target, context):
+            print("on load!")
+
+
+        event.listen(SomeClass, "load", my_load_listener)
+
+    Available targets include:
+
+    * mapped classes
+    * unmapped superclasses of mapped or to-be-mapped classes
+      (using the ``propagate=True`` flag)
+    * :class:`_orm.Mapper` objects
+    * the :class:`_orm.Mapper` class itself, which indicates listening for
+      all mappers.
+
+    Instance events are closely related to mapper events, but
+    are more specific to the instance and its instrumentation,
+    rather than its system of persistence.
+
+    When using :class:`.InstanceEvents`, several modifiers are
+    available to the :func:`.event.listen` function.
+
+    :param propagate=False: When True, the event listener should
+       be applied to all inheriting classes as well as the
+       class which is the target of this listener.
+    :param raw=False: When True, the "target" argument passed
+       to applicable event listener functions will be the
+       instance's :class:`.InstanceState` management
+       object, rather than the mapped instance itself.
+    :param restore_load_context=False: Applies to the
+       :meth:`.InstanceEvents.load` and :meth:`.InstanceEvents.refresh`
+       events.  Restores the loader context of the object when the event
+       hook is complete, so that ongoing eager load operations continue
+       to target the object appropriately.  A warning is emitted if the
+       object is moved to a new loader context from within one of these
+       events if this flag is not set.
+
+       .. versionadded:: 1.3.14
+
+
+    """
+
+    _target_class_doc = "SomeClass"
+
+    _dispatch_target = ClassManager
+
+    @classmethod
+    def _new_classmanager_instance(
+        cls,
+        class_: Union[DeclarativeAttributeIntercept, DeclarativeMeta, type],
+        classmanager: ClassManager[_O],
+    ) -> None:
+        _InstanceEventsHold.populate(class_, classmanager)
+
+    @classmethod
+    @util.preload_module("sqlalchemy.orm")
+    def _accept_with(
+        cls,
+        target: Union[
+            ClassManager[Any],
+            Type[ClassManager[Any]],
+        ],
+        identifier: str,
+    ) -> Optional[Union[ClassManager[Any], Type[ClassManager[Any]]]]:
+        orm = util.preloaded.orm
+
+        if isinstance(target, ClassManager):
+            return target
+        elif isinstance(target, mapperlib.Mapper):
+            return target.class_manager
+        elif target is orm.mapper:  # type: ignore [attr-defined]
+            util.warn_deprecated(
+                "The `sqlalchemy.orm.mapper()` symbol is deprecated and "
+                "will be removed in a future release. For the mapper-wide "
+                "event target, use the 'sqlalchemy.orm.Mapper' class.",
+                "2.0",
+            )
+            return ClassManager
+        elif isinstance(target, type):
+            if issubclass(target, mapperlib.Mapper):
+                return ClassManager
+            else:
+                manager = instrumentation.opt_manager_of_class(target)
+                if manager:
+                    return manager
+                else:
+                    return _InstanceEventsHold(target)  # type: ignore [return-value] # noqa: E501
+        return None
+
+    @classmethod
+    def _listen(
+        cls,
+        event_key: _EventKey[ClassManager[Any]],
+        raw: bool = False,
+        propagate: bool = False,
+        restore_load_context: bool = False,
+        **kw: Any,
+    ) -> None:
+        target, fn = (event_key.dispatch_target, event_key._listen_fn)
+
+        if not raw or restore_load_context:
+
+            def wrap(
+                state: InstanceState[_O], *arg: Any, **kw: Any
+            ) -> Optional[Any]:
+                if not raw:
+                    target: Any = state.obj()
+                else:
+                    target = state
+                if restore_load_context:
+                    runid = state.runid
+                try:
+                    return fn(target, *arg, **kw)
+                finally:
+                    if restore_load_context:
+                        state.runid = runid
+
+            event_key = event_key.with_wrapper(wrap)
+
+        event_key.base_listen(propagate=propagate, **kw)
+
+        if propagate:
+            for mgr in target.subclass_managers(True):
+                event_key.with_dispatch_target(mgr).base_listen(propagate=True)
+
+    @classmethod
+    def _clear(cls) -> None:
+        super()._clear()
+        _InstanceEventsHold._clear()
+
+    def first_init(self, manager: ClassManager[_O], cls: Type[_O]) -> None:
+        """Called when the first instance of a particular mapping is
+        constructed.
+
+        This event is invoked when the ``__init__`` method of a class
+        is called for the first time for that particular class.  The event
+        is invoked before ``__init__`` actually proceeds as well as before
+        the :meth:`.InstanceEvents.init` event is invoked.
+
+        """
+
+    def init(self, target: _O, args: Any, kwargs: Any) -> None:
+        """Receive an instance when its constructor is called.
+
+        This method is only called during a userland construction of
+        an object, in conjunction with the object's constructor, e.g.
+        its ``__init__`` method.  It is not called when an object is
+        loaded from the database; see the :meth:`.InstanceEvents.load`
+        event in order to intercept a database load.
+
+        The event is called before the actual ``__init__`` constructor
+        of the object is called.  The ``kwargs`` dictionary may be
+        modified in-place in order to affect what is passed to
+        ``__init__``.
+
+        :param target: the mapped instance.  If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :param args: positional arguments passed to the ``__init__`` method.
+         This is passed as a tuple and is currently immutable.
+        :param kwargs: keyword arguments passed to the ``__init__`` method.
+         This structure *can* be altered in place.
+
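+        e.g., an illustrative listener that inspects the incoming keyword
+        arguments (``SomeClass`` standing in for any mapped class)::
+
+            from sqlalchemy import event
+
+
+            @event.listens_for(SomeClass, "init")
+            def on_init(target, args, kwargs):
+                print(f"new {type(target).__name__} with kwargs {kwargs}")
+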
+        .. seealso::
+
+            :meth:`.InstanceEvents.init_failure`
+
+            :meth:`.InstanceEvents.load`
+
+        """
+
+    def init_failure(self, target: _O, args: Any, kwargs: Any) -> None:
+        """Receive an instance when its constructor has been called
+        and has raised an exception.
+
+        This method is only called during a userland construction of
+        an object, in conjunction with the object's constructor, e.g.
+        its ``__init__`` method. It is not called when an object is loaded
+        from the database.
+
+        The event is invoked after an exception raised by the ``__init__``
+        method is caught.  After the event
+        is invoked, the original exception is re-raised outwards, so that
+        the construction of the object still raises an exception.   The
+        actual exception and stack trace raised should be present in
+        ``sys.exc_info()``.
+
+        :param target: the mapped instance.  If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :param args: positional arguments that were passed to the ``__init__``
+         method.
+        :param kwargs: keyword arguments that were passed to the ``__init__``
+         method.
+
+        .. seealso::
+
+            :meth:`.InstanceEvents.init`
+
+            :meth:`.InstanceEvents.load`
+
+        """
+
+    def _sa_event_merge_wo_load(
+        self, target: _O, context: QueryContext
+    ) -> None:
+        """receive an object instance after it was the subject of a merge()
+        call, when load=False was passed.
+
+        The target would be the already-loaded object in the Session which
+        would have had its attributes overwritten by the incoming object. This
+        overwrite operation does not use attribute events, instead just
+        populating dict directly. Therefore the purpose of this event is so
+        that extensions like sqlalchemy.ext.mutable know that object state has
+        changed and incoming state needs to be set up for "parents" etc.
+
+        This functionality may be made public in a later release.
+
+        .. versionadded:: 1.4.41
+
+        """
+
+    def load(self, target: _O, context: QueryContext) -> None:
+        """Receive an object instance after it has been created via
+        ``__new__``, and after initial attribute population has
+        occurred.
+
+        This typically occurs when the instance is created based on
+        incoming result rows, and the event is invoked only once in that
+        instance's lifetime.
+
+        .. warning::
+
+            During a result-row load, this event is invoked when the
+            first row received for this instance is processed.  When using
+            eager loading with collection-oriented attributes, the
+            additional rows needed to load subsequent collection items have
+            not yet been processed.   This has the effect both that
+            collections will not be fully loaded, as well as that if an
+            operation within this event handler emits another database load
+            operation for the object, the "loading context" for the object
+            can change and interfere with the existing eager loaders still
+            in progress.
+
+            Examples of what can cause the "loading context" to change within
+            the event handler include, but are not necessarily limited to:
+
+            * accessing deferred attributes that weren't part of the row
+              will trigger an "undefer" operation and refresh the object
+
+            * accessing attributes on a joined-inheritance subclass that
+              weren't part of the row will trigger a refresh operation.
+
+            As of SQLAlchemy 1.3.14, a warning is emitted when this occurs. The
+            :paramref:`.InstanceEvents.restore_load_context` option may  be
+            used on the event to prevent this warning; this will ensure that
+            the existing loading context is maintained for the object after the
+            event is called::
+
+                @event.listens_for(SomeClass, "load", restore_load_context=True)
+                def on_load(instance, context):
+                    instance.some_unloaded_attribute
+
+            .. versionchanged:: 1.3.14 Added
+               :paramref:`.InstanceEvents.restore_load_context`
+               and :paramref:`.SessionEvents.restore_load_context` flags which
+               apply to "on load" events, which will ensure that the loading
+               context for an object is restored when the event hook is
+               complete; a warning is emitted if the load context of the object
+               changes without this flag being set.
+
+
+        The :meth:`.InstanceEvents.load` event is also available in a
+        class-method decorator format called :func:`_orm.reconstructor`.
+
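+        For example, a minimal sketch using :func:`_orm.reconstructor`,
+        assuming a declarative base ``Base``; ``SomeClass`` and its column
+        are hypothetical::
+
+            from sqlalchemy import Integer
+            from sqlalchemy.orm import mapped_column, reconstructor
+
+
+            class SomeClass(Base):
+                __tablename__ = "some_table"
+
+                id = mapped_column(Integer, primary_key=True)
+
+                @reconstructor
+                def init_on_load(self):
+                    # runs each time the instance is loaded from the
+                    # database, equivalent to listening for the "load"
+                    # event
+                    self._cache = {}
+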
+        :param target: the mapped instance.  If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :param context: the :class:`.QueryContext` corresponding to the
+         current :class:`_query.Query` in progress.  This argument may be
+         ``None`` if the load does not correspond to a :class:`_query.Query`,
+         such as during :meth:`.Session.merge`.
+
+        .. seealso::
+
+            :ref:`mapped_class_load_events`
+
+            :meth:`.InstanceEvents.init`
+
+            :meth:`.InstanceEvents.refresh`
+
+            :meth:`.SessionEvents.loaded_as_persistent`
+
+        """  # noqa: E501
+
+    def refresh(
+        self, target: _O, context: QueryContext, attrs: Optional[Iterable[str]]
+    ) -> None:
+        """Receive an object instance after one or more attributes have
+        been refreshed from a query.
+
+        Contrast this to the :meth:`.InstanceEvents.load` method, which
+        is invoked when the object is first loaded from a query.
+
+        .. note:: This event is invoked within the loader process before
+           eager loaders may have been completed, and the object's state may
+           not be complete.  Additionally, invoking row-level refresh
+           operations on the object will place the object into a new loader
+           context, interfering with the existing load context.   See the note
+           on :meth:`.InstanceEvents.load` for background on making use of the
+           :paramref:`.InstanceEvents.restore_load_context` parameter, in
+           order to resolve this scenario.
+
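+        For example, a minimal sketch that logs which attributes were
+        refreshed, with ``SomeClass`` as a hypothetical mapped class::
+
+            from sqlalchemy import event
+
+
+            @event.listens_for(SomeClass, "refresh")
+            def on_refresh(target, context, attrs):
+                # attrs is None when all non-deferred column attributes
+                # were refreshed
+                print("refreshed:", attrs if attrs is not None else "all")
+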
+        :param target: the mapped instance.  If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :param context: the :class:`.QueryContext` corresponding to the
+         current :class:`_query.Query` in progress.
+        :param attrs: sequence of attribute names which
+         were populated, or None if all column-mapped, non-deferred
+         attributes were populated.
+
+        .. seealso::
+
+            :ref:`mapped_class_load_events`
+
+            :meth:`.InstanceEvents.load`
+
+        """
+
+    def refresh_flush(
+        self,
+        target: _O,
+        flush_context: UOWTransaction,
+        attrs: Optional[Iterable[str]],
+    ) -> None:
+        """Receive an object instance after one or more attributes that
+        contain a column-level default or onupdate handler have been refreshed
+        during persistence of the object's state.
+
+        This event is the same as :meth:`.InstanceEvents.refresh` except
+        it is invoked within the unit of work flush process, and includes
+        only non-primary-key columns that have column-level default or
+        onupdate handlers, including Python callables as well as server-side
+        defaults and triggers which may be fetched via the RETURNING clause.
+
+        .. note::
+
+            While the :meth:`.InstanceEvents.refresh_flush` event is triggered
+            for an object that was INSERTed as well as for an object that was
+            UPDATEd, the event is geared primarily towards the UPDATE process;
+            the fact that INSERT actions can also trigger this event is mostly
+            an internal artifact, and note that **primary key columns for an
+            INSERTed row are explicitly omitted** from this event.  In order to
+            intercept the newly INSERTed state of an object, the
+            :meth:`.SessionEvents.pending_to_persistent` and
+            :meth:`.MapperEvents.after_insert` events are better choices.
+
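+        For example, a minimal sketch that reads a freshly fetched
+        server-generated value, with ``SomeClass`` as a hypothetical mapped
+        class having an ``updated_at`` column default::
+
+            from sqlalchemy import event
+
+
+            @event.listens_for(SomeClass, "refresh_flush")
+            def on_refresh_flush(target, flush_context, attrs):
+                if attrs and "updated_at" in attrs:
+                    # the newly fetched default value is now present
+                    # on the object
+                    print("new timestamp:", target.updated_at)
+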
+        :param target: the mapped instance.  If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :param flush_context: Internal :class:`.UOWTransaction` object
+         which handles the details of the flush.
+        :param attrs: sequence of attribute names which
+         were populated.
+
+        .. seealso::
+
+            :ref:`mapped_class_load_events`
+
+            :ref:`orm_server_defaults`
+
+            :ref:`metadata_defaults_toplevel`
+
+        """
+
+    def expire(self, target: _O, attrs: Optional[Iterable[str]]) -> None:
+        """Receive an object instance after its attributes or some subset
+        have been expired.
+
+        ``attrs`` is a sequence of attribute names.  If None, the entire
+        state was expired.
+
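+        For example, a minimal sketch that records expirations, with
+        ``SomeClass`` as a hypothetical mapped class::
+
+            from sqlalchemy import event
+
+            expirations = []
+
+
+            @event.listens_for(SomeClass, "expire")
+            def on_expire(target, attrs):
+                # attrs is None when the entire state was expired
+                expirations.append((target, attrs))
+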
+        :param target: the mapped instance.  If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :param attrs: sequence of attribute
+         names which were expired, or None if all attributes were
+         expired.
+
+        """
+
+    def pickle(self, target: _O, state_dict: _InstanceDict) -> None:
+        """Receive an object instance when its associated state is
+        being pickled.
+
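+        For example, a minimal sketch that stores extra application state
+        with the pickle, with ``SomeClass`` as a hypothetical mapped
+        class::
+
+            from sqlalchemy import event
+
+
+            @event.listens_for(SomeClass, "pickle")
+            def on_pickle(target, state_dict):
+                # entries added here travel with the pickled state and
+                # are visible again in the "unpickle" event
+                state_dict["app_version"] = 1
+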
+        :param target: the mapped instance.  If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :param state_dict: the dictionary returned by
+         :meth:`.InstanceState.__getstate__`, containing the state
+         to be pickled.
+
+        """
+
+    def unpickle(self, target: _O, state_dict: _InstanceDict) -> None:
+        """Receive an object instance after its associated state has
+        been unpickled.
+
+        :param target: the mapped instance.  If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :param state_dict: the dictionary sent to
+         :meth:`.InstanceState.__setstate__`, containing the state
+         dictionary which was pickled.
+
+        """
+
+
+class _EventsHold(event.RefCollection[_ET]):
+    """Hold onto listeners against unmapped, uninstrumented classes.
+
+    Establish _listen() for that class' mapper/instrumentation when
+    those objects are created for that class.
+
+    """
+
+    all_holds: weakref.WeakKeyDictionary[Any, Any]
+
+    def __init__(
+        self,
+        class_: Union[DeclarativeAttributeIntercept, DeclarativeMeta, type],
+    ) -> None:
+        self.class_ = class_
+
+    @classmethod
+    def _clear(cls) -> None:
+        cls.all_holds.clear()
+
+    class HoldEvents(Generic[_ET2]):
+        _dispatch_target: Optional[Type[_ET2]] = None
+
+        @classmethod
+        def _listen(
+            cls,
+            event_key: _EventKey[_ET2],
+            raw: bool = False,
+            propagate: bool = False,
+            retval: bool = False,
+            **kw: Any,
+        ) -> None:
+            target = event_key.dispatch_target
+
+            if target.class_ in target.all_holds:
+                collection = target.all_holds[target.class_]
+            else:
+                collection = target.all_holds[target.class_] = {}
+
+            event.registry._stored_in_collection(event_key, target)
+            collection[event_key._key] = (
+                event_key,
+                raw,
+                propagate,
+                retval,
+                kw,
+            )
+
+            if propagate:
+                stack = list(target.class_.__subclasses__())
+                while stack:
+                    subclass = stack.pop(0)
+                    stack.extend(subclass.__subclasses__())
+                    subject = target.resolve(subclass)
+                    if subject is not None:
+                        # we are already going through __subclasses__()
+                        # so leave generic propagate flag False
+                        event_key.with_dispatch_target(subject).listen(
+                            raw=raw, propagate=False, retval=retval, **kw
+                        )
+
+    def remove(self, event_key: _EventKey[_ET]) -> None:
+        target = event_key.dispatch_target
+
+        if isinstance(target, _EventsHold):
+            collection = target.all_holds[target.class_]
+            del collection[event_key._key]
+
+    @classmethod
+    def populate(
+        cls,
+        class_: Union[DeclarativeAttributeIntercept, DeclarativeMeta, type],
+        subject: Union[ClassManager[_O], Mapper[_O]],
+    ) -> None:
+        for subclass in class_.__mro__:
+            if subclass in cls.all_holds:
+                collection = cls.all_holds[subclass]
+                for (
+                    event_key,
+                    raw,
+                    propagate,
+                    retval,
+                    kw,
+                ) in collection.values():
+                    if propagate or subclass is class_:
+                        # since we can't be sure in what order different
+                        # classes in a hierarchy are triggered with
+                        # populate(), we rely upon _EventsHold for all event
+                        # assignment, instead of using the generic propagate
+                        # flag.
+                        event_key.with_dispatch_target(subject).listen(
+                            raw=raw, propagate=False, retval=retval, **kw
+                        )
+
+
+class _InstanceEventsHold(_EventsHold[_ET]):
+    all_holds: weakref.WeakKeyDictionary[Any, Any] = (
+        weakref.WeakKeyDictionary()
+    )
+
+    def resolve(self, class_: Type[_O]) -> Optional[ClassManager[_O]]:
+        return instrumentation.opt_manager_of_class(class_)
+
+    class HoldInstanceEvents(_EventsHold.HoldEvents[_ET], InstanceEvents):  # type: ignore [misc] # noqa: E501
+        pass
+
+    dispatch = event.dispatcher(HoldInstanceEvents)
+
+
+class MapperEvents(event.Events[mapperlib.Mapper[Any]]):
+    """Define events specific to mappings.
+
+    e.g.::
+
+        from sqlalchemy import event, text
+
+
+        def my_before_insert_listener(mapper, connection, target):
+            # execute a stored procedure upon INSERT,
+            # apply the value to the row to be inserted
+            target.calculated_value = connection.execute(
+                text("select my_special_function(%d)" % target.special_number)
+            ).scalar()
+
+
+        # associate the listener function with SomeClass,
+        # to execute during the "before_insert" hook
+        event.listen(SomeClass, "before_insert", my_before_insert_listener)
+
+    Available targets include:
+
+    * mapped classes
+    * unmapped superclasses of mapped or to-be-mapped classes
+      (using the ``propagate=True`` flag)
+    * :class:`_orm.Mapper` objects
+    * the :class:`_orm.Mapper` class itself, which indicates listening for
+      all mappers.
+
+    Mapper events provide hooks into critical sections of the
+    mapper, including those related to object instrumentation,
+    object loading, and object persistence. In particular, the
+    persistence methods :meth:`~.MapperEvents.before_insert`
+    and :meth:`~.MapperEvents.before_update` are popular
+    places to augment the state being persisted - however, these
+    methods operate with several significant restrictions. The
+    user is encouraged to evaluate the
+    :meth:`.SessionEvents.before_flush` and
+    :meth:`.SessionEvents.after_flush` methods as more
+    flexible and user-friendly hooks in which to apply
+    additional database state during a flush.
+
+    When using :class:`.MapperEvents`, several modifiers are
+    available to the :func:`.event.listen` function.
+
+    :param propagate=False: When True, the event listener should
+       be applied to all inheriting mappers and/or the mappers of
+       inheriting classes, as well as any
+       mapper which is the target of this listener.
+    :param raw=False: When True, the "target" argument passed
+       to applicable event listener functions will be the
+       instance's :class:`.InstanceState` management
+       object, rather than the mapped instance itself.
+    :param retval=False: when True, the user-defined event function
+       must have a return value, the purpose of which is either to
+       control subsequent event propagation, or to otherwise alter
+       the operation in progress by the mapper.   Possible return
+       values are:
+
+       * ``sqlalchemy.orm.interfaces.EXT_CONTINUE`` - continue event
+         processing normally.
+       * ``sqlalchemy.orm.interfaces.EXT_STOP`` - cancel all subsequent
+         event handlers in the chain.
+       * other values - the return value specified by specific listeners.
+
+    """
+
+    _target_class_doc = "SomeClass"
+    _dispatch_target = mapperlib.Mapper
+
+    @classmethod
+    def _new_mapper_instance(
+        cls,
+        class_: Union[DeclarativeAttributeIntercept, DeclarativeMeta, type],
+        mapper: Mapper[_O],
+    ) -> None:
+        _MapperEventsHold.populate(class_, mapper)
+
+    @classmethod
+    @util.preload_module("sqlalchemy.orm")
+    def _accept_with(
+        cls,
+        target: Union[mapperlib.Mapper[Any], Type[mapperlib.Mapper[Any]]],
+        identifier: str,
+    ) -> Optional[Union[mapperlib.Mapper[Any], Type[mapperlib.Mapper[Any]]]]:
+        orm = util.preloaded.orm
+
+        if target is orm.mapper:  # type: ignore [attr-defined]
+            util.warn_deprecated(
+                "The `sqlalchemy.orm.mapper()` symbol is deprecated and "
+                "will be removed in a future release. For the mapper-wide "
+                "event target, use the 'sqlalchemy.orm.Mapper' class.",
+                "2.0",
+            )
+            return mapperlib.Mapper
+        elif isinstance(target, type):
+            if issubclass(target, mapperlib.Mapper):
+                return target
+            else:
+                mapper = _mapper_or_none(target)
+                if mapper is not None:
+                    return mapper
+                else:
+                    return _MapperEventsHold(target)
+        else:
+            return target
+
+    @classmethod
+    def _listen(
+        cls,
+        event_key: _EventKey[_ET],
+        raw: bool = False,
+        retval: bool = False,
+        propagate: bool = False,
+        **kw: Any,
+    ) -> None:
+        target, identifier, fn = (
+            event_key.dispatch_target,
+            event_key.identifier,
+            event_key._listen_fn,
+        )
+
+        if (
+            identifier in ("before_configured", "after_configured")
+            and target is not mapperlib.Mapper
+        ):
+            util.warn(
+                "'before_configured' and 'after_configured' ORM events "
+                "only invoke with the Mapper class "
+                "as the target."
+            )
+
+        if not raw or not retval:
+            if not raw:
+                meth = getattr(cls, identifier)
+                try:
+                    target_index = (
+                        inspect_getfullargspec(meth)[0].index("target") - 1
+                    )
+                except ValueError:
+                    target_index = None
+
+            def wrap(*arg: Any, **kw: Any) -> Any:
+                if not raw and target_index is not None:
+                    arg = list(arg)  # type: ignore [assignment]
+                    arg[target_index] = arg[target_index].obj()  # type: ignore [index] # noqa: E501
+                if not retval:
+                    fn(*arg, **kw)
+                    return interfaces.EXT_CONTINUE
+                else:
+                    return fn(*arg, **kw)
+
+            event_key = event_key.with_wrapper(wrap)
+
+        if propagate:
+            for mapper in target.self_and_descendants:
+                event_key.with_dispatch_target(mapper).base_listen(
+                    propagate=True, **kw
+                )
+        else:
+            event_key.base_listen(**kw)
+
+    @classmethod
+    def _clear(cls) -> None:
+        super()._clear()
+        _MapperEventsHold._clear()
+
+    def instrument_class(self, mapper: Mapper[_O], class_: Type[_O]) -> None:
+        r"""Receive a class when the mapper is first constructed,
+        before instrumentation is applied to the mapped class.
+
+        This event is the earliest phase of mapper construction.
+        Most attributes of the mapper are not yet initialized.   To
+        receive an event within initial mapper construction where basic
+        state is available such as the :attr:`_orm.Mapper.attrs` collection,
+        the :meth:`_orm.MapperEvents.after_mapper_constructed` event may
+        be a better choice.
+
+        This listener can either be applied to the :class:`_orm.Mapper`
+        class overall, or to any un-mapped class which serves as a base
+        for classes that will be mapped (using the ``propagate=True`` flag)::
+
+            Base = declarative_base()
+
+
+            @event.listens_for(Base, "instrument_class", propagate=True)
+            def on_new_class(mapper, cls_):
+                "..."
+
+        :param mapper: the :class:`_orm.Mapper` which is the target
+         of this event.
+        :param class\_: the mapped class.
+
+        .. seealso::
+
+            :meth:`_orm.MapperEvents.after_mapper_constructed`
+
+        """
+
+    def after_mapper_constructed(
+        self, mapper: Mapper[_O], class_: Type[_O]
+    ) -> None:
+        """Receive a class and mapper when the :class:`_orm.Mapper` has been
+        fully constructed.
+
+        This event is called after the initial constructor for
+        :class:`_orm.Mapper` completes.  This occurs after the
+        :meth:`_orm.MapperEvents.instrument_class` event and after the
+        :class:`_orm.Mapper` has done an initial pass of its arguments
+        to generate its collection of :class:`_orm.MapperProperty` objects,
+        which are accessible via the :meth:`_orm.Mapper.get_property`
+        method and the :attr:`_orm.Mapper.iterate_properties` attribute.
+
+        This event differs from the
+        :meth:`_orm.MapperEvents.before_mapper_configured` event in that it
+        is invoked within the constructor for :class:`_orm.Mapper`, rather
+        than within the :meth:`_orm.registry.configure` process.   Currently,
+        this event is the only one which is appropriate for handlers that
+        wish to create additional mapped classes in response to the
+        construction of this :class:`_orm.Mapper`, which will be part of the
+        same configure step when :meth:`_orm.registry.configure` next runs.
+
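+        For example, a minimal sketch that inspects the initial property
+        collection of each newly constructed mapper::
+
+            from sqlalchemy import event
+            from sqlalchemy.orm import Mapper
+
+
+            @event.listens_for(Mapper, "after_mapper_constructed")
+            def on_mapper_constructed(mapper, class_):
+                # the initial MapperProperty collection is available here
+                props = [prop.key for prop in mapper.iterate_properties]
+                print(class_.__name__, props)
+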
+        .. versionadded:: 2.0.2
+
+        .. seealso::
+
+            :ref:`examples_versioning` - an example which illustrates the use
+            of the :meth:`_orm.MapperEvents.after_mapper_constructed`
+            event to create new mappers to record change-audit histories on
+            objects.
+
+        """
+
+    def before_mapper_configured(
+        self, mapper: Mapper[_O], class_: Type[_O]
+    ) -> None:
+        """Called right before a specific mapper is to be configured.
+
+        This event is intended to allow a specific mapper to be skipped during
+        the configure step, by returning the :attr:`.orm.interfaces.EXT_SKIP`
+        symbol which indicates to the :func:`.configure_mappers` call that this
+        particular mapper (or hierarchy of mappers, if ``propagate=True`` is
+        used) should be skipped in the current configuration run. When one or
+        more mappers are skipped, the "new mappers" flag will remain set,
+        meaning the :func:`.configure_mappers` function will continue to be
+        called when mappers are used, to continue to try to configure all
+        available mappers.
+
+        In comparison to the other configure-level events,
+        :meth:`.MapperEvents.before_configured`,
+        :meth:`.MapperEvents.after_configured`, and
+        :meth:`.MapperEvents.mapper_configured`, the
+        :meth:`.MapperEvents.before_mapper_configured` event provides for a
+        meaningful return value when it is registered with the ``retval=True``
+        parameter.
+
+        .. versionadded:: 1.3
+
+        e.g.::
+
+            from sqlalchemy.orm import EXT_SKIP
+
+            Base = declarative_base()
+
+            DontConfigureBase = declarative_base()
+
+
+            @event.listens_for(
+                DontConfigureBase,
+                "before_mapper_configured",
+                retval=True,
+                propagate=True,
+            )
+            def dont_configure(mapper, cls):
+                return EXT_SKIP
+
+        .. seealso::
+
+            :meth:`.MapperEvents.before_configured`
+
+            :meth:`.MapperEvents.after_configured`
+
+            :meth:`.MapperEvents.mapper_configured`
+
+        """
+
+    def mapper_configured(self, mapper: Mapper[_O], class_: Type[_O]) -> None:
+        r"""Called when a specific mapper has completed its own configuration
+        within the scope of the :func:`.configure_mappers` call.
+
+        The :meth:`.MapperEvents.mapper_configured` event is invoked
+        for each mapper that is encountered when the
+        :func:`_orm.configure_mappers` function proceeds through the current
+        list of not-yet-configured mappers.
+        :func:`_orm.configure_mappers` is typically invoked
+        automatically as mappings are first used, as well as each time
+        new mappers have been made available and new mapper use is
+        detected.
+
+        When the event is called, the mapper should be in its final
+        state, but **not including backrefs** that may be invoked from
+        other mappers; they might still be pending within the
+        configuration operation.    Bidirectional relationships that
+        are instead configured via the
+        :paramref:`.orm.relationship.back_populates` argument
+        *will* be fully available, since this style of relationship does not
+        rely upon other possibly-not-configured mappers to know that they
+        exist.
+
+        For an event that is guaranteed to have **all** mappers ready
+        to go including backrefs that are defined only on other
+        mappings, use the :meth:`.MapperEvents.after_configured`
+        event; this event invokes only after all known mappings have been
+        fully configured.
+
+        The :meth:`.MapperEvents.mapper_configured` event, unlike
+        :meth:`.MapperEvents.before_configured` or
+        :meth:`.MapperEvents.after_configured`,
+        is called for each mapper/class individually, and the mapper is
+        passed to the event itself.  It is also called exactly once for
+        a particular mapper.  The event is therefore useful for
+        configurational steps that benefit from being invoked just once
+        on a per-mapper basis and that don't require "backref"
+        configurations to be ready yet.
+
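+        For example, a minimal sketch of a one-time, per-mapper setup
+        step::
+
+            from sqlalchemy import event
+            from sqlalchemy.orm import Mapper
+
+
+            @event.listens_for(Mapper, "mapper_configured")
+            def on_mapper_configured(mapper, class_):
+                # invoked exactly once per mapper; backrefs from other
+                # mappers may still be pending at this point
+                print("configured:", class_.__name__)
+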
+        :param mapper: the :class:`_orm.Mapper` which is the target
+         of this event.
+        :param class\_: the mapped class.
+
+        .. seealso::
+
+            :meth:`.MapperEvents.before_configured`
+
+            :meth:`.MapperEvents.after_configured`
+
+            :meth:`.MapperEvents.before_mapper_configured`
+
+        """
+        # TODO: need coverage for this event
+
+    def before_configured(self) -> None:
+        """Called before a series of mappers have been configured.
+
+        The :meth:`.MapperEvents.before_configured` event is invoked
+        each time the :func:`_orm.configure_mappers` function is
+        invoked, before the function has done any of its work.
+        :func:`_orm.configure_mappers` is typically invoked
+        automatically as mappings are first used, as well as each time
+        new mappers have been made available and new mapper use is
+        detected.
+
+        This event can **only** be applied to the :class:`_orm.Mapper` class,
+        and not to individual mappings or mapped classes. It is only invoked
+        for all mappings as a whole::
+
+            from sqlalchemy.orm import Mapper
+
+
+            @event.listens_for(Mapper, "before_configured")
+            def go(): ...
+
+        Contrast this event to :meth:`.MapperEvents.after_configured`,
+        which is invoked after the series of mappers has been configured,
+        as well as :meth:`.MapperEvents.before_mapper_configured`
+        and :meth:`.MapperEvents.mapper_configured`, which are both invoked
+        on a per-mapper basis.
+
+        Theoretically this event is called once per
+        application, but is actually called any time new mappers
+        are to be affected by a :func:`_orm.configure_mappers`
+        call.   If new mappings are constructed after existing ones have
+        already been used, this event will likely be called again.  To ensure
+        that a particular event is only called once and no further, the
+        ``once=True`` argument (new in 0.9.4) can be applied::
+
+            from sqlalchemy.orm import Mapper
+
+
+            @event.listens_for(Mapper, "before_configured", once=True)
+            def go(): ...
+
+        .. seealso::
+
+            :meth:`.MapperEvents.before_mapper_configured`
+
+            :meth:`.MapperEvents.mapper_configured`
+
+            :meth:`.MapperEvents.after_configured`
+
+        """
+
+    def after_configured(self) -> None:
+        """Called after a series of mappers have been configured.
+
+        The :meth:`.MapperEvents.after_configured` event is invoked
+        each time the :func:`_orm.configure_mappers` function is
+        invoked, after the function has completed its work.
+        :func:`_orm.configure_mappers` is typically invoked
+        automatically as mappings are first used, as well as each time
+        new mappers have been made available and new mapper use is
+        detected.
+
+        Contrast this event to the :meth:`.MapperEvents.mapper_configured`
+        event, which is called on a per-mapper basis while the configuration
+        operation proceeds; unlike that event, when this event is invoked,
+        all cross-configurations (e.g. backrefs) will also have been made
+        available for any mappers that were pending.
+        Also contrast to :meth:`.MapperEvents.before_configured`,
+        which is invoked before the series of mappers has been configured.
+
+        This event can **only** be applied to the :class:`_orm.Mapper` class,
+        and not to individual mappings or
+        mapped classes.  It is only invoked for all mappings as a whole::
+
+            from sqlalchemy.orm import Mapper
+
+
+            @event.listens_for(Mapper, "after_configured")
+            def go(): ...
+
+        Theoretically this event is called once per
+        application, but is actually called any time new mappers
+        have been affected by a :func:`_orm.configure_mappers`
+        call.   If new mappings are constructed after existing ones have
+        already been used, this event will likely be called again.  To ensure
+        that a particular event is only called once and no further, the
+        ``once=True`` argument (new in 0.9.4) can be applied::
+
+            from sqlalchemy.orm import Mapper
+
+
+            @event.listens_for(Mapper, "after_configured", once=True)
+            def go(): ...
+
+        .. seealso::
+
+            :meth:`.MapperEvents.before_mapper_configured`
+
+            :meth:`.MapperEvents.mapper_configured`
+
+            :meth:`.MapperEvents.before_configured`
+
+        """
+
+    def before_insert(
+        self, mapper: Mapper[_O], connection: Connection, target: _O
+    ) -> None:
+        """Receive an object instance before an INSERT statement
+        is emitted corresponding to that instance.
+
+        .. note:: this event **only** applies to the
+           :ref:`session flush operation <session_flushing>`
+           and does **not** apply to the ORM DML operations described at
+           :ref:`orm_expression_update_delete`.  To intercept ORM
+           DML events, use :meth:`_orm.SessionEvents.do_orm_execute`.
+
+        This event is used to modify local, non-object related
+        attributes on the instance before an INSERT occurs, as well
+        as to emit additional SQL statements on the given
+        connection.
+
+        The event is often called for a batch of objects of the
+        same class before their INSERT statements are emitted at
+        once in a later step. In the extremely rare case that
+        this is not desirable, the :class:`_orm.Mapper` object can be
+        configured with ``batch=False``, which will cause
+        batches of instances to be broken up into individual
+        (and more poorly performing) event->persist->event
+        steps.
+
+        .. warning::
+
+            Mapper-level flush events only allow **very limited operations**,
+            on attributes local to the row being operated upon only,
+            as well as allowing any SQL to be emitted on the given
+            :class:`_engine.Connection`.  **Please read fully** the notes
+            at :ref:`session_persistence_mapper` for guidelines on using
+            these methods; generally, the :meth:`.SessionEvents.before_flush`
+            method should be preferred for general on-flush changes.
+
+        :param mapper: the :class:`_orm.Mapper` which is the target
+         of this event.
+        :param connection: the :class:`_engine.Connection` being used to
+         emit INSERT statements for this instance.  This
+         provides a handle into the current transaction on the
+         target database specific to this instance.
+        :param target: the mapped instance being persisted.  If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :return: No return value is supported by this event.
+
+        .. seealso::
+
+            :ref:`session_persistence_events`
+
+        """
+
+    def after_insert(
+        self, mapper: Mapper[_O], connection: Connection, target: _O
+    ) -> None:
+        """Receive an object instance after an INSERT statement
+        is emitted corresponding to that instance.
+
+        .. note:: this event **only** applies to the
+           :ref:`session flush operation <session_flushing>`
+           and does **not** apply to the ORM DML operations described at
+           :ref:`orm_expression_update_delete`.  To intercept ORM
+           DML events, use :meth:`_orm.SessionEvents.do_orm_execute`.
+
+        This event is used to modify in-Python-only
+        state on the instance after an INSERT occurs, as well
+        as to emit additional SQL statements on the given
+        connection.
+
+        The event is often called for a batch of objects of the
+        same class after their INSERT statements have been
+        emitted at once in a previous step. In the extremely
+        rare case that this is not desirable, the
+        :class:`_orm.Mapper` object can be configured with ``batch=False``,
+        which will cause batches of instances to be broken up
+        into individual (and more poorly performing)
+        event->persist->event steps.
+
+        .. warning::
+
+            Mapper-level flush events only allow **very limited operations**,
+            on attributes local to the row being operated upon only,
+            as well as allowing any SQL to be emitted on the given
+            :class:`_engine.Connection`.  **Please read fully** the notes
+            at :ref:`session_persistence_mapper` for guidelines on using
+            these methods; generally, the :meth:`.SessionEvents.before_flush`
+            method should be preferred for general on-flush changes.
+
+        :param mapper: the :class:`_orm.Mapper` which is the target
+         of this event.
+        :param connection: the :class:`_engine.Connection` being used to
+         emit INSERT statements for this instance.  This
+         provides a handle into the current transaction on the
+         target database specific to this instance.
+        :param target: the mapped instance being persisted.  If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :return: No return value is supported by this event.
+
+        .. seealso::
+
+            :ref:`session_persistence_events`
+
+        """
+
+    def before_update(
+        self, mapper: Mapper[_O], connection: Connection, target: _O
+    ) -> None:
+        """Receive an object instance before an UPDATE statement
+        is emitted corresponding to that instance.
+
+        .. note:: this event **only** applies to the
+           :ref:`session flush operation <session_flushing>`
+           and does **not** apply to the ORM DML operations described at
+           :ref:`orm_expression_update_delete`.  To intercept ORM
+           DML events, use :meth:`_orm.SessionEvents.do_orm_execute`.
+
+        This event is used to modify local, non-object related
+        attributes on the instance before an UPDATE occurs, as well
+        as to emit additional SQL statements on the given
+        connection.
+
+        This method is called for all instances that are
+        marked as "dirty", *even those which have no net changes
+        to their column-based attributes*. An object is marked
+        as dirty when any of its column-based attributes have a
+        "set attribute" operation called or when any of its
+        collections are modified. If, at update time, no
+        column-based attributes have any net changes, no UPDATE
+        statement will be issued. This means that an instance
+        being sent to :meth:`~.MapperEvents.before_update` is
+        *not* a guarantee that an UPDATE statement will be
+        issued, although you can affect the outcome here by
+        modifying attributes so that a net change in value does
+        exist.
+
+        To detect if the column-based attributes on the object have net
+        changes, and will therefore generate an UPDATE statement, use
+        ``object_session(instance).is_modified(instance,
+        include_collections=False)``.
+
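+        For example, a minimal sketch of this check, with ``SomeClass`` as
+        a hypothetical mapped class having an ``updated_at`` column::
+
+            import datetime
+
+            from sqlalchemy import event
+            from sqlalchemy.orm import object_session
+
+
+            @event.listens_for(SomeClass, "before_update")
+            def timestamp_before_update(mapper, connection, target):
+                session = object_session(target)
+                if session is not None and session.is_modified(
+                    target, include_collections=False
+                ):
+                    # a net column change exists, so an UPDATE will be
+                    # emitted for this instance
+                    target.updated_at = datetime.datetime.utcnow()
+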
+        The event is often called for a batch of objects of the
+        same class before their UPDATE statements are emitted at
+        once in a later step. In the extremely rare case that
+        this is not desirable, the :class:`_orm.Mapper` can be
+        configured with ``batch=False``, which will cause
+        batches of instances to be broken up into individual
+        (and more poorly performing) event->persist->event
+        steps.
+
+        .. warning::
+
+            Mapper-level flush events only allow **very limited operations**,
+            on attributes local to the row being operated upon only,
+            as well as allowing any SQL to be emitted on the given
+            :class:`_engine.Connection`.  **Please read fully** the notes
+            at :ref:`session_persistence_mapper` for guidelines on using
+            these methods; generally, the :meth:`.SessionEvents.before_flush`
+            method should be preferred for general on-flush changes.
+
+        :param mapper: the :class:`_orm.Mapper` which is the target
+         of this event.
+        :param connection: the :class:`_engine.Connection` being used to
+         emit UPDATE statements for this instance.  This
+         provides a handle into the current transaction on the
+         target database specific to this instance.
+        :param target: the mapped instance being persisted.  If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :return: No return value is supported by this event.
+
+        .. seealso::
+
+            :ref:`session_persistence_events`
+
+        """
+
+    def after_update(
+        self, mapper: Mapper[_O], connection: Connection, target: _O
+    ) -> None:
+        """Receive an object instance after an UPDATE statement
+        is emitted corresponding to that instance.
+
+        .. note:: this event **only** applies to the
+           :ref:`session flush operation <session_flushing>`
+           and does **not** apply to the ORM DML operations described at
+           :ref:`orm_expression_update_delete`.  To intercept ORM
+           DML events, use :meth:`_orm.SessionEvents.do_orm_execute`.
+
+        This event is used to modify in-Python-only
+        state on the instance after an UPDATE occurs, as well
+        as to emit additional SQL statements on the given
+        connection.
+
+        This method is called for all instances that are
+        marked as "dirty", *even those which have no net changes
+        to their column-based attributes*, and for which
+        no UPDATE statement has proceeded. An object is marked
+        as dirty when any of its column-based attributes have a
+        "set attribute" operation called or when any of its
+        collections are modified. If, at update time, no
+        column-based attributes have any net changes, no UPDATE
+        statement will be issued. This means that an instance
+        being sent to :meth:`~.MapperEvents.after_update` is
+        *not* a guarantee that an UPDATE statement has been
+        issued.
+
+        To detect if the column-based attributes on the object have net
+        changes, and therefore resulted in an UPDATE statement, use
+        ``object_session(instance).is_modified(instance,
+        include_collections=False)``.
+
+        The event is often called for a batch of objects of the
+        same class after their UPDATE statements have been emitted at
+        once in a previous step. In the extremely rare case that
+        this is not desirable, the :class:`_orm.Mapper` can be
+        configured with ``batch=False``, which will cause
+        batches of instances to be broken up into individual
+        (and more poorly performing) event->persist->event
+        steps.
+
+        .. warning::
+
+            Mapper-level flush events only allow **very limited operations**,
+            on attributes local to the row being operated upon only,
+            as well as allowing any SQL to be emitted on the given
+            :class:`_engine.Connection`.  **Please read fully** the notes
+            at :ref:`session_persistence_mapper` for guidelines on using
+            these methods; generally, the :meth:`.SessionEvents.before_flush`
+            method should be preferred for general on-flush changes.
+
+        :param mapper: the :class:`_orm.Mapper` which is the target
+         of this event.
+        :param connection: the :class:`_engine.Connection` being used to
+         emit UPDATE statements for this instance.  This
+         provides a handle into the current transaction on the
+         target database specific to this instance.
+        :param target: the mapped instance being persisted.  If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :return: No return value is supported by this event.
+
+        .. seealso::
+
+            :ref:`session_persistence_events`
+
+        """
+
+    def before_delete(
+        self, mapper: Mapper[_O], connection: Connection, target: _O
+    ) -> None:
+        """Receive an object instance before a DELETE statement
+        is emitted corresponding to that instance.
+
+        .. note:: this event **only** applies to the
+           :ref:`session flush operation <session_flushing>`
+           and does **not** apply to the ORM DML operations described at
+           :ref:`orm_expression_update_delete`.  To intercept ORM
+           DML events, use :meth:`_orm.SessionEvents.do_orm_execute`.
+
+        This event is used to emit additional SQL statements on
+        the given connection as well as to perform application
+        specific bookkeeping related to a deletion event.
+
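+        For example, a minimal sketch that archives rows into a
+        hypothetical ``some_archive`` table, with ``SomeClass`` as a
+        hypothetical mapped class::
+
+            from sqlalchemy import event, text
+
+
+            @event.listens_for(SomeClass, "before_delete")
+            def archive_before_delete(mapper, connection, target):
+                # emit additional SQL on the same connection and
+                # transaction as the flush
+                connection.execute(
+                    text("INSERT INTO some_archive (id) VALUES (:id)"),
+                    {"id": target.id},
+                )
+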
+        The event is often called for a batch of objects of the
+        same class before their DELETE statements are emitted at
+        once in a later step.
+
+        .. warning::
+
+            Mapper-level flush events only allow **very limited operations**,
+            on attributes local to the row being operated upon only,
+            as well as allowing any SQL to be emitted on the given
+            :class:`_engine.Connection`.  **Please read fully** the notes
+            at :ref:`session_persistence_mapper` for guidelines on using
+            these methods; generally, the :meth:`.SessionEvents.before_flush`
+            method should be preferred for general on-flush changes.
+
+        :param mapper: the :class:`_orm.Mapper` which is the target
+         of this event.
+        :param connection: the :class:`_engine.Connection` being used to
+         emit DELETE statements for this instance.  This
+         provides a handle into the current transaction on the
+         target database specific to this instance.
+        :param target: the mapped instance being deleted.  If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :return: No return value is supported by this event.
+
+        .. seealso::
+
+            :ref:`session_persistence_events`
+
+        """
+
+    def after_delete(
+        self, mapper: Mapper[_O], connection: Connection, target: _O
+    ) -> None:
+        """Receive an object instance after a DELETE statement
+        has been emitted corresponding to that instance.
+
+        .. note:: this event **only** applies to the
+           :ref:`session flush operation <session_flushing>`
+           and does **not** apply to the ORM DML operations described at
+           :ref:`orm_expression_update_delete`.  To intercept ORM
+           DML events, use :meth:`_orm.SessionEvents.do_orm_execute`.
+
+        This event is used to emit additional SQL statements on
+        the given connection as well as to perform application
+        specific bookkeeping related to a deletion event.
+
+        The event is often called for a batch of objects of the
+        same class after their DELETE statements have been emitted at
+        once in a previous step.
+
+        .. warning::
+
+            Mapper-level flush events only allow **very limited operations**,
+            on attributes local to the row being operated upon only,
+            as well as allowing any SQL to be emitted on the given
+            :class:`_engine.Connection`.  **Please read fully** the notes
+            at :ref:`session_persistence_mapper` for guidelines on using
+            these methods; generally, the :meth:`.SessionEvents.before_flush`
+            method should be preferred for general on-flush changes.
+
+        :param mapper: the :class:`_orm.Mapper` which is the target
+         of this event.
+        :param connection: the :class:`_engine.Connection` being used to
+         emit DELETE statements for this instance.  This
+         provides a handle into the current transaction on the
+         target database specific to this instance.
+        :param target: the mapped instance being deleted.  If
+         the event is configured with ``raw=True``, this will
+         instead be the :class:`.InstanceState` state-management
+         object associated with the instance.
+        :return: No return value is supported by this event.
+
+        .. seealso::
+
+            :ref:`session_persistence_events`
+
+        """
+
+
+class _MapperEventsHold(_EventsHold[_ET]):
+    all_holds = weakref.WeakKeyDictionary()
+
+    def resolve(
+        self, class_: Union[Type[_T], _InternalEntityType[_T]]
+    ) -> Optional[Mapper[_T]]:
+        return _mapper_or_none(class_)
+
+    class HoldMapperEvents(_EventsHold.HoldEvents[_ET], MapperEvents):  # type: ignore [misc] # noqa: E501
+        pass
+
+    dispatch = event.dispatcher(HoldMapperEvents)
+
+
+_sessionevents_lifecycle_event_names: Set[str] = set()
+
+
+class SessionEvents(event.Events[Session]):
+    """Define events specific to :class:`.Session` lifecycle.
+
+    e.g.::
+
+        from sqlalchemy import event
+        from sqlalchemy.orm import sessionmaker
+
+
+        def my_before_commit(session):
+            print("before commit!")
+
+
+        Session = sessionmaker()
+
+        event.listen(Session, "before_commit", my_before_commit)
+
+    The :func:`~.event.listen` function will accept
+    :class:`.Session` objects as well as the return result
+    of :class:`~.sessionmaker()` and :class:`~.scoped_session()`.
+
+    Additionally, it accepts the :class:`.Session` class which
+    will apply listeners to all :class:`.Session` instances
+    globally.
+
+    :param raw=False: When True, the "target" argument passed
+       to applicable event listener functions that work on individual
+       objects will be the instance's :class:`.InstanceState` management
+       object, rather than the mapped instance itself.
+
+       .. versionadded:: 1.3.14
+
+    :param restore_load_context=False: Applies to the
+       :meth:`.SessionEvents.loaded_as_persistent` event.  Restores the loader
+       context of the object when the event hook is complete, so that ongoing
+       eager load operations continue to target the object appropriately.  A
+       warning is emitted if the object is moved to a new loader context from
+       within this event if this flag is not set.
+
+       .. versionadded:: 1.3.14
+
+    """
+
+    _target_class_doc = "SomeSessionClassOrObject"
+
+    _dispatch_target = Session
+
+    def _lifecycle_event(  # type: ignore [misc]
+        fn: Callable[[SessionEvents, Session, Any], None]
+    ) -> Callable[[SessionEvents, Session, Any], None]:
+        _sessionevents_lifecycle_event_names.add(fn.__name__)
+        return fn
+
+    @classmethod
+    def _accept_with(  # type: ignore [return]
+        cls, target: Any, identifier: str
+    ) -> Union[Session, type]:
+        if isinstance(target, scoped_session):
+            target = target.session_factory
+            if not isinstance(target, sessionmaker) and (
+                not isinstance(target, type) or not issubclass(target, Session)
+            ):
+                raise exc.ArgumentError(
+                    "Session event listen on a scoped_session "
+                    "requires that its creation callable "
+                    "is associated with the Session class."
+                )
+
+        if isinstance(target, sessionmaker):
+            return target.class_
+        elif isinstance(target, type):
+            if issubclass(target, scoped_session):
+                return Session
+            elif issubclass(target, Session):
+                return target
+        elif isinstance(target, Session):
+            return target
+        elif hasattr(target, "_no_async_engine_events"):
+            target._no_async_engine_events()
+        else:
+            # allows alternate SessionEvents-like-classes to be consulted
+            return event.Events._accept_with(target, identifier)  # type: ignore [return-value] # noqa: E501
+
+    @classmethod
+    def _listen(
+        cls,
+        event_key: Any,
+        *,
+        raw: bool = False,
+        restore_load_context: bool = False,
+        **kw: Any,
+    ) -> None:
+        is_instance_event = (
+            event_key.identifier in _sessionevents_lifecycle_event_names
+        )
+
+        if is_instance_event:
+            if not raw or restore_load_context:
+                fn = event_key._listen_fn
+
+                def wrap(
+                    session: Session,
+                    state: InstanceState[_O],
+                    *arg: Any,
+                    **kw: Any,
+                ) -> Optional[Any]:
+                    if not raw:
+                        target = state.obj()
+                        if target is None:
+                            # existing behavior is that if the object is
+                            # garbage collected, no event is emitted
+                            return None
+                    else:
+                        target = state  # type: ignore [assignment]
+                    if restore_load_context:
+                        runid = state.runid
+                    try:
+                        return fn(session, target, *arg, **kw)
+                    finally:
+                        if restore_load_context:
+                            state.runid = runid
+
+                event_key = event_key.with_wrapper(wrap)
+
+        event_key.base_listen(**kw)
+
+    def do_orm_execute(self, orm_execute_state: ORMExecuteState) -> None:
+        """Intercept statement executions that occur on behalf of an
+        ORM :class:`.Session` object.
+
+        This event is invoked for all top-level SQL statements emitted from the
+        :meth:`_orm.Session.execute` method, as well as related methods such as
+        :meth:`_orm.Session.scalars` and :meth:`_orm.Session.scalar`. As of
+        SQLAlchemy 1.4, all ORM queries run through these methods and
+        therefore participate in this event.
+        This event hook does **not** apply to the queries that are
+        emitted internally within the ORM flush process, i.e. the
+        process described at :ref:`session_flushing`.
+
+        .. note::  The :meth:`_orm.SessionEvents.do_orm_execute` event hook
+           is triggered **for ORM statement executions only**, meaning those
+           invoked via the :meth:`_orm.Session.execute` and similar methods on
+           the :class:`_orm.Session` object. It does **not** trigger for
+           statements that are invoked by SQLAlchemy Core only, i.e. statements
+           invoked directly using :meth:`_engine.Connection.execute` or
+           otherwise originating from an :class:`_engine.Engine` object without
+           any :class:`_orm.Session` involved. To intercept **all** SQL
+           executions regardless of whether the Core or ORM APIs are in use,
+           see the event hooks at :class:`.ConnectionEvents`, such as
+           :meth:`.ConnectionEvents.before_execute` and
+           :meth:`.ConnectionEvents.before_cursor_execute`.
+
+           Also, this event hook does **not** apply to queries that are
+           emitted internally within the ORM flush process,
+           i.e. the process described at :ref:`session_flushing`; to
+           intercept steps within the flush process, see the event
+           hooks described at :ref:`session_persistence_events` as
+           well as :ref:`session_persistence_mapper`.
+
+        This event is a ``do_`` event, meaning it has the capability to replace
+        the operation that the :meth:`_orm.Session.execute` method normally
+        performs.  The intended use for this includes sharding and
+        result-caching schemes which may seek to invoke the same statement
+        across  multiple database connections, returning a result that is
+        merged from each of them, or which don't invoke the statement at all,
+        instead returning data from a cache.
+
+        The hook intends to replace the use of the
+        ``Query._execute_and_instances`` method that could be subclassed prior
+        to SQLAlchemy 1.4.
+
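+        For example, a minimal sketch that applies an execution option to
+        every ORM SELECT, assuming an existing :class:`.Session` instance
+        ``session``::
+
+            from sqlalchemy import event
+
+
+            @event.listens_for(session, "do_orm_execute")
+            def intercept_select(orm_execute_state):
+                if orm_execute_state.is_select:
+                    # attach an option; execution then proceeds normally
+                    orm_execute_state.update_execution_options(
+                        populate_existing=True
+                    )
+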
+        :param orm_execute_state: an instance of :class:`.ORMExecuteState`
+         which contains all information about the current execution, as well
+         as helper functions used to derive other commonly required
+         information.   See that object for details.
+
+        .. seealso::
+
+            :ref:`session_execute_events` - top level documentation on how
+            to use :meth:`_orm.SessionEvents.do_orm_execute`
+
+            :class:`.ORMExecuteState` - the object passed to the
+            :meth:`_orm.SessionEvents.do_orm_execute` event which contains
+            all information about the statement to be invoked.  It also
+            provides an interface to extend the current statement, options,
+            and parameters as well as an option that allows programmatic
+            invocation of the statement at any point.
+
+            :ref:`examples_session_orm_events` - includes examples of using
+            :meth:`_orm.SessionEvents.do_orm_execute`
+
+            :ref:`examples_caching` - an example of how to integrate
+            Dogpile caching with the ORM :class:`_orm.Session` making use
+            of the :meth:`_orm.SessionEvents.do_orm_execute` event hook.
+
+            :ref:`examples_sharding` - the Horizontal Sharding example /
+            extension relies upon the
+            :meth:`_orm.SessionEvents.do_orm_execute` event hook to invoke a
+            SQL statement on multiple backends and return a merged result.
+
+
+        .. versionadded:: 1.4
+
+        """
+
+    def after_transaction_create(
+        self, session: Session, transaction: SessionTransaction
+    ) -> None:
+        """Execute when a new :class:`.SessionTransaction` is created.
+
+        This event differs from :meth:`~.SessionEvents.after_begin`
+        in that it occurs for each :class:`.SessionTransaction`
+        overall, as opposed to when transactions are begun
+        on individual database connections.  It is also invoked
+        for nested transactions and subtransactions, and is always
+        matched by a corresponding
+        :meth:`~.SessionEvents.after_transaction_end` event
+        (assuming normal operation of the :class:`.Session`).
+
+        :param session: the target :class:`.Session`.
+        :param transaction: the target :class:`.SessionTransaction`.
+
+         To detect if this is the outermost
+         :class:`.SessionTransaction`, as opposed to a "subtransaction" or a
+         SAVEPOINT, test that the :attr:`.SessionTransaction.parent` attribute
+         is ``None``::
+
+                @event.listens_for(session, "after_transaction_create")
+                def after_transaction_create(session, transaction):
+                    if transaction.parent is None:
+                        ...  # work with top-level transaction
+
+         To detect if the :class:`.SessionTransaction` is a SAVEPOINT, use the
+         :attr:`.SessionTransaction.nested` attribute::
+
+                @event.listens_for(session, "after_transaction_create")
+                def after_transaction_create(session, transaction):
+                    if transaction.nested:
+                        ...  # work with SAVEPOINT transaction
+
+        .. seealso::
+
+            :class:`.SessionTransaction`
+
+            :meth:`~.SessionEvents.after_transaction_end`
+
+        """
+
+    def after_transaction_end(
+        self, session: Session, transaction: SessionTransaction
+    ) -> None:
+        """Execute when the span of a :class:`.SessionTransaction` ends.
+
+        This event differs from :meth:`~.SessionEvents.after_commit`
+        in that it corresponds to all :class:`.SessionTransaction`
+        objects in use, including those for nested transactions
+        and subtransactions, and is always matched by a corresponding
+        :meth:`~.SessionEvents.after_transaction_create` event.
+
+        :param session: the target :class:`.Session`.
+        :param transaction: the target :class:`.SessionTransaction`.
+
+         To detect if this is the outermost
+         :class:`.SessionTransaction`, as opposed to a "subtransaction" or a
+         SAVEPOINT, test that the :attr:`.SessionTransaction.parent` attribute
+         is ``None``::
+
+                @event.listens_for(session, "after_transaction_end")
+                def after_transaction_end(session, transaction):
+                    if transaction.parent is None:
+                        ...  # work with top-level transaction
+
+         To detect if the :class:`.SessionTransaction` is a SAVEPOINT, use the
+         :attr:`.SessionTransaction.nested` attribute::
+
+                @event.listens_for(session, "after_transaction_end")
+                def after_transaction_end(session, transaction):
+                    if transaction.nested:
+                        ...  # work with SAVEPOINT transaction
+
+        .. seealso::
+
+            :class:`.SessionTransaction`
+
+            :meth:`~.SessionEvents.after_transaction_create`
+
+        """
+
+    def before_commit(self, session: Session) -> None:
+        """Execute before commit is called.
+
+        .. note::
+
+            The :meth:`~.SessionEvents.before_commit` hook is *not* per-flush,
+            that is, the :class:`.Session` can emit SQL to the database
+            many times within the scope of a transaction.
+            For interception of these events, use the
+            :meth:`~.SessionEvents.before_flush`,
+            :meth:`~.SessionEvents.after_flush`, or
+            :meth:`~.SessionEvents.after_flush_postexec`
+            events.
+
+        :param session: The target :class:`.Session`.
+
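+        For example, a minimal sketch of a listener, registered against the
+        :class:`.Session` class so that it applies to all sessions::
+
+            @event.listens_for(Session, "before_commit")
+            def receive_before_commit(session):
+                # SQL may still be emitted here; the transaction
+                # is still open at this point
+                print("before commit:", session)
+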
+        .. seealso::
+
+            :meth:`~.SessionEvents.after_commit`
+
+            :meth:`~.SessionEvents.after_begin`
+
+            :meth:`~.SessionEvents.after_transaction_create`
+
+            :meth:`~.SessionEvents.after_transaction_end`
+
+        """
+
+    def after_commit(self, session: Session) -> None:
+        """Execute after a commit has occurred.
+
+        .. note::
+
+            The :meth:`~.SessionEvents.after_commit` hook is *not* per-flush,
+            that is, the :class:`.Session` can emit SQL to the database
+            many times within the scope of a transaction.
+            For interception of these events, use the
+            :meth:`~.SessionEvents.before_flush`,
+            :meth:`~.SessionEvents.after_flush`, or
+            :meth:`~.SessionEvents.after_flush_postexec`
+            events.
+
+        .. note::
+
+            The :class:`.Session` is not in an active transaction
+            when the :meth:`~.SessionEvents.after_commit` event is invoked,
+            and therefore cannot emit SQL.  To emit SQL corresponding to
+            every transaction, use the :meth:`~.SessionEvents.before_commit`
+            event.
+
+        :param session: The target :class:`.Session`.
+
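+        For example, a sketch of a post-commit hook that performs no SQL,
+        as required by this event::
+
+            @event.listens_for(Session, "after_commit")
+            def receive_after_commit(session):
+                # the transaction is over; suitable only for
+                # non-database tasks such as notifications
+                print("commit complete for", session)
+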
+        .. seealso::
+
+            :meth:`~.SessionEvents.before_commit`
+
+            :meth:`~.SessionEvents.after_begin`
+
+            :meth:`~.SessionEvents.after_transaction_create`
+
+            :meth:`~.SessionEvents.after_transaction_end`
+
+        """
+
+    def after_rollback(self, session: Session) -> None:
+        """Execute after a real DBAPI rollback has occurred.
+
+        Note that this event only fires when the *actual* rollback against
+        the database occurs - it does *not* fire each time the
+        :meth:`.Session.rollback` method is called, if the underlying
+        DBAPI transaction has already been rolled back.  In many
+        cases, the :class:`.Session` will not be in
+        an "active" state during this event, as the current
+        transaction is not valid.   To acquire a :class:`.Session`
+        which is active after the outermost rollback has proceeded,
+        use the :meth:`.SessionEvents.after_soft_rollback` event, checking the
+        :attr:`.Session.is_active` flag.
+
+        :param session: The target :class:`.Session`.
+
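+        A sketch of a listener; the logging calls are illustrative::
+
+            import logging
+
+            log = logging.getLogger(__name__)
+
+
+            @event.listens_for(Session, "after_rollback")
+            def receive_after_rollback(session):
+                # the transaction is no longer valid; do not emit SQL here
+                log.warning("DBAPI-level rollback occurred")
+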
+        """
+
+    def after_soft_rollback(
+        self, session: Session, previous_transaction: SessionTransaction
+    ) -> None:
+        """Execute after any rollback has occurred, including "soft"
+        rollbacks that don't actually emit at the DBAPI level.
+
+        This corresponds to both nested and outer rollbacks, i.e.
+        the innermost rollback that calls the DBAPI's
+        rollback() method, as well as the enclosing rollback
+        calls that only pop themselves from the transaction stack.
+
+        The given :class:`.Session` can be used to invoke SQL and
+        :meth:`.Session.query` operations after an outermost rollback
+        by first checking the :attr:`.Session.is_active` flag::
+
+            @event.listens_for(Session, "after_soft_rollback")
+            def do_something(session, previous_transaction):
+                if session.is_active:
+                    session.execute(text("select * from some_table"))
+
+        :param session: The target :class:`.Session`.
+        :param previous_transaction: The :class:`.SessionTransaction`
+         transactional marker object which was just closed.   The current
+         :class:`.SessionTransaction` for the given :class:`.Session` is
+         available via the :attr:`.Session.transaction` attribute.
+
+        """
+
+    def before_flush(
+        self,
+        session: Session,
+        flush_context: UOWTransaction,
+        instances: Optional[Sequence[_O]],
+    ) -> None:
+        """Execute before flush process has started.
+
+        :param session: The target :class:`.Session`.
+        :param flush_context: Internal :class:`.UOWTransaction` object
+         which handles the details of the flush.
+        :param instances: Usually ``None``, this is the collection of
+         objects which can be passed to the :meth:`.Session.flush` method
+         (note this usage is deprecated).
+
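+        A common pattern, sketched here, is to examine and adjust pending
+        objects before SQL is emitted; the ``created_at`` attribute is
+        illustrative::
+
+            import datetime
+
+
+            @event.listens_for(Session, "before_flush")
+            def receive_before_flush(session, flush_context, instances):
+                for obj in session.new:
+                    # stamp a hypothetical audit column on new objects
+                    if hasattr(obj, "created_at"):
+                        obj.created_at = datetime.datetime.now(datetime.UTC)
+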
+        .. seealso::
+
+            :meth:`~.SessionEvents.after_flush`
+
+            :meth:`~.SessionEvents.after_flush_postexec`
+
+            :ref:`session_persistence_events`
+
+        """
+
+    def after_flush(
+        self, session: Session, flush_context: UOWTransaction
+    ) -> None:
+        """Execute after flush has completed, but before commit has been
+        called.
+
+        Note that the session's state is still in pre-flush, i.e. the
+        'new', 'dirty', and 'deleted' lists still show pre-flush state,
+        as do the history settings on instance attributes.
+
+        .. warning:: This event runs after the :class:`.Session` has emitted
+           SQL to modify the database, but **before** it has altered its
+           internal state to reflect those changes, including that newly
+           inserted objects are placed into the identity map.  ORM operations
+           emitted within this event such as loads of related items
+           may produce new identity map entries that will immediately
+           be replaced, sometimes causing confusing results.  SQLAlchemy will
+           emit a warning for this condition as of version 1.3.9.
+
+        :param session: The target :class:`.Session`.
+        :param flush_context: Internal :class:`.UOWTransaction` object
+         which handles the details of the flush.
+
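+        A sketch of a listener that inspects the pre-flush bookkeeping::
+
+            @event.listens_for(Session, "after_flush")
+            def receive_after_flush(session, flush_context):
+                # session.new still shows the objects just INSERTed;
+                # the identity map has not yet been updated
+                for obj in session.new:
+                    print("flushed:", obj)
+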
+        .. seealso::
+
+            :meth:`~.SessionEvents.before_flush`
+
+            :meth:`~.SessionEvents.after_flush_postexec`
+
+            :ref:`session_persistence_events`
+
+        """
+
+    def after_flush_postexec(
+        self, session: Session, flush_context: UOWTransaction
+    ) -> None:
+        """Execute after flush has completed, and after the post-exec
+        state occurs.
+
+        This will be when the 'new', 'dirty', and 'deleted' lists are in
+        their final state.  An actual commit() may or may not have
+        occurred, depending on whether or not the flush started its own
+        transaction or participated in a larger transaction.
+
+        :param session: The target :class:`.Session`.
+        :param flush_context: Internal :class:`.UOWTransaction` object
+         which handles the details of the flush.
+
+
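+        For example, a sketch verifying that the bookkeeping collections
+        have reached their final, emptied state::
+
+            @event.listens_for(Session, "after_flush_postexec")
+            def receive_after_flush_postexec(session, flush_context):
+                assert not session.new
+                assert not session.deleted
+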
+        .. seealso::
+
+            :meth:`~.SessionEvents.before_flush`
+
+            :meth:`~.SessionEvents.after_flush`
+
+            :ref:`session_persistence_events`
+
+        """
+
+    def after_begin(
+        self,
+        session: Session,
+        transaction: SessionTransaction,
+        connection: Connection,
+    ) -> None:
+        """Execute after a transaction is begun on a connection.
+
+        .. note:: This event is called within the process of the
+          :class:`_orm.Session` modifying its own internal state.
+          To invoke SQL operations within this hook, use the
+          :class:`_engine.Connection` provided to the event;
+          do not run SQL operations using the :class:`_orm.Session`
+          directly.
+
+        :param session: The target :class:`.Session`.
+        :param transaction: The :class:`.SessionTransaction`.
+        :param connection: The :class:`_engine.Connection` object
+         which will be used for SQL statements.
+
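+        For example, a sketch that emits a statement on the new
+        transaction; the SQL shown is illustrative (PostgreSQL syntax)::
+
+            from sqlalchemy import text
+
+
+            @event.listens_for(Session, "after_begin")
+            def receive_after_begin(session, transaction, connection):
+                # use the Connection, not the Session, to emit SQL
+                connection.execute(text("SET statement_timeout = 1000"))
+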
+        .. seealso::
+
+            :meth:`~.SessionEvents.before_commit`
+
+            :meth:`~.SessionEvents.after_commit`
+
+            :meth:`~.SessionEvents.after_transaction_create`
+
+            :meth:`~.SessionEvents.after_transaction_end`
+
+        """
+
+    @_lifecycle_event
+    def before_attach(self, session: Session, instance: _O) -> None:
+        """Execute before an instance is attached to a session.
+
+        This is called before an add, delete or merge causes
+        the object to be part of the session.
+
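+        A minimal sketch of a listener::
+
+            @event.listens_for(Session, "before_attach")
+            def receive_before_attach(session, instance):
+                # the instance is not yet part of the session here
+                print("about to attach:", instance)
+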
+        .. seealso::
+
+            :meth:`~.SessionEvents.after_attach`
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def after_attach(self, session: Session, instance: _O) -> None:
+        """Execute after an instance is attached to a session.
+
+        This is called after an add, delete or merge.
+
+        .. note::
+
+           As of 0.8, this event fires off *after* the item
+           has been fully associated with the session, which is
+           different from previous releases.  For event
+           handlers that require that the object not yet
+           be part of session state (such as handlers which
+           may autoflush while the target object is not
+           yet complete), consider the
+           new :meth:`.before_attach` event.
+
+        .. seealso::
+
+            :meth:`~.SessionEvents.before_attach`
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @event._legacy_signature(
+        "0.9",
+        ["session", "query", "query_context", "result"],
+        lambda update_context: (
+            update_context.session,
+            update_context.query,
+            None,
+            update_context.result,
+        ),
+    )
+    def after_bulk_update(self, update_context: _O) -> None:
+        """Event for after the legacy :meth:`_orm.Query.update` method
+        has been called.
+
+        .. legacy:: The :meth:`_orm.SessionEvents.after_bulk_update` method
+           is a legacy event hook as of SQLAlchemy 2.0.   The event
+           **does not participate** in :term:`2.0 style` invocations
+           using :func:`_dml.update` documented at
+           :ref:`orm_queryguide_update_delete_where`.  For 2.0 style use,
+           the :meth:`_orm.SessionEvents.do_orm_execute` hook will intercept
+           these calls.
+
+        :param update_context: an "update context" object which contains
+         details about the update, including these attributes
+         (a usage sketch follows this list):
+
+            * ``session`` - the :class:`.Session` involved
+            * ``query`` - the :class:`_query.Query`
+              object that this update operation
+              was called upon.
+            * ``values`` - the "values" dictionary that was passed to
+              :meth:`_query.Query.update`.
+            * ``result`` - the :class:`_engine.CursorResult`
+              returned as a result of the
+              bulk UPDATE operation.
+
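+        For example, a sketch that logs the rows matched by a legacy bulk
+        UPDATE::
+
+            @event.listens_for(Session, "after_bulk_update")
+            def receive_after_bulk_update(update_context):
+                print("updated %d rows" % update_context.result.rowcount)
+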
+        .. versionchanged:: 1.4 the update_context no longer has a
+           ``QueryContext`` object associated with it.
+
+        .. seealso::
+
+            :meth:`.QueryEvents.before_compile_update`
+
+            :meth:`.SessionEvents.after_bulk_delete`
+
+        """
+
+    @event._legacy_signature(
+        "0.9",
+        ["session", "query", "query_context", "result"],
+        lambda delete_context: (
+            delete_context.session,
+            delete_context.query,
+            None,
+            delete_context.result,
+        ),
+    )
+    def after_bulk_delete(self, delete_context: _O) -> None:
+        """Event for after the legacy :meth:`_orm.Query.delete` method
+        has been called.
+
+        .. legacy:: The :meth:`_orm.SessionEvents.after_bulk_delete` method
+           is a legacy event hook as of SQLAlchemy 2.0.   The event
+           **does not participate** in :term:`2.0 style` invocations
+           using :func:`_dml.delete` documented at
+           :ref:`orm_queryguide_update_delete_where`.  For 2.0 style use,
+           the :meth:`_orm.SessionEvents.do_orm_execute` hook will intercept
+           these calls.
+
+        :param delete_context: a "delete context" object which contains
+         details about the delete, including these attributes:
+
+            * ``session`` - the :class:`.Session` involved
+            * ``query`` - the :class:`_query.Query`
+              object that this delete operation
+              was called upon.
+            * ``result`` - the :class:`_engine.CursorResult`
+              returned as a result of the
+              bulk DELETE operation.
+
+        .. versionchanged:: 1.4 the delete_context no longer has a
+           ``QueryContext`` object associated with it.
+
+        .. seealso::
+
+            :meth:`.QueryEvents.before_compile_delete`
+
+            :meth:`.SessionEvents.after_bulk_update`
+
+        """
+
+    @_lifecycle_event
+    def transient_to_pending(self, session: Session, instance: _O) -> None:
+        """Intercept the "transient to pending" transition for a specific
+        object.
+
+        This event is a specialization of the
+        :meth:`.SessionEvents.after_attach` event which is only invoked
+        for this specific transition.  It is invoked typically during the
+        :meth:`.Session.add` call.
+
+        :param session: target :class:`.Session`
+
+        :param instance: the ORM-mapped instance being operated upon.
+
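+        A minimal sketch of a listener for this transition::
+
+            @event.listens_for(Session, "transient_to_pending")
+            def receive_transient_to_pending(session, instance):
+                print("now pending:", instance)
+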
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def pending_to_transient(self, session: Session, instance: _O) -> None:
+        """Intercept the "pending to transient" transition for a specific
+        object.
+
+        This less common transition occurs when a pending object that has
+        not been flushed is evicted from the session; this can occur
+        when the :meth:`.Session.rollback` method rolls back the transaction,
+        or when the :meth:`.Session.expunge` method is used.
+
+        :param session: target :class:`.Session`
+
+        :param instance: the ORM-mapped instance being operated upon.
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def persistent_to_transient(self, session: Session, instance: _O) -> None:
+        """Intercept the "persistent to transient" transition for a specific
+        object.
+
+        This less common transition occurs when a pending object that
+        has been flushed is evicted from the session; this can occur
+        when the :meth:`.Session.rollback` method rolls back the transaction.
+
+        :param session: target :class:`.Session`
+
+        :param instance: the ORM-mapped instance being operated upon.
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def pending_to_persistent(self, session: Session, instance: _O) -> None:
+        """Intercept the "pending to persistent"" transition for a specific
+        object.
+
+        This event is invoked within the flush process, and is
+        similar to scanning the :attr:`.Session.new` collection within
+        the :meth:`.SessionEvents.after_flush` event.  However, in this
+        case the object has already been moved to the persistent state
+        when the event is called.
+
+        :param session: target :class:`.Session`
+
+        :param instance: the ORM-mapped instance being operated upon.
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def detached_to_persistent(self, session: Session, instance: _O) -> None:
+        """Intercept the "detached to persistent" transition for a specific
+        object.
+
+        This event is a specialization of the
+        :meth:`.SessionEvents.after_attach` event which is only invoked
+        for this specific transition.  It is invoked typically during the
+        :meth:`.Session.add` call, as well as during the
+        :meth:`.Session.delete` call if the object was not previously
+        associated with the
+        :class:`.Session` (note that an object marked as "deleted" remains
+        in the "persistent" state until the flush proceeds).
+
+        .. note::
+
+            If the object becomes persistent as part of a call to
+            :meth:`.Session.delete`, the object is **not** yet marked as
+            deleted when this event is called.  To detect deleted objects,
+            check the ``deleted`` flag sent to the
+            :meth:`.SessionEvents.persistent_to_detached` event after the
+            flush proceeds, or check the :attr:`.Session.deleted` collection
+            within the :meth:`.SessionEvents.before_flush` event if deleted
+            objects need to be intercepted before the flush.
+
+        :param session: target :class:`.Session`
+
+        :param instance: the ORM-mapped instance being operated upon.
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def loaded_as_persistent(self, session: Session, instance: _O) -> None:
+        """Intercept the "loaded as persistent" transition for a specific
+        object.
+
+        This event is invoked within the ORM loading process, and is invoked
+        very similarly to the :meth:`.InstanceEvents.load` event.  However,
+        the event here is linkable to a :class:`.Session` class or instance,
+        rather than to a mapper or class hierarchy, and integrates
+        with the other session lifecycle events smoothly.  The object
+        is guaranteed to be present in the session's identity map when
+        this event is called.
+
+        .. note:: This event is invoked within the loader process before
+           eager loaders may have been completed, and the object's state may
+           not be complete.  Additionally, invoking row-level refresh
+           operations on the object will place the object into a new loader
+           context, interfering with the existing load context.   See the note
+           on :meth:`.InstanceEvents.load` for background on making use of the
+           :paramref:`.SessionEvents.restore_load_context` parameter, which
+           works in the same manner as that of
+           :paramref:`.InstanceEvents.restore_load_context`, in order to
+           resolve this scenario.
+
+        :param session: target :class:`.Session`
+
+        :param instance: the ORM-mapped instance being operated upon.
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def persistent_to_deleted(self, session: Session, instance: _O) -> None:
+        """Intercept the "persistent to deleted" transition for a specific
+        object.
+
+        This event is invoked when a persistent object's identity
+        is deleted from the database within a flush, however the object
+        still remains associated with the :class:`.Session` until the
+        transaction completes.
+
+        If the transaction is rolled back, the object moves again
+        to the persistent state, and the
+        :meth:`.SessionEvents.deleted_to_persistent` event is called.
+        If the transaction is committed, the object becomes detached,
+        which will emit the :meth:`.SessionEvents.deleted_to_detached`
+        event.
+
+        Note that while the :meth:`.Session.delete` method is the primary
+        public interface to mark an object as deleted, many objects
+        get deleted due to cascade rules, which are not always determined
+        until flush time.  Therefore, there's no way to catch
+        every object that will be deleted until the flush has proceeded;
+        the :meth:`.SessionEvents.persistent_to_deleted` event is therefore
+        invoked at the end of a flush.
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def deleted_to_persistent(self, session: Session, instance: _O) -> None:
+        """Intercept the "deleted to persistent" transition for a specific
+        object.
+
+        This transition occurs only when an object that's been deleted
+        successfully in a flush is restored due to a call to
+        :meth:`.Session.rollback`.   The event is not called under
+        any other circumstances.
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def deleted_to_detached(self, session: Session, instance: _O) -> None:
+        """Intercept the "deleted to detached" transition for a specific
+        object.
+
+        This event is invoked when a deleted object is evicted
+        from the session.   The typical case when this occurs is when
+        the transaction for a :class:`.Session` in which the object
+        was deleted is committed; the object moves from the deleted
+        state to the detached state.
+
+        It is also invoked for objects that were deleted in a flush
+        when the :meth:`.Session.expunge_all` or :meth:`.Session.close`
+        methods are called, as well as if the object is individually
+        expunged from its deleted state via :meth:`.Session.expunge`.
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+    @_lifecycle_event
+    def persistent_to_detached(self, session: Session, instance: _O) -> None:
+        """Intercept the "persistent to detached" transition for a specific
+        object.
+
+        This event is invoked when a persistent object is evicted
+        from the session.  There are many conditions that cause this
+        to happen, including:
+
+        * Using a method such as :meth:`.Session.expunge`
+          or :meth:`.Session.close`
+
+        * Calling the :meth:`.Session.rollback` method, when the object
+          was part of an INSERT statement for that session's transaction
+
+
+        :param session: target :class:`.Session`
+
+        :param instance: the ORM-mapped instance being operated upon.
+
+        :param deleted: boolean.  If True, indicates this object moved
+         to the detached state because it was marked as deleted and flushed.
+
+
+        .. seealso::
+
+            :ref:`session_lifecycle_events`
+
+        """
+
+
+class AttributeEvents(event.Events[QueryableAttribute[Any]]):
+    r"""Define events for object attributes.
+
+    These are typically defined on the class-bound descriptor for the
+    target class.
+
+    For example, to register a listener that will receive the
+    :meth:`_orm.AttributeEvents.append` event::
+
+        from sqlalchemy import event
+
+
+        @event.listens_for(MyClass.collection, "append", propagate=True)
+        def my_append_listener(target, value, initiator):
+            print("received append event for target: %s" % target)
+
+    Listeners have the option to return a possibly modified version of the
+    value, when the :paramref:`.AttributeEvents.retval` flag is passed to
+    :func:`.event.listen` or :func:`.event.listens_for`, such as below,
+    illustrated using the :meth:`_orm.AttributeEvents.set` event::
+
+        def validate_phone(target, value, oldvalue, initiator):
+            "Strip non-numeric characters from a phone number"
+
+            return re.sub(r"\D", "", value)
+
+
+        # setup listener on UserContact.phone attribute, instructing
+        # it to use the return value
+        event.listen(UserContact.phone, "set", validate_phone, retval=True)
+
+    A validation function like the above can also raise an exception
+    such as :exc:`ValueError` to halt the operation.
+
+    The :paramref:`.AttributeEvents.propagate` flag is also important when
+    applying listeners to mapped classes that also have mapped subclasses,
+    as when using mapper inheritance patterns::
+
+
+        @event.listens_for(MySuperClass.attr, "set", propagate=True)
+        def receive_set(target, value, initiator):
+            print("value set: %s" % target)
+
+    The full list of modifiers available to the :func:`.event.listen`
+    and :func:`.event.listens_for` functions is below.
+
+    :param active_history=False: When True, indicates that the
+      "set" event would like to receive the "old" value being
+      replaced unconditionally, even if this requires firing off
+      database loads. Note that ``active_history`` can also be
+      set directly via :func:`.column_property` and
+      :func:`_orm.relationship`.
+
+    :param propagate=False: When True, the listener function will
+      be established not just for the class attribute given, but
+      for attributes of the same name on all current subclasses
+      of that class, as well as all future subclasses of that
+      class, using an additional listener that listens for
+      instrumentation events.
+    :param raw=False: When True, the "target" argument to the
+      event will be the :class:`.InstanceState` management
+      object, rather than the mapped instance itself.
+    :param retval=False: when True, the user-defined event
+      listening function must return the "value" argument.
+      This gives the listening function the opportunity
+      to change the value that is ultimately used for a "set"
+      or "append" event.
+
+    """
+
+    _target_class_doc = "SomeClass.some_attribute"
+    _dispatch_target = QueryableAttribute
+
+    @staticmethod
+    def _set_dispatch(
+        cls: Type[_HasEventsDispatch[Any]], dispatch_cls: Type[_Dispatch[Any]]
+    ) -> _Dispatch[Any]:
+        dispatch = event.Events._set_dispatch(cls, dispatch_cls)
+        dispatch_cls._active_history = False
+        return dispatch
+
+    @classmethod
+    def _accept_with(
+        cls,
+        target: Union[QueryableAttribute[Any], Type[QueryableAttribute[Any]]],
+        identifier: str,
+    ) -> Union[QueryableAttribute[Any], Type[QueryableAttribute[Any]]]:
+        # TODO: coverage
+        if isinstance(target, interfaces.MapperProperty):
+            return getattr(target.parent.class_, target.key)
+        else:
+            return target
+
+    @classmethod
+    def _listen(  # type: ignore [override]
+        cls,
+        event_key: _EventKey[QueryableAttribute[Any]],
+        active_history: bool = False,
+        raw: bool = False,
+        retval: bool = False,
+        propagate: bool = False,
+        include_key: bool = False,
+    ) -> None:
+        target, fn = event_key.dispatch_target, event_key._listen_fn
+
+        if active_history:
+            target.dispatch._active_history = True
+
+        if not raw or not retval or not include_key:
+
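+            # a wrapper is applied to: resolve the mapped instance from
+            # the InstanceState when raw=False, return the original
+            # "value" argument when retval=False, and strip the "key"
+            # keyword when include_key=False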
+            def wrap(target: InstanceState[_O], *arg: Any, **kw: Any) -> Any:
+                if not raw:
+                    target = target.obj()  # type: ignore [assignment]
+                if not retval:
+                    if arg:
+                        value = arg[0]
+                    else:
+                        value = None
+                    if include_key:
+                        fn(target, *arg, **kw)
+                    else:
+                        fn(target, *arg)
+                    return value
+                else:
+                    if include_key:
+                        return fn(target, *arg, **kw)
+                    else:
+                        return fn(target, *arg)
+
+            event_key = event_key.with_wrapper(wrap)
+
+        event_key.base_listen(propagate=propagate)
+
+        if propagate:
+            manager = instrumentation.manager_of_class(target.class_)
+
+            for mgr in manager.subclass_managers(True):  # type: ignore [no-untyped-call] # noqa: E501
+                event_key.with_dispatch_target(mgr[target.key]).base_listen(
+                    propagate=True
+                )
+                if active_history:
+                    mgr[target.key].dispatch._active_history = True
+
+    def append(
+        self,
+        target: _O,
+        value: _T,
+        initiator: Event,
+        *,
+        key: EventConstants = NO_KEY,
+    ) -> Optional[_T]:
+        """Receive a collection append event.
+
+        The append event is invoked for each element as it is appended
+        to the collection.  This occurs for single-item appends as well
+        as for a "bulk replace" operation.
+
+        :param target: the object instance receiving the event.
+          If the listener is registered with ``raw=True``, this will
+          be the :class:`.InstanceState` object.
+        :param value: the value being appended.  If this listener
+          is registered with ``retval=True``, the listener
+          function must return this value, or a new value which
+          replaces it.
+        :param initiator: An instance of :class:`.attributes.Event`
+          representing the initiation of the event.  May be modified
+          from its original value by backref handlers in order to control
+          chained event propagation, as well as be inspected for information
+          about the source of the event.
+        :param key: When the event is established using the
+         :paramref:`.AttributeEvents.include_key` parameter set to
+         True, this will be the key used in the operation, such as
+         ``collection[some_key_or_index] = value``.
+         The parameter is not passed
+         to the event at all if the
+         :paramref:`.AttributeEvents.include_key`
+         was not used to set up the event; this is to allow backwards
+         compatibility with existing event handlers that don't include the
+         ``key`` parameter.
+
+         .. versionadded:: 2.0
+
+        :return: if the event was registered with ``retval=True``,
+         the given value, or a new effective value, should be returned.
+
+        .. seealso::
+
+            :class:`.AttributeEvents` - background on listener options such
+            as propagation to subclasses.
+
+            :meth:`.AttributeEvents.bulk_replace`
+
+        """
+
+    def append_wo_mutation(
+        self,
+        target: _O,
+        value: _T,
+        initiator: Event,
+        *,
+        key: EventConstants = NO_KEY,
+    ) -> None:
+        """Receive a collection append event where the collection was not
+        actually mutated.
+
+        This event differs from :meth:`_orm.AttributeEvents.append` in that
+        it is fired off for de-duplicating collections such as sets and
+        dictionaries, when the object already exists in the target collection.
+        The event does not have a return value and the identity of the
+        given object cannot be changed.
+
+        The event is used for cascading objects into a :class:`_orm.Session`
+        when the collection has already been mutated via a backref event.
+
+        :param target: the object instance receiving the event.
+          If the listener is registered with ``raw=True``, this will
+          be the :class:`.InstanceState` object.
+        :param value: the value that would be appended if the object did not
+          already exist in the collection.
+        :param initiator: An instance of :class:`.attributes.Event`
+          representing the initiation of the event.  May be modified
+          from its original value by backref handlers in order to control
+          chained event propagation, as well as be inspected for information
+          about the source of the event.
+        :param key: When the event is established using the
+         :paramref:`.AttributeEvents.include_key` parameter set to
+         True, this will be the key used in the operation, such as
+         ``collection[some_key_or_index] = value``.
+         The parameter is not passed
+         to the event at all if the
+         :paramref:`.AttributeEvents.include_key`
+         was not used to set up the event; this is to allow backwards
+         compatibility with existing event handlers that don't include the
+         ``key`` parameter.
+
+         .. versionadded:: 2.0
+
+        :return: No return value is defined for this event.
+
+        .. versionadded:: 1.4.15
+
+        """
+
+    def bulk_replace(
+        self,
+        target: _O,
+        values: Iterable[_T],
+        initiator: Event,
+        *,
+        keys: Optional[Iterable[EventConstants]] = None,
+    ) -> None:
+        """Receive a collection 'bulk replace' event.
+
+        This event is invoked for a sequence of values as they are incoming
+        to a bulk collection set operation, which can be
+        modified in place before the values are treated as ORM objects.
+        This is an "early hook" that runs before the bulk replace routine
+        attempts to reconcile which objects are already present in the
+        collection and which are being removed by the net replace operation.
+
+        It is typical that this method be combined with use of the
+        :meth:`.AttributeEvents.append` event.    When using both of these
+        events, note that a bulk replace operation will invoke
+        the :meth:`.AttributeEvents.append` event for all new items,
+        even after :meth:`.AttributeEvents.bulk_replace` has been invoked
+        for the collection as a whole.  In order to determine if an
+        :meth:`.AttributeEvents.append` event is part of a bulk replace,
+        use the symbol :attr:`~.attributes.OP_BULK_REPLACE` to test the
+        incoming initiator::
+
+            from sqlalchemy.orm.attributes import OP_BULK_REPLACE
+
+
+            @event.listens_for(SomeObject.collection, "bulk_replace")
+            def process_collection(target, values, initiator):
+                values[:] = [_make_value(value) for value in values]
+
+
+            @event.listens_for(SomeObject.collection, "append", retval=True)
+            def process_collection(target, value, initiator):
+                # make sure bulk_replace didn't already do it
+                if initiator is None or initiator.op is not OP_BULK_REPLACE:
+                    return _make_value(value)
+                else:
+                    return value
+
+        .. versionadded:: 1.2
+
+        :param target: the object instance receiving the event.
+          If the listener is registered with ``raw=True``, this will
+          be the :class:`.InstanceState` object.
+        :param values: a sequence (e.g. a list) of the values being set.  The
+          handler can modify this list in place.
+        :param initiator: An instance of :class:`.attributes.Event`
+          representing the initiation of the event.
+        :param keys: When the event is established using the
+         :paramref:`.AttributeEvents.include_key` parameter set to
+         True, this will be the sequence of keys used in the operation,
+         typically only for a dictionary update.  The parameter is not passed
+         to the event at all if the
+         :paramref:`.AttributeEvents.include_key`
+         was not used to set up the event; this is to allow backwards
+         compatibility with existing event handlers that don't include the
+         ``key`` parameter.
+
+         .. versionadded:: 2.0
+
+        .. seealso::
+
+            :class:`.AttributeEvents` - background on listener options such
+            as propagation to subclasses.
+
+
+        """
+
+    def remove(
+        self,
+        target: _O,
+        value: _T,
+        initiator: Event,
+        *,
+        key: EventConstants = NO_KEY,
+    ) -> None:
+        """Receive a collection remove event.
+
+        :param target: the object instance receiving the event.
+          If the listener is registered with ``raw=True``, this will
+          be the :class:`.InstanceState` object.
+        :param value: the value being removed.
+        :param initiator: An instance of :class:`.attributes.Event`
+          representing the initiation of the event.  May be modified
+          from its original value by backref handlers in order to control
+          chained event propagation.
+
+        :param key: When the event is established using the
+         :paramref:`.AttributeEvents.include_key` parameter set to
+         True, this will be the key used in the operation, such as
+         ``del collection[some_key_or_index]``.  The parameter is not passed
+         to the event at all if the
+         :paramref:`.AttributeEvents.include_key`
+         was not used to set up the event; this is to allow backwards
+         compatibility with existing event handlers that don't include the
+         ``key`` parameter.
+
+         .. versionadded:: 2.0
+
+        :return: No return value is defined for this event.
+
+
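+        A minimal sketch of a listener, where ``SomeClass.collection`` is
+        an illustrative relationship-based collection::
+
+            @event.listens_for(SomeClass.collection, "remove")
+            def receive_remove(target, value, initiator):
+                print("removed %s from %s" % (value, target))
+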
+        .. seealso::
+
+            :class:`.AttributeEvents` - background on listener options such
+            as propagation to subclasses.
+
+        """
+
+    def set(
+        self, target: _O, value: _T, oldvalue: _T, initiator: Event
+    ) -> None:
+        """Receive a scalar set event.
+
+        :param target: the object instance receiving the event.
+          If the listener is registered with ``raw=True``, this will
+          be the :class:`.InstanceState` object.
+        :param value: the value being set.  If this listener
+          is registered with ``retval=True``, the listener
+          function must return this value, or a new value which
+          replaces it.
+        :param oldvalue: the previous value being replaced.  This
+          may also be the symbol ``NEVER_SET`` or ``NO_VALUE``.
+          If the listener is registered with ``active_history=True``,
+          the previous value of the attribute will be loaded from
+          the database if the existing value is currently unloaded
+          or expired.
+        :param initiator: An instance of :class:`.attributes.Event`
+          representing the initiation of the event.  May be modified
+          from its original value by backref handlers in order to control
+          chained event propagation.
+
+        :return: if the event was registered with ``retval=True``,
+         the given value, or a new effective value, should be returned.
+
+        .. seealso::
+
+            :class:`.AttributeEvents` - background on listener options such
+            as propagation to subclasses.
+
+        """
+
+    def init_scalar(
+        self, target: _O, value: _T, dict_: Dict[Any, Any]
+    ) -> None:
+        r"""Receive a scalar "init" event.
+
+        This event is invoked when an uninitialized, unpersisted scalar
+        attribute is accessed, e.g. read::
+
+
+            x = my_object.some_attribute
+
+        The ORM's default behavior when this occurs for an un-initialized
+        attribute is to return the value ``None``; note this differs from
+        Python's usual behavior of raising ``AttributeError``.    The
+        event here can be used to customize what value is actually returned,
+        with the assumption that the event listener would be mirroring
+        a default generator that is configured on the Core
+        :class:`_schema.Column`
+        object as well.
+
+        Since a default generator on a :class:`_schema.Column`
+        might also produce
+        a changing value such as a timestamp, the
+        :meth:`.AttributeEvents.init_scalar`
+        event handler can also be used to **set** the newly returned value, so
+        that a Core-level default generation function effectively fires off
+        only once, but at the moment the attribute is accessed on the
+        non-persisted object.   Normally, no change to the object's state
+        is made when an uninitialized attribute is accessed (much older
+        SQLAlchemy versions did in fact change the object's state).
+
+        If a default generator on a column returned a particular constant,
+        a handler might be used as follows::
+
+            SOME_CONSTANT = 3.1415926
+
+
+            class MyClass(Base):
+                # ...
+
+                some_attribute = Column(Numeric, default=SOME_CONSTANT)
+
+
+            @event.listens_for(
+                MyClass.some_attribute, "init_scalar", retval=True, propagate=True
+            )
+            def _init_some_attribute(target, value, dict_):
+                dict_["some_attribute"] = SOME_CONSTANT
+                return SOME_CONSTANT
+
+        Above, we initialize the attribute ``MyClass.some_attribute`` to the
+        value of ``SOME_CONSTANT``.   The above code includes the following
+        features:
+
+        * By setting the value ``SOME_CONSTANT`` in the given ``dict_``,
+          we indicate that this value is to be persisted to the database.
+          This supersedes the use of ``SOME_CONSTANT`` in the default generator
+          for the :class:`_schema.Column`.  The ``active_column_defaults.py``
+          example given at :ref:`examples_instrumentation` illustrates using
+          the same approach for a changing default, e.g. a timestamp
+          generator.    In this particular example, it is not strictly
+          necessary to do this since ``SOME_CONSTANT`` would be part of the
+          INSERT statement in either case.
+
+        * By establishing the ``retval=True`` flag, the value we return
+          from the function will be returned by the attribute getter.
+          Without this flag, the event is assumed to be a passive observer
+          and the return value of our function is ignored.
+
+        * The ``propagate=True`` flag is significant if the mapped class
+          includes inheriting subclasses, which would also make use of this
+          event listener.  Without this flag, an inheriting subclass will
+          not use our event handler.
+
+        In the above example, the attribute set event
+        :meth:`.AttributeEvents.set` as well as the related validation feature
+        provided by :obj:`_orm.validates` is **not** invoked when we apply our
+        value to the given ``dict_``.  To have these events to invoke in
+        response to our newly generated value, apply the value to the given
+        object as a normal attribute set operation::
+
+            SOME_CONSTANT = 3.1415926
+
+
+            @event.listens_for(
+                MyClass.some_attribute, "init_scalar", retval=True, propagate=True
+            )
+            def _init_some_attribute(target, value, dict_):
+                # will also fire off attribute set events
+                target.some_attribute = SOME_CONSTANT
+                return SOME_CONSTANT
+
+        When multiple listeners are set up, the generation of the value
+        is "chained" from one listener to the next by passing the value
+        returned by the previous listener that specifies ``retval=True``
+        as the ``value`` argument of the next listener.
+
+        :param target: the object instance receiving the event.
+         If the listener is registered with ``raw=True``, this will
+         be the :class:`.InstanceState` object.
+        :param value: the value that is to be returned before this event
+         listener is invoked.  This value begins as the value ``None``,
+         however will be the return value of the previous event handler
+         function if multiple listeners are present.
+        :param dict\_: the attribute dictionary of this mapped object.
+         This is normally the ``__dict__`` of the object, but in all cases
+         represents the destination that the attribute system uses to get
+         at the actual value of this attribute.  Placing the value in this
+         dictionary has the effect that the value will be used in the
+         INSERT statement generated by the unit of work.
+
+
+        .. seealso::
+
+            :meth:`.AttributeEvents.init_collection` - collection version
+            of this event
+
+            :class:`.AttributeEvents` - background on listener options such
+            as propagation to subclasses.
+
+            :ref:`examples_instrumentation` - see the
+            ``active_column_defaults.py`` example.
+
+        """  # noqa: E501
+
+    def init_collection(
+        self,
+        target: _O,
+        collection: Type[Collection[Any]],
+        collection_adapter: CollectionAdapter,
+    ) -> None:
+        """Receive a 'collection init' event.
+
+        This event is triggered for a collection-based attribute, when
+        the initial "empty collection" is first generated for a blank
+        attribute, as well as for when the collection is replaced with
+        a new one, such as via a set event.
+
+        E.g., given that ``User.addresses`` is a relationship-based
+        collection, the event is triggered here::
+
+            u1 = User()
+            u1.addresses.append(a1)  #  <- new collection
+
+        and also during replace operations::
+
+            u1.addresses = [a2, a3]  #  <- new collection
+
+        :param target: the object instance receiving the event.
+         If the listener is registered with ``raw=True``, this will
+         be the :class:`.InstanceState` object.
+        :param collection: the new collection.  This will always be generated
+         from what was specified as
+         :paramref:`_orm.relationship.collection_class`, and will always
+         be empty.
+        :param collection_adapter: the :class:`.CollectionAdapter` that will
+         mediate internal access to the collection.
+
+        .. seealso::
+
+            :class:`.AttributeEvents` - background on listener options such
+            as propagation to subclasses.
+
+            :meth:`.AttributeEvents.init_scalar` - "scalar" version of this
+            event.
+
+        """
+
+    def dispose_collection(
+        self,
+        target: _O,
+        collection: Collection[Any],
+        collection_adapter: CollectionAdapter,
+    ) -> None:
+        """Receive a 'collection dispose' event.
+
+        This event is triggered for a collection-based attribute when
+        a collection is replaced, that is::
+
+            u1.addresses.append(a1)
+
+            u1.addresses = [a2, a3]  # <- old collection is disposed
+
+        The old collection received will contain its previous contents.
+
+        .. versionchanged:: 1.2 The collection passed to
+           :meth:`.AttributeEvents.dispose_collection` will now have its
+           contents before the dispose intact; previously, the collection
+           would be empty.
+
+        .. seealso::
+
+            :class:`.AttributeEvents` - background on listener options such
+            as propagation to subclasses.
+
+        """
+
+    def modified(self, target: _O, initiator: Event) -> None:
+        """Receive a 'modified' event.
+
+        This event is triggered when the :func:`.attributes.flag_modified`
+        function is used to trigger a modify event on an attribute without
+        any specific value being set.
+
+        .. versionadded:: 1.2
+
+        :param target: the object instance receiving the event.
+          If the listener is registered with ``raw=True``, this will
+          be the :class:`.InstanceState` object.
+
+        :param initiator: An instance of :class:`.attributes.Event`
+          representing the initiation of the event.
+
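+        For example, a sketch in which ``MyClass.data`` is an illustrative
+        mutable attribute::
+
+            from sqlalchemy.orm.attributes import flag_modified
+
+            some_object.data["key"] = "value"  # not detected by itself
+            flag_modified(some_object, "data")  # emits "modified"
+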
+        .. seealso::
+
+            :class:`.AttributeEvents` - background on listener options such
+            as propagation to subclasses.
+
+        """
+
+
+class QueryEvents(event.Events[Query[Any]]):
+    """Represent events within the construction of a :class:`_query.Query`
+    object.
+
+    .. legacy:: The :class:`_orm.QueryEvents` event methods are legacy
+        as of SQLAlchemy 2.0, and only apply to direct use of the
+        :class:`_orm.Query` object. They are not used for :term:`2.0 style`
+        statements. For events to intercept and modify 2.0 style ORM use,
+        use the :meth:`_orm.SessionEvents.do_orm_execute` hook.
+
+
+    The :class:`_orm.QueryEvents` hooks are now superseded by the
+    :meth:`_orm.SessionEvents.do_orm_execute` event hook.
+
+    """
+
+    _target_class_doc = "SomeQuery"
+    _dispatch_target = Query
+
+    def before_compile(self, query: Query[Any]) -> None:
+        """Receive the :class:`_query.Query`
+        object before it is composed into a
+        core :class:`_expression.Select` object.
+
+        .. deprecated:: 1.4  The :meth:`_orm.QueryEvents.before_compile` event
+           is superseded by the much more capable
+           :meth:`_orm.SessionEvents.do_orm_execute` hook.   In version 1.4,
+           the :meth:`_orm.QueryEvents.before_compile` event is **no longer
+           used** for ORM-level attribute loads, such as loads of deferred
+           or expired attributes as well as relationship loaders.   See the
+           new examples in :ref:`examples_session_orm_events` which
+           illustrate new ways of intercepting and modifying ORM queries
+           for the most common purpose of adding arbitrary filter criteria.
+
+
+        This event is intended to allow changes to the given query::
+
+            @event.listens_for(Query, "before_compile", retval=True)
+            def no_deleted(query):
+                for desc in query.column_descriptions:
+                    if desc["type"] is User:
+                        entity = desc["entity"]
+                        query = query.filter(entity.deleted == False)
+                return query
+
+        The event should normally be listened with the ``retval=True``
+        parameter set, so that the modified query may be returned.
+
+        The :meth:`.QueryEvents.before_compile` event by default
+        will disallow "baked" queries from caching a query, if the event
+        hook returns a new :class:`_query.Query` object.
+        This affects both direct
+        use of the baked query extension as well as its operation within
+        lazy loaders and eager loaders for relationships.  In order to
+        re-establish the query being cached, apply the event adding the
+        ``bake_ok`` flag::
+
+            @event.listens_for(Query, "before_compile", retval=True, bake_ok=True)
+            def my_event(query):
+                for desc in query.column_descriptions:
+                    if desc["type"] is User:
+                        entity = desc["entity"]
+                        query = query.filter(entity.deleted == False)
+                return query
+
+        When ``bake_ok`` is set to True, the event hook will only be invoked
+        once, and not called for subsequent invocations of a particular query
+        that is being cached.
+
+        .. versionadded:: 1.3.11  - added the "bake_ok" flag to the
+           :meth:`.QueryEvents.before_compile` event and disallowed caching via
+           the "baked" extension from occurring for event handlers that
+           return a new :class:`_query.Query` object if this flag is not set.
+
+        .. seealso::
+
+            :meth:`.QueryEvents.before_compile_update`
+
+            :meth:`.QueryEvents.before_compile_delete`
+
+            :ref:`baked_with_before_compile`
+
+        """  # noqa: E501
+
+    def before_compile_update(
+        self, query: Query[Any], update_context: BulkUpdate
+    ) -> None:
+        """Allow modifications to the :class:`_query.Query` object within
+        :meth:`_query.Query.update`.
+
+        .. deprecated:: 1.4  The :meth:`_orm.QueryEvents.before_compile_update`
+           event is superseded by the much more capable
+           :meth:`_orm.SessionEvents.do_orm_execute` hook.
+
+        Like the :meth:`.QueryEvents.before_compile` event, if the event
+        is to be used to alter the :class:`_query.Query` object, it should
+        be configured with ``retval=True``, and the modified
+        :class:`_query.Query` object returned, as in ::
+
+            @event.listens_for(Query, "before_compile_update", retval=True)
+            def no_deleted(query, update_context):
+                for desc in query.column_descriptions:
+                    if desc["type"] is User:
+                        entity = desc["entity"]
+                        query = query.filter(entity.deleted == False)
+
+                        update_context.values["timestamp"] = datetime.datetime.now(
+                            datetime.UTC
+                        )
+                return query
+
+        The ``.values`` dictionary of the "update context" object can also
+        be modified in place as illustrated above.
+
+        :param query: a :class:`_query.Query` instance; this is also
+         the ``.query`` attribute of the given "update context"
+         object.
+
+        :param update_context: an "update context" object which is
+         the same kind of object as described in
+         :paramref:`.QueryEvents.after_bulk_update.update_context`.
+         The object has a ``.values`` attribute in an UPDATE context which is
+         the dictionary of parameters passed to :meth:`_query.Query.update`.
+         This
+         dictionary can be modified to alter the VALUES clause of the
+         resulting UPDATE statement.
+
+        .. versionadded:: 1.2.17
+
+        .. seealso::
+
+            :meth:`.QueryEvents.before_compile`
+
+            :meth:`.QueryEvents.before_compile_delete`
+
+
+        """  # noqa: E501
+
+    def before_compile_delete(
+        self, query: Query[Any], delete_context: BulkDelete
+    ) -> None:
+        """Allow modifications to the :class:`_query.Query` object within
+        :meth:`_query.Query.delete`.
+
+        .. deprecated:: 1.4  The :meth:`_orm.QueryEvents.before_compile_delete`
+           event is superseded by the much more capable
+           :meth:`_orm.SessionEvents.do_orm_execute` hook.
+
+        Like the :meth:`.QueryEvents.before_compile` event, this event
+        should be configured with ``retval=True``, and the modified
+        :class:`_query.Query` object returned, as in ::
+
+            @event.listens_for(Query, "before_compile_delete", retval=True)
+            def no_deleted(query, delete_context):
+                for desc in query.column_descriptions:
+                    if desc["type"] is User:
+                        entity = desc["entity"]
+                        query = query.filter(entity.deleted == False)
+                return query
+
+        :param query: a :class:`_query.Query` instance; this is also
+         the ``.query`` attribute of the given "delete context"
+         object.
+
+        :param delete_context: a "delete context" object which is
+         the same kind of object as described in
+         :paramref:`.QueryEvents.after_bulk_delete.delete_context`.
+
+        .. versionadded:: 1.2.17
+
+        .. seealso::
+
+            :meth:`.QueryEvents.before_compile`
+
+            :meth:`.QueryEvents.before_compile_update`
+
+
+        """
+
+    @classmethod
+    def _listen(
+        cls,
+        event_key: _EventKey[_ET],
+        retval: bool = False,
+        bake_ok: bool = False,
+        **kw: Any,
+    ) -> None:
+        fn = event_key._listen_fn
+
+        if not retval:
+
+            def wrap(*arg: Any, **kw: Any) -> Any:
+                # the listener's return value is ignored; always hand
+                # back the original Query so the chain is unbroken
+                query = arg[0]
+                fn(*arg, **kw)
+                return query
+
+            event_key = event_key.with_wrapper(wrap)
+        else:
+            # don't assume we can apply an attribute to the callable
+            def wrap(*arg: Any, **kw: Any) -> Any:
+                return fn(*arg, **kw)
+
+            event_key = event_key.with_wrapper(wrap)
+
+        wrap._bake_ok = bake_ok  # type: ignore [attr-defined]
+
+        event_key.base_listen(**kw)
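+
+
+# --- illustrative sketch (editorial addition, not part of SQLAlchemy) ---
+# The deprecated before_compile_update / before_compile_delete hooks
+# above are superseded by SessionEvents.do_orm_execute.  This sketch,
+# adapted from the documented "soft delete" recipe, shows equivalent
+# filtering applied there; the ``User`` class is a hypothetical mapped
+# entity passed in only for illustration, and the function is never
+# called here.
+def _demo_do_orm_execute_soft_delete(User: Any) -> None:
+    from sqlalchemy import event
+    from sqlalchemy.orm import Session
+    from sqlalchemy.orm import with_loader_criteria
+
+    @event.listens_for(Session, "do_orm_execute")
+    def _filter_deleted(execute_state):
+        # skip attribute-refresh and relationship loads so that objects
+        # which are already loaded can still refresh themselves
+        if (
+            not execute_state.is_column_load
+            and not execute_state.is_relationship_load
+        ):
+            execute_state.statement = execute_state.statement.options(
+                with_loader_criteria(
+                    User,
+                    lambda cls: cls.deleted == False,  # noqa: E712
+                    include_aliases=True,
+                )
+            )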
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/exc.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/exc.py
new file mode 100644
index 00000000..0494edf9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/exc.py
@@ -0,0 +1,228 @@
+# orm/exc.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""SQLAlchemy ORM exceptions."""
+
+from __future__ import annotations
+
+from typing import Any
+from typing import Optional
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+
+from .util import _mapper_property_as_plain_name
+from .. import exc as sa_exc
+from .. import util
+from ..exc import MultipleResultsFound  # noqa
+from ..exc import NoResultFound  # noqa
+
+if TYPE_CHECKING:
+    from .interfaces import LoaderStrategy
+    from .interfaces import MapperProperty
+    from .state import InstanceState
+
+_T = TypeVar("_T", bound=Any)
+
+NO_STATE = (AttributeError, KeyError)
+"""Exception types that may be raised by instrumentation implementations."""
+
+
+class StaleDataError(sa_exc.SQLAlchemyError):
+    """An operation encountered database state that is unaccounted for.
+
+    Conditions which cause this to happen include:
+
+    * A flush may have attempted to update or delete rows
+      and an unexpected number of rows were matched during
+      the UPDATE or DELETE statement.   Note that when
+      version_id_col is used, rows in UPDATE or DELETE statements
+      are also matched against the current known version
+      identifier.
+
+    * A mapped object with version_id_col was refreshed,
+      and the version number coming back from the database does
+      not match that of the object itself.
+
+    * An object is detached from its parent object; however, the object
+      was previously attached to a different parent identity which was
+      garbage collected, and a decision cannot be made whether the new
+      parent was really the most recent "parent".
+
+    """
+
+
+ConcurrentModificationError = StaleDataError
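+
+
+# --- illustrative sketch (editorial addition, not part of SQLAlchemy) ---
+# A minimal sketch of the version_id_col scenario described above: the
+# row's version is bumped out of band after the object is loaded, so the
+# flush-time UPDATE matches zero rows and raises StaleDataError.  The
+# ``Widget`` class and in-memory SQLite engine are assumptions for
+# illustration only; the function is defined but never called here.
+def _demo_stale_data_error() -> None:
+    from sqlalchemy import create_engine
+    from sqlalchemy.orm import DeclarativeBase, Mapped, Session
+    from sqlalchemy.orm import mapped_column
+
+    class Base(DeclarativeBase):
+        pass
+
+    class Widget(Base):
+        __tablename__ = "widget"
+        id: Mapped[int] = mapped_column(primary_key=True)
+        name: Mapped[str] = mapped_column(default="")
+        version: Mapped[int] = mapped_column(nullable=False)
+        __mapper_args__ = {"version_id_col": version}
+
+    engine = create_engine("sqlite://")
+    Base.metadata.create_all(engine)
+
+    with Session(engine) as session:
+        session.add(Widget(id=1))
+        session.commit()
+
+        widget = session.get(Widget, 1)
+        # simulate a concurrent writer bumping the version out of band
+        session.connection().exec_driver_sql(
+            "UPDATE widget SET version = version + 1 WHERE id = 1"
+        )
+        widget.name = "changed"
+        try:
+            session.flush()  # UPDATE with stale version matches 0 rows
+        except StaleDataError:
+            session.rollback()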
+
+
+class FlushError(sa_exc.SQLAlchemyError):
+    """A invalid condition was detected during flush()."""
+
+
+class UnmappedError(sa_exc.InvalidRequestError):
+    """Base for exceptions that involve expected mappings not present."""
+
+
+class ObjectDereferencedError(sa_exc.SQLAlchemyError):
+    """An operation cannot complete due to an object being garbage
+    collected.
+
+    """
+
+
+class DetachedInstanceError(sa_exc.SQLAlchemyError):
+    """An attempt to access unloaded attributes on a
+    mapped instance that is detached."""
+
+    code = "bhk3"
+
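+
+# --- illustrative sketch (editorial addition, not part of SQLAlchemy) ---
+# A minimal sketch of the usual way this exception arises: commit expires
+# the instance's attributes, the Session is closed, and a later attribute
+# access cannot refresh.  ``Widget`` and the engine are assumptions for
+# illustration only; the function is defined but never called here.
+def _demo_detached_instance_error() -> None:
+    from sqlalchemy import create_engine
+    from sqlalchemy.orm import DeclarativeBase, Mapped, Session
+    from sqlalchemy.orm import mapped_column
+
+    class Base(DeclarativeBase):
+        pass
+
+    class Widget(Base):
+        __tablename__ = "widget"
+        id: Mapped[int] = mapped_column(primary_key=True)
+        name: Mapped[str] = mapped_column(default="w")
+
+    engine = create_engine("sqlite://")
+    Base.metadata.create_all(engine)
+
+    with Session(engine) as session:
+        widget = Widget(id=1)
+        session.add(widget)
+        session.commit()  # expire_on_commit=True expires the attributes
+    # the Session is now closed; ``widget`` is detached and expired
+    try:
+        widget.name  # cannot refresh without a Session
+    except DetachedInstanceError:
+        pass
+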
+
+class UnmappedInstanceError(UnmappedError):
+    """An mapping operation was requested for an unknown instance."""
+
+    @util.preload_module("sqlalchemy.orm.base")
+    def __init__(self, obj: object, msg: Optional[str] = None):
+        base = util.preloaded.orm_base
+
+        if not msg:
+            try:
+                base.class_mapper(type(obj))
+                name = _safe_cls_name(type(obj))
+                msg = (
+                    "Class %r is mapped, but this instance lacks "
+                    "instrumentation.  This occurs when the instance "
+                    "is created before sqlalchemy.orm.mapper(%s) "
+                    "was called." % (name, name)
+                )
+            except UnmappedClassError:
+                msg = f"Class '{_safe_cls_name(type(obj))}' is not mapped"
+                if isinstance(obj, type):
+                    msg += (
+                        "; was a class (%s) supplied where an instance was "
+                        "required?" % _safe_cls_name(obj)
+                    )
+        UnmappedError.__init__(self, msg)
+
+    def __reduce__(self) -> Any:
+        return self.__class__, (None, self.args[0])
+
+
+class UnmappedClassError(UnmappedError):
+    """An mapping operation was requested for an unknown class."""
+
+    def __init__(self, cls: Type[_T], msg: Optional[str] = None):
+        if not msg:
+            msg = _default_unmapped(cls)
+        UnmappedError.__init__(self, msg)
+
+    def __reduce__(self) -> Any:
+        return self.__class__, (None, self.args[0])
+
+
+class ObjectDeletedError(sa_exc.InvalidRequestError):
+    """A refresh operation failed to retrieve the database
+    row corresponding to an object's known primary key identity.
+
+    A refresh operation proceeds when an expired attribute is
+    accessed on an object, or when :meth:`_query.Query.get` is
+    used to retrieve an object which is, upon retrieval, detected
+    as expired.   A SELECT is emitted for the target row
+    based on primary key; if no row is returned, this
+    exception is raised.
+
+    The true meaning of this exception is simply that
+    no row exists for the primary key identifier associated
+    with a persistent object.   The row may have been
+    deleted, or in some cases the primary key updated
+    to a new value, outside of the ORM's management of the target
+    object.
+
+    """
+
+    @util.preload_module("sqlalchemy.orm.base")
+    def __init__(self, state: InstanceState[Any], msg: Optional[str] = None):
+        base = util.preloaded.orm_base
+
+        if not msg:
+            msg = (
+                "Instance '%s' has been deleted, or its "
+                "row is otherwise not present." % base.state_str(state)
+            )
+
+        sa_exc.InvalidRequestError.__init__(self, msg)
+
+    def __reduce__(self) -> Any:
+        return self.__class__, (None, self.args[0])
+
+
+class UnmappedColumnError(sa_exc.InvalidRequestError):
+    """Mapping operation was requested on an unknown column."""
+
+
+class LoaderStrategyException(sa_exc.InvalidRequestError):
+    """A loader strategy for an attribute does not exist."""
+
+    def __init__(
+        self,
+        applied_to_property_type: Type[Any],
+        requesting_property: MapperProperty[Any],
+        applies_to: Optional[Type[MapperProperty[Any]]],
+        actual_strategy_type: Optional[Type[LoaderStrategy]],
+        strategy_key: Tuple[Any, ...],
+    ):
+        if actual_strategy_type is None:
+            sa_exc.InvalidRequestError.__init__(
+                self,
+                "Can't find strategy %s for %s"
+                % (strategy_key, requesting_property),
+            )
+        else:
+            assert applies_to is not None
+            sa_exc.InvalidRequestError.__init__(
+                self,
+                'Can\'t apply "%s" strategy to property "%s", '
+                'which is a "%s"; this loader strategy is intended '
+                'to be used with a "%s".'
+                % (
+                    util.clsname_as_plain_name(actual_strategy_type),
+                    requesting_property,
+                    _mapper_property_as_plain_name(applied_to_property_type),
+                    _mapper_property_as_plain_name(applies_to),
+                ),
+            )
+
+
+def _safe_cls_name(cls: Type[Any]) -> str:
+    cls_name: Optional[str]
+    try:
+        cls_name = ".".join((cls.__module__, cls.__name__))
+    except AttributeError:
+        cls_name = getattr(cls, "__name__", None)
+        if cls_name is None:
+            cls_name = repr(cls)
+    return cls_name
+
+
+@util.preload_module("sqlalchemy.orm.base")
+def _default_unmapped(cls: Type[Any]) -> Optional[str]:
+    base = util.preloaded.orm_base
+
+    try:
+        mappers = base.manager_of_class(cls).mappers  # type: ignore
+    except (
+        UnmappedClassError,
+        TypeError,
+    ) + NO_STATE:
+        mappers = {}
+    name = _safe_cls_name(cls)
+
+    if not mappers:
+        return f"Class '{name}' is not mapped"
+    else:
+        return None
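+
+
+# --- illustrative sketch (editorial addition, not part of SQLAlchemy) ---
+# How _default_unmapped() surfaces to users: requesting the mapper for a
+# plain, unmapped class raises UnmappedClassError carrying that message.
+# The function is defined but never called here.
+def _demo_unmapped_class_error() -> None:
+    from sqlalchemy.orm import class_mapper
+
+    class NotMapped:
+        pass
+
+    try:
+        class_mapper(NotMapped)
+    except UnmappedClassError as err:
+        assert "is not mapped" in str(err)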
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/identity.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/identity.py
new file mode 100644
index 00000000..1808b2d5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/identity.py
@@ -0,0 +1,302 @@
+# orm/identity.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import NoReturn
+from typing import Optional
+from typing import Set
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import TypeVar
+import weakref
+
+from . import util as orm_util
+from .. import exc as sa_exc
+
+if TYPE_CHECKING:
+    from ._typing import _IdentityKeyType
+    from .state import InstanceState
+
+
+_T = TypeVar("_T", bound=Any)
+
+_O = TypeVar("_O", bound=object)
+
+
+class IdentityMap:
+    _wr: weakref.ref[IdentityMap]
+
+    _dict: Dict[_IdentityKeyType[Any], Any]
+    _modified: Set[InstanceState[Any]]
+
+    def __init__(self) -> None:
+        self._dict = {}
+        self._modified = set()
+        self._wr = weakref.ref(self)
+
+    def _kill(self) -> None:
+        self._add_unpresent = _killed  # type: ignore
+
+    def all_states(self) -> List[InstanceState[Any]]:
+        raise NotImplementedError()
+
+    def contains_state(self, state: InstanceState[Any]) -> bool:
+        raise NotImplementedError()
+
+    def __contains__(self, key: _IdentityKeyType[Any]) -> bool:
+        raise NotImplementedError()
+
+    def safe_discard(self, state: InstanceState[Any]) -> None:
+        raise NotImplementedError()
+
+    def __getitem__(self, key: _IdentityKeyType[_O]) -> _O:
+        raise NotImplementedError()
+
+    def get(
+        self, key: _IdentityKeyType[_O], default: Optional[_O] = None
+    ) -> Optional[_O]:
+        raise NotImplementedError()
+
+    def fast_get_state(
+        self, key: _IdentityKeyType[_O]
+    ) -> Optional[InstanceState[_O]]:
+        raise NotImplementedError()
+
+    def keys(self) -> Iterable[_IdentityKeyType[Any]]:
+        return self._dict.keys()
+
+    def values(self) -> Iterable[object]:
+        raise NotImplementedError()
+
+    def replace(self, state: InstanceState[_O]) -> Optional[InstanceState[_O]]:
+        raise NotImplementedError()
+
+    def add(self, state: InstanceState[Any]) -> bool:
+        raise NotImplementedError()
+
+    def _fast_discard(self, state: InstanceState[Any]) -> None:
+        raise NotImplementedError()
+
+    def _add_unpresent(
+        self, state: InstanceState[Any], key: _IdentityKeyType[Any]
+    ) -> None:
+        """optional inlined form of add() which can assume item isn't present
+        in the map"""
+        self.add(state)
+
+    def _manage_incoming_state(self, state: InstanceState[Any]) -> None:
+        state._instance_dict = self._wr
+
+        if state.modified:
+            self._modified.add(state)
+
+    def _manage_removed_state(self, state: InstanceState[Any]) -> None:
+        del state._instance_dict
+        if state.modified:
+            self._modified.discard(state)
+
+    def _dirty_states(self) -> Set[InstanceState[Any]]:
+        return self._modified
+
+    def check_modified(self) -> bool:
+        """return True if any InstanceStates present have been marked
+        as 'modified'.
+
+        """
+        return bool(self._modified)
+
+    def has_key(self, key: _IdentityKeyType[Any]) -> bool:
+        return key in self
+
+    def __len__(self) -> int:
+        return len(self._dict)
+
+
+class WeakInstanceDict(IdentityMap):
+    _dict: Dict[_IdentityKeyType[Any], InstanceState[Any]]
+
+    def __getitem__(self, key: _IdentityKeyType[_O]) -> _O:
+        state = cast("InstanceState[_O]", self._dict[key])
+        o = state.obj()
+        if o is None:
+            raise KeyError(key)
+        return o
+
+    def __contains__(self, key: _IdentityKeyType[Any]) -> bool:
+        try:
+            if key in self._dict:
+                state = self._dict[key]
+                o = state.obj()
+            else:
+                return False
+        except KeyError:
+            return False
+        else:
+            return o is not None
+
+    def contains_state(self, state: InstanceState[Any]) -> bool:
+        if state.key in self._dict:
+            if TYPE_CHECKING:
+                assert state.key is not None
+            try:
+                return self._dict[state.key] is state
+            except KeyError:
+                return False
+        else:
+            return False
+
+    def replace(
+        self, state: InstanceState[Any]
+    ) -> Optional[InstanceState[Any]]:
+        assert state.key is not None
+        if state.key in self._dict:
+            try:
+                existing = existing_non_none = self._dict[state.key]
+            except KeyError:
+                # the gc may have removed the key after we just checked
+                existing = None
+            else:
+                if existing_non_none is not state:
+                    self._manage_removed_state(existing_non_none)
+                else:
+                    return None
+        else:
+            existing = None
+
+        self._dict[state.key] = state
+        self._manage_incoming_state(state)
+        return existing
+
+    def add(self, state: InstanceState[Any]) -> bool:
+        key = state.key
+        assert key is not None
+        # inline of self.__contains__
+        if key in self._dict:
+            try:
+                existing_state = self._dict[key]
+            except KeyError:
+                # the gc may have removed the key after we just checked
+                pass
+            else:
+                if existing_state is not state:
+                    o = existing_state.obj()
+                    if o is not None:
+                        raise sa_exc.InvalidRequestError(
+                            "Can't attach instance "
+                            "%s; another instance with key %s is already "
+                            "present in this session."
+                            % (orm_util.state_str(state), state.key)
+                        )
+                else:
+                    return False
+        self._dict[key] = state
+        self._manage_incoming_state(state)
+        return True
+
+    def _add_unpresent(
+        self, state: InstanceState[Any], key: _IdentityKeyType[Any]
+    ) -> None:
+        # inlined form of add() called by loading.py
+        self._dict[key] = state
+        state._instance_dict = self._wr
+
+    def fast_get_state(
+        self, key: _IdentityKeyType[_O]
+    ) -> Optional[InstanceState[_O]]:
+        return self._dict.get(key)
+
+    def get(
+        self, key: _IdentityKeyType[_O], default: Optional[_O] = None
+    ) -> Optional[_O]:
+        if key not in self._dict:
+            return default
+        try:
+            state = cast("InstanceState[_O]", self._dict[key])
+        except KeyError:
+            # the gc may have removed the key after we just checked
+            return default
+        else:
+            o = state.obj()
+            if o is None:
+                return default
+            return o
+
+    def items(self) -> List[Tuple[_IdentityKeyType[Any], InstanceState[Any]]]:
+        values = self.all_states()
+        result = []
+        for state in values:
+            value = state.obj()
+            key = state.key
+            assert key is not None
+            if value is not None:
+                result.append((key, value))
+        return result
+
+    def values(self) -> List[object]:
+        values = self.all_states()
+        result = []
+        for state in values:
+            value = state.obj()
+            if value is not None:
+                result.append(value)
+
+        return result
+
+    def __iter__(self) -> Iterator[_IdentityKeyType[Any]]:
+        return iter(self.keys())
+
+    def all_states(self) -> List[InstanceState[Any]]:
+        return list(self._dict.values())
+
+    def _fast_discard(self, state: InstanceState[Any]) -> None:
+        # used by InstanceState for state being
+        # GC'ed; inlines _manage_removed_state
+        key = state.key
+        assert key is not None
+        try:
+            st = self._dict[key]
+        except KeyError:
+            # the gc may have removed the key after we just checked
+            pass
+        else:
+            if st is state:
+                self._dict.pop(key, None)
+
+    def discard(self, state: InstanceState[Any]) -> None:
+        self.safe_discard(state)
+
+    def safe_discard(self, state: InstanceState[Any]) -> None:
+        key = state.key
+        if key in self._dict:
+            assert key is not None
+            try:
+                st = self._dict[key]
+            except KeyError:
+                # the gc may have removed the key after we just checked
+                pass
+            else:
+                if st is state:
+                    self._dict.pop(key, None)
+                    self._manage_removed_state(state)
+
+
+def _killed(state: InstanceState[Any], key: _IdentityKeyType[Any]) -> NoReturn:
+    # external function to avoid creating cycles when assigned to
+    # the IdentityMap
+    raise sa_exc.InvalidRequestError(
+        "Object %s cannot be converted to 'persistent' state, as this "
+        "identity map is no longer valid.  Has the owning Session "
+        "been closed?" % orm_util.state_str(state),
+        code="lkrp",
+    )
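+
+
+# --- illustrative sketch (editorial addition, not part of SQLAlchemy) ---
+# Demonstrates the weak-referencing behavior of WeakInstanceDict: once
+# the last strong reference to a clean instance goes away, its key
+# behaves as absent from the Session's identity map.  ``Widget`` and the
+# engine are assumptions for illustration; the function is never called.
+def _demo_weak_identity_map() -> None:
+    import gc
+
+    from sqlalchemy import create_engine
+    from sqlalchemy.orm import DeclarativeBase, Mapped, Session
+    from sqlalchemy.orm import mapped_column
+
+    class Base(DeclarativeBase):
+        pass
+
+    class Widget(Base):
+        __tablename__ = "widget"
+        id: Mapped[int] = mapped_column(primary_key=True)
+
+    engine = create_engine("sqlite://")
+    Base.metadata.create_all(engine)
+    with Session(engine) as session:
+        session.add(Widget(id=1))
+        session.commit()
+
+        widget = session.get(Widget, 1)
+        key = orm_util.identity_key(Widget, 1)
+        assert key in session.identity_map
+
+        del widget  # drop the only strong reference
+        gc.collect()
+        assert key not in session.identity_map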
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/instrumentation.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/instrumentation.py
new file mode 100644
index 00000000..f87023f1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/instrumentation.py
@@ -0,0 +1,754 @@
+# orm/instrumentation.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+"""Defines SQLAlchemy's system of class instrumentation.
+
+This module is usually not directly visible to user applications, but
+defines a large part of the ORM's interactivity.
+
+instrumentation.py deals with registration of end-user classes
+for state tracking.   It interacts closely with state.py
+and attributes.py which establish per-instance and per-class-attribute
+instrumentation, respectively.
+
+The class instrumentation system can be customized on a per-class
+or global basis using the :mod:`sqlalchemy.ext.instrumentation`
+module, which provides the means to build and specify
+alternate instrumentation forms.
+
+.. versionchanged:: 0.8
+   The instrumentation extension system was moved out of the
+   ORM and into the external :mod:`sqlalchemy.ext.instrumentation`
+   package.  When that package is imported, it installs
+   itself within sqlalchemy.orm so that its more comprehensive
+   resolution mechanics take effect.
+
+"""
+
+
+from __future__ import annotations
+
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Collection
+from typing import Dict
+from typing import Generic
+from typing import Iterable
+from typing import List
+from typing import Optional
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+import weakref
+
+from . import base
+from . import collections
+from . import exc
+from . import interfaces
+from . import state
+from ._typing import _O
+from .attributes import _is_collection_attribute_impl
+from .. import util
+from ..event import EventTarget
+from ..util import HasMemoized
+from ..util.typing import Literal
+from ..util.typing import Protocol
+
+if TYPE_CHECKING:
+    from ._typing import _RegistryType
+    from .attributes import AttributeImpl
+    from .attributes import QueryableAttribute
+    from .collections import _AdaptedCollectionProtocol
+    from .collections import _CollectionFactoryType
+    from .decl_base import _MapperConfig
+    from .events import InstanceEvents
+    from .mapper import Mapper
+    from .state import InstanceState
+    from ..event import dispatcher
+
+_T = TypeVar("_T", bound=Any)
+DEL_ATTR = util.symbol("DEL_ATTR")
+
+
+class _ExpiredAttributeLoaderProto(Protocol):
+    def __call__(
+        self,
+        state: state.InstanceState[Any],
+        toload: Set[str],
+        passive: base.PassiveFlag,
+    ) -> None: ...
+
+
+class _ManagerFactory(Protocol):
+    def __call__(self, class_: Type[_O]) -> ClassManager[_O]: ...
+
+
+class ClassManager(
+    HasMemoized,
+    Dict[str, "QueryableAttribute[Any]"],
+    Generic[_O],
+    EventTarget,
+):
+    """Tracks state information at the class level."""
+
+    dispatch: dispatcher[ClassManager[_O]]
+
+    MANAGER_ATTR = base.DEFAULT_MANAGER_ATTR
+    STATE_ATTR = base.DEFAULT_STATE_ATTR
+
+    _state_setter = staticmethod(util.attrsetter(STATE_ATTR))
+
+    expired_attribute_loader: _ExpiredAttributeLoaderProto
+    "previously known as deferred_scalar_loader"
+
+    init_method: Optional[Callable[..., None]]
+    original_init: Optional[Callable[..., None]] = None
+
+    factory: Optional[_ManagerFactory]
+
+    declarative_scan: Optional[weakref.ref[_MapperConfig]] = None
+
+    registry: _RegistryType
+
+    if not TYPE_CHECKING:
+        # starts as None during setup
+        registry = None
+
+    class_: Type[_O]
+
+    _bases: List[ClassManager[Any]]
+
+    @property
+    @util.deprecated(
+        "1.4",
+        message="The ClassManager.deferred_scalar_loader attribute is now "
+        "named expired_attribute_loader",
+    )
+    def deferred_scalar_loader(self):
+        return self.expired_attribute_loader
+
+    @deferred_scalar_loader.setter
+    @util.deprecated(
+        "1.4",
+        message="The ClassManager.deferred_scalar_loader attribute is now "
+        "named expired_attribute_loader",
+    )
+    def deferred_scalar_loader(self, obj):
+        self.expired_attribute_loader = obj
+
+    def __init__(self, class_):
+        self.class_ = class_
+        self.info = {}
+        self.new_init = None
+        self.local_attrs = {}
+        self.originals = {}
+        self._finalized = False
+        self.factory = None
+        self.init_method = None
+
+        self._bases = [
+            mgr
+            for mgr in cast(
+                "List[Optional[ClassManager[Any]]]",
+                [
+                    opt_manager_of_class(base)
+                    for base in self.class_.__bases__
+                    if isinstance(base, type)
+                ],
+            )
+            if mgr is not None
+        ]
+
+        for base_ in self._bases:
+            self.update(base_)
+
+        cast(
+            "InstanceEvents", self.dispatch._events
+        )._new_classmanager_instance(class_, self)
+
+        for basecls in class_.__mro__:
+            mgr = opt_manager_of_class(basecls)
+            if mgr is not None:
+                self.dispatch._update(mgr.dispatch)
+
+        self.manage()
+
+        if "__del__" in class_.__dict__:
+            util.warn(
+                "__del__() method on class %s will "
+                "cause unreachable cycles and memory leaks, "
+                "as SQLAlchemy instrumentation often creates "
+                "reference cycles.  Please remove this method." % class_
+            )
+
+    def _update_state(
+        self,
+        finalize: bool = False,
+        mapper: Optional[Mapper[_O]] = None,
+        registry: Optional[_RegistryType] = None,
+        declarative_scan: Optional[_MapperConfig] = None,
+        expired_attribute_loader: Optional[
+            _ExpiredAttributeLoaderProto
+        ] = None,
+        init_method: Optional[Callable[..., None]] = None,
+    ) -> None:
+        if mapper:
+            self.mapper = mapper
+        if registry:
+            registry._add_manager(self)
+        if declarative_scan:
+            self.declarative_scan = weakref.ref(declarative_scan)
+        if expired_attribute_loader:
+            self.expired_attribute_loader = expired_attribute_loader
+
+        if init_method:
+            assert not self._finalized, (
+                "class is already instrumented, "
+                "init_method %s can't be applied" % init_method
+            )
+            self.init_method = init_method
+
+        if not self._finalized:
+            self.original_init = (
+                self.init_method
+                if self.init_method is not None
+                and self.class_.__init__ is object.__init__
+                else self.class_.__init__
+            )
+
+        if finalize and not self._finalized:
+            self._finalize()
+
+    def _finalize(self) -> None:
+        if self._finalized:
+            return
+        self._finalized = True
+
+        self._instrument_init()
+
+        _instrumentation_factory.dispatch.class_instrument(self.class_)
+
+    def __hash__(self) -> int:  # type: ignore[override]
+        return id(self)
+
+    def __eq__(self, other: Any) -> bool:
+        return other is self
+
+    @property
+    def is_mapped(self) -> bool:
+        return "mapper" in self.__dict__
+
+    @HasMemoized.memoized_attribute
+    def _all_key_set(self):
+        return frozenset(self)
+
+    @HasMemoized.memoized_attribute
+    def _collection_impl_keys(self):
+        return frozenset(
+            [attr.key for attr in self.values() if attr.impl.collection]
+        )
+
+    @HasMemoized.memoized_attribute
+    def _scalar_loader_impls(self):
+        return frozenset(
+            [
+                attr.impl
+                for attr in self.values()
+                if attr.impl.accepts_scalar_loader
+            ]
+        )
+
+    @HasMemoized.memoized_attribute
+    def _loader_impls(self):
+        return frozenset([attr.impl for attr in self.values()])
+
+    @util.memoized_property
+    def mapper(self) -> Mapper[_O]:
+        # raises unless self.mapper has been assigned
+        raise exc.UnmappedClassError(self.class_)
+
+    def _all_sqla_attributes(self, exclude=None):
+        """return an iterator of all classbound attributes that are
+        implement :class:`.InspectionAttr`.
+
+        This includes :class:`.QueryableAttribute` as well as extension
+        types such as :class:`.hybrid_property` and
+        :class:`.AssociationProxy`.
+
+        """
+
+        found: Dict[str, Any] = {}
+
+        # constraints:
+        # 1. yield keys in cls.__dict__ order
+        # 2. if a subclass has the same key as a superclass, include that
+        #    key as part of the ordering of the superclass, because an
+        #    overridden key is usually installed by the mapper, which
+        #    follows a different ordering
+        # 3. don't use getattr() as this fires off descriptors
+
+        for supercls in self.class_.__mro__[0:-1]:
+            inherits = supercls.__mro__[1]
+            for key in supercls.__dict__:
+                found.setdefault(key, supercls)
+                if key in inherits.__dict__:
+                    continue
+                val = found[key].__dict__[key]
+                if (
+                    isinstance(val, interfaces.InspectionAttr)
+                    and val.is_attribute
+                ):
+                    yield key, val
+
+    def _get_class_attr_mro(self, key, default=None):
+        """return an attribute on the class without tripping it."""
+
+        for supercls in self.class_.__mro__:
+            if key in supercls.__dict__:
+                return supercls.__dict__[key]
+        else:
+            return default
+
+    def _attr_has_impl(self, key: str) -> bool:
+        """Return True if the given attribute is fully initialized.
+
+        i.e. has an impl.
+        """
+
+        return key in self and self[key].impl is not None
+
+    def _subclass_manager(self, cls: Type[_T]) -> ClassManager[_T]:
+        """Create a new ClassManager for a subclass of this ClassManager's
+        class.
+
+        This is called automatically when attributes are instrumented so that
+        the attributes can be propagated to subclasses against their own
+        class-local manager, without the need for mappers etc. to have already
+        pre-configured managers for the full class hierarchy.   Mappers
+        can post-configure the auto-generated ClassManager when needed.
+
+        """
+        return register_class(cls, finalize=False)
+
+    def _instrument_init(self):
+        self.new_init = _generate_init(self.class_, self, self.original_init)
+        self.install_member("__init__", self.new_init)
+
+    @util.memoized_property
+    def _state_constructor(self) -> Type[state.InstanceState[_O]]:
+        self.dispatch.first_init(self, self.class_)
+        return state.InstanceState
+
+    def manage(self):
+        """Mark this instance as the manager for its class."""
+
+        setattr(self.class_, self.MANAGER_ATTR, self)
+
+    @util.hybridmethod
+    def manager_getter(self):
+        return _default_manager_getter
+
+    @util.hybridmethod
+    def state_getter(self):
+        """Return a (instance) -> InstanceState callable.
+
+        "state getter" callables should raise either KeyError or
+        AttributeError if no InstanceState could be found for the
+        instance.
+        """
+
+        return _default_state_getter
+
+    @util.hybridmethod
+    def dict_getter(self):
+        return _default_dict_getter
+
+    def instrument_attribute(
+        self,
+        key: str,
+        inst: QueryableAttribute[Any],
+        propagated: bool = False,
+    ) -> None:
+        if propagated:
+            if key in self.local_attrs:
+                return  # don't override local attr with inherited attr
+        else:
+            self.local_attrs[key] = inst
+            self.install_descriptor(key, inst)
+        self._reset_memoizations()
+        self[key] = inst
+
+        for cls in self.class_.__subclasses__():
+            manager = self._subclass_manager(cls)
+            manager.instrument_attribute(key, inst, True)
+
+    def subclass_managers(self, recursive):
+        for cls in self.class_.__subclasses__():
+            mgr = opt_manager_of_class(cls)
+            if mgr is not None and mgr is not self:
+                yield mgr
+                if recursive:
+                    yield from mgr.subclass_managers(True)
+
+    def post_configure_attribute(self, key):
+        _instrumentation_factory.dispatch.attribute_instrument(
+            self.class_, key, self[key]
+        )
+
+    def uninstrument_attribute(self, key, propagated=False):
+        if key not in self:
+            return
+        if propagated:
+            if key in self.local_attrs:
+                return  # don't get rid of local attr
+        else:
+            del self.local_attrs[key]
+            self.uninstall_descriptor(key)
+        self._reset_memoizations()
+        del self[key]
+        for cls in self.class_.__subclasses__():
+            manager = opt_manager_of_class(cls)
+            if manager:
+                manager.uninstrument_attribute(key, True)
+
+    def unregister(self) -> None:
+        """remove all instrumentation established by this ClassManager."""
+
+        for key in list(self.originals):
+            self.uninstall_member(key)
+
+        self.mapper = None
+        self.dispatch = None  # type: ignore
+        self.new_init = None
+        self.info.clear()
+
+        for key in list(self):
+            if key in self.local_attrs:
+                self.uninstrument_attribute(key)
+
+        if self.MANAGER_ATTR in self.class_.__dict__:
+            delattr(self.class_, self.MANAGER_ATTR)
+
+    def install_descriptor(
+        self, key: str, inst: QueryableAttribute[Any]
+    ) -> None:
+        if key in (self.STATE_ATTR, self.MANAGER_ATTR):
+            raise KeyError(
+                "%r: requested attribute name conflicts with "
+                "instrumentation attribute of the same name." % key
+            )
+        setattr(self.class_, key, inst)
+
+    def uninstall_descriptor(self, key: str) -> None:
+        delattr(self.class_, key)
+
+    def install_member(self, key: str, implementation: Any) -> None:
+        if key in (self.STATE_ATTR, self.MANAGER_ATTR):
+            raise KeyError(
+                "%r: requested attribute name conflicts with "
+                "instrumentation attribute of the same name." % key
+            )
+        self.originals.setdefault(key, self.class_.__dict__.get(key, DEL_ATTR))
+        setattr(self.class_, key, implementation)
+
+    def uninstall_member(self, key: str) -> None:
+        original = self.originals.pop(key, None)
+        if original is not DEL_ATTR:
+            setattr(self.class_, key, original)
+        else:
+            delattr(self.class_, key)
+
+    def instrument_collection_class(
+        self, key: str, collection_class: Type[Collection[Any]]
+    ) -> _CollectionFactoryType:
+        return collections.prepare_instrumentation(collection_class)
+
+    def initialize_collection(
+        self,
+        key: str,
+        state: InstanceState[_O],
+        factory: _CollectionFactoryType,
+    ) -> Tuple[collections.CollectionAdapter, _AdaptedCollectionProtocol]:
+        user_data = factory()
+        impl = self.get_impl(key)
+        assert _is_collection_attribute_impl(impl)
+        adapter = collections.CollectionAdapter(impl, state, user_data)
+        return adapter, user_data
+
+    def is_instrumented(self, key: str, search: bool = False) -> bool:
+        if search:
+            return key in self
+        else:
+            return key in self.local_attrs
+
+    def get_impl(self, key: str) -> AttributeImpl:
+        return self[key].impl
+
+    @property
+    def attributes(self) -> Iterable[Any]:
+        return iter(self.values())
+
+    # InstanceState management
+
+    def new_instance(self, state: Optional[InstanceState[_O]] = None) -> _O:
+        # here, we would prefer _O to be bound to "object" so that
+        # mypy sees that __new__ is present.  it's currently bound to
+        # Any because other problems arose when it was bound to
+        # "object"; this can be revisited
+        instance = self.class_.__new__(self.class_)
+        if state is None:
+            state = self._state_constructor(instance, self)
+        self._state_setter(instance, state)
+        return instance
+
+    def setup_instance(
+        self, instance: _O, state: Optional[InstanceState[_O]] = None
+    ) -> None:
+        if state is None:
+            state = self._state_constructor(instance, self)
+        self._state_setter(instance, state)
+
+    def teardown_instance(self, instance: _O) -> None:
+        delattr(instance, self.STATE_ATTR)
+
+    def _serialize(
+        self, state: InstanceState[_O], state_dict: Dict[str, Any]
+    ) -> _SerializeManager:
+        return _SerializeManager(state, state_dict)
+
+    def _new_state_if_none(
+        self, instance: _O
+    ) -> Union[Literal[False], InstanceState[_O]]:
+        """Install a default InstanceState if none is present.
+
+        A private convenience method used by the __init__ decorator.
+
+        """
+        if hasattr(instance, self.STATE_ATTR):
+            return False
+        elif self.class_ is not instance.__class__ and self.is_mapped:
+            # this will create a new ClassManager for the
+            # subclass, without a mapper.  This is likely a
+            # user error situation but allow the object
+            # to be constructed, so that it is usable
+            # in a non-ORM context at least.
+            return self._subclass_manager(
+                instance.__class__
+            )._new_state_if_none(instance)
+        else:
+            state = self._state_constructor(instance, self)
+            self._state_setter(instance, state)
+            return state
+
+    def has_state(self, instance: _O) -> bool:
+        return hasattr(instance, self.STATE_ATTR)
+
+    def has_parent(
+        self, state: InstanceState[_O], key: str, optimistic: bool = False
+    ) -> bool:
+        """TODO"""
+        return self.get_impl(key).hasparent(state, optimistic=optimistic)
+
+    def __bool__(self) -> bool:
+        """All ClassManagers are non-zero regardless of attribute state."""
+        return True
+
+    def __repr__(self) -> str:
+        return "<%s of %r at %x>" % (
+            self.__class__.__name__,
+            self.class_,
+            id(self),
+        )
+
+
+class _SerializeManager:
+    """Provide serialization of a :class:`.ClassManager`.
+
+    The :class:`.InstanceState` uses ``__init__()`` on serialize
+    and ``__call__()`` on deserialize.
+
+    """
+
+    def __init__(self, state: state.InstanceState[Any], d: Dict[str, Any]):
+        self.class_ = state.class_
+        manager = state.manager
+        manager.dispatch.pickle(state, d)
+
+    def __call__(self, state, inst, state_dict):
+        state.manager = manager = opt_manager_of_class(self.class_)
+        if manager is None:
+            raise exc.UnmappedInstanceError(
+                inst,
+                "Cannot deserialize object of type %r - "
+                "no mapper() has "
+                "been configured for this class within the current "
+                "Python process!" % self.class_,
+            )
+        elif manager.is_mapped and not manager.mapper.configured:
+            manager.mapper._check_configure()
+
+        # setup _sa_instance_state ahead of time so that
+        # unpickle events can access the object normally.
+        # see [ticket:2362]
+        if inst is not None:
+            manager.setup_instance(inst, state)
+        manager.dispatch.unpickle(state, state_dict)
+
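+
+# --- editorial note (not part of SQLAlchemy) ----------------------------
+# _SerializeManager is exercised implicitly by pickling: pickling a
+# mapped instance constructs one of these from InstanceState's
+# __getstate__(), and unpickling invokes __call__() above, which is why
+# a class must already be mapped in the unpickling process before its
+# instances can be deserialized.
+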
+
+class InstrumentationFactory(EventTarget):
+    """Factory for new ClassManager instances."""
+
+    dispatch: dispatcher[InstrumentationFactory]
+
+    def create_manager_for_cls(self, class_: Type[_O]) -> ClassManager[_O]:
+        assert class_ is not None
+        assert opt_manager_of_class(class_) is None
+
+        # give a more complicated subclass
+        # a chance to do what it wants here
+        manager, factory = self._locate_extended_factory(class_)
+
+        if factory is None:
+            factory = ClassManager
+            manager = ClassManager(class_)
+        else:
+            assert manager is not None
+
+        self._check_conflicts(class_, factory)
+
+        manager.factory = factory
+
+        return manager
+
+    def _locate_extended_factory(
+        self, class_: Type[_O]
+    ) -> Tuple[Optional[ClassManager[_O]], Optional[_ManagerFactory]]:
+        """Overridden by a subclass to do an extended lookup."""
+        return None, None
+
+    def _check_conflicts(
+        self, class_: Type[_O], factory: Callable[[Type[_O]], ClassManager[_O]]
+    ) -> None:
+        """Overridden by a subclass to test for conflicting factories."""
+
+    def unregister(self, class_: Type[_O]) -> None:
+        manager = manager_of_class(class_)
+        manager.unregister()
+        self.dispatch.class_uninstrument(class_)
+
+
+# this attribute is replaced by sqlalchemy.ext.instrumentation
+# when imported.
+_instrumentation_factory = InstrumentationFactory()
+
+# these attributes are replaced by sqlalchemy.ext.instrumentation
+# when a non-standard InstrumentationManager class is first
+# used to instrument a class.
+instance_state = _default_state_getter = base.instance_state
+
+instance_dict = _default_dict_getter = base.instance_dict
+
+manager_of_class = _default_manager_getter = base.manager_of_class
+opt_manager_of_class = _default_opt_manager_getter = base.opt_manager_of_class
+
+
+def register_class(
+    class_: Type[_O],
+    finalize: bool = True,
+    mapper: Optional[Mapper[_O]] = None,
+    registry: Optional[_RegistryType] = None,
+    declarative_scan: Optional[_MapperConfig] = None,
+    expired_attribute_loader: Optional[_ExpiredAttributeLoaderProto] = None,
+    init_method: Optional[Callable[..., None]] = None,
+) -> ClassManager[_O]:
+    """Register class instrumentation.
+
+    Returns the existing or newly created class manager.
+
+    """
+
+    manager = opt_manager_of_class(class_)
+    if manager is None:
+        manager = _instrumentation_factory.create_manager_for_cls(class_)
+    manager._update_state(
+        mapper=mapper,
+        registry=registry,
+        declarative_scan=declarative_scan,
+        expired_attribute_loader=expired_attribute_loader,
+        init_method=init_method,
+        finalize=finalize,
+    )
+
+    return manager
+
+
+def unregister_class(class_):
+    """Unregister class instrumentation."""
+
+    _instrumentation_factory.unregister(class_)
+
+
+def is_instrumented(instance, key):
+    """Return True if the given attribute on the given instance is
+    instrumented by the attributes package.
+
+    This function may be used regardless of instrumentation
+    applied directly to the class, i.e. no descriptors are required.
+
+    """
+    return manager_of_class(instance.__class__).is_instrumented(
+        key, search=True
+    )
+
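+
+# --- illustrative sketch (editorial addition, not part of SQLAlchemy) ---
+# Mapped attributes report True; ad-hoc instance attributes report False.
+# ``Widget`` is a hypothetical declarative class; the function is defined
+# but never called here.
+def _demo_is_instrumented() -> None:
+    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
+
+    class Base(DeclarativeBase):
+        pass
+
+    class Widget(Base):
+        __tablename__ = "widget"
+        id: Mapped[int] = mapped_column(primary_key=True)
+        name: Mapped[str] = mapped_column(default="")
+
+    widget = Widget(name="w")
+    assert is_instrumented(widget, "name")
+    widget.extra = "not instrumented"
+    assert not is_instrumented(widget, "extra")
+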
+
+def _generate_init(class_, class_manager, original_init):
+    """Build an __init__ decorator that triggers ClassManager events."""
+
+    # TODO: we should use the ClassManager's notion of the
+    # original '__init__' method, once ClassManager is fixed
+    # to always reference that.
+
+    if original_init is None:
+        original_init = class_.__init__
+
+    # Go through some effort here and don't change the user's __init__
+    # calling signature, including the unlikely case that it has
+    # a return value.
+    # FIXME: need to juggle local names to avoid constructor argument
+    # clashes.
+    func_body = """\
+def __init__(%(apply_pos)s):
+    new_state = class_manager._new_state_if_none(%(self_arg)s)
+    if new_state:
+        return new_state._initialize_instance(%(apply_kw)s)
+    else:
+        return original_init(%(apply_kw)s)
+"""
+    func_vars = util.format_argspec_init(original_init, grouped=False)
+    func_text = func_body % func_vars
+
+    func_defaults = getattr(original_init, "__defaults__", None)
+    func_kw_defaults = getattr(original_init, "__kwdefaults__", None)
+
+    env = locals().copy()
+    env["__name__"] = __name__
+    exec(func_text, env)
+    __init__ = env["__init__"]
+    __init__.__doc__ = original_init.__doc__
+    __init__._sa_original_init = original_init
+
+    if func_defaults:
+        __init__.__defaults__ = func_defaults
+    if func_kw_defaults:
+        __init__.__kwdefaults__ = func_kw_defaults
+
+    return __init__
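+
+
+# --- editorial note (not part of SQLAlchemy) ----------------------------
+# For a user constructor such as
+#
+#     def __init__(self, name, value=0): ...
+#
+# the template above expands to approximately
+#
+#     def __init__(self, name, value):
+#         new_state = class_manager._new_state_if_none(self)
+#         if new_state:
+#             return new_state._initialize_instance(self, name, value=value)
+#         else:
+#             return original_init(self, name, value=value)
+#
+# with the default ``value=0`` re-applied via ``__defaults__``; the
+# user's original __init__ remains reachable via ``_sa_original_init``.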
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/interfaces.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/interfaces.py
new file mode 100644
index 00000000..b4462e54
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/interfaces.py
@@ -0,0 +1,1490 @@
+# orm/interfaces.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""
+
+Contains various base classes used throughout the ORM, including key
+base classes prominent within the internals.
+
+This module and the classes within are mostly private, though some attributes
+are exposed when inspecting mappings.
+
+"""
+
+from __future__ import annotations
+
+import collections
+import dataclasses
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import ClassVar
+from typing import Dict
+from typing import Generic
+from typing import Iterator
+from typing import List
+from typing import NamedTuple
+from typing import NoReturn
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import exc as orm_exc
+from . import path_registry
+from .base import _MappedAttribute as _MappedAttribute
+from .base import EXT_CONTINUE as EXT_CONTINUE  # noqa: F401
+from .base import EXT_SKIP as EXT_SKIP  # noqa: F401
+from .base import EXT_STOP as EXT_STOP  # noqa: F401
+from .base import InspectionAttr as InspectionAttr  # noqa: F401
+from .base import InspectionAttrInfo as InspectionAttrInfo
+from .base import MANYTOMANY as MANYTOMANY  # noqa: F401
+from .base import MANYTOONE as MANYTOONE  # noqa: F401
+from .base import NO_KEY as NO_KEY  # noqa: F401
+from .base import NO_VALUE as NO_VALUE  # noqa: F401
+from .base import NotExtension as NotExtension  # noqa: F401
+from .base import ONETOMANY as ONETOMANY  # noqa: F401
+from .base import RelationshipDirection as RelationshipDirection  # noqa: F401
+from .base import SQLORMOperations
+from .. import ColumnElement
+from .. import exc as sa_exc
+from .. import inspection
+from .. import util
+from ..sql import operators
+from ..sql import roles
+from ..sql import visitors
+from ..sql.base import _NoArg
+from ..sql.base import ExecutableOption
+from ..sql.cache_key import HasCacheKey
+from ..sql.operators import ColumnOperators
+from ..sql.schema import Column
+from ..sql.type_api import TypeEngine
+from ..util import warn_deprecated
+from ..util.typing import RODescriptorReference
+from ..util.typing import TypedDict
+
+if typing.TYPE_CHECKING:
+    from ._typing import _EntityType
+    from ._typing import _IdentityKeyType
+    from ._typing import _InstanceDict
+    from ._typing import _InternalEntityType
+    from ._typing import _ORMAdapterProto
+    from .attributes import InstrumentedAttribute
+    from .base import Mapped
+    from .context import _MapperEntity
+    from .context import ORMCompileState
+    from .context import QueryContext
+    from .decl_api import RegistryType
+    from .decl_base import _ClassScanMapperConfig
+    from .loading import _PopulatorDict
+    from .mapper import Mapper
+    from .path_registry import AbstractEntityRegistry
+    from .query import Query
+    from .session import Session
+    from .state import InstanceState
+    from .strategy_options import _LoadElement
+    from .util import AliasedInsp
+    from .util import ORMAdapter
+    from ..engine.result import Result
+    from ..sql._typing import _ColumnExpressionArgument
+    from ..sql._typing import _ColumnsClauseArgument
+    from ..sql._typing import _DMLColumnArgument
+    from ..sql._typing import _InfoType
+    from ..sql.operators import OperatorType
+    from ..sql.visitors import _TraverseInternalsType
+    from ..util.typing import _AnnotationScanType
+
+_StrategyKey = Tuple[Any, ...]
+
+_T = TypeVar("_T", bound=Any)
+_T_co = TypeVar("_T_co", bound=Any, covariant=True)
+
+_TLS = TypeVar("_TLS", bound="Type[LoaderStrategy]")
+
+
+class ORMStatementRole(roles.StatementRole):
+    __slots__ = ()
+    _role_name = (
+        "Executable SQL or text() construct, including ORM aware objects"
+    )
+
+
+class ORMColumnsClauseRole(
+    roles.ColumnsClauseRole, roles.TypedColumnsClauseRole[_T]
+):
+    __slots__ = ()
+    _role_name = "ORM mapped entity, aliased entity, or Column expression"
+
+
+class ORMEntityColumnsClauseRole(ORMColumnsClauseRole[_T]):
+    __slots__ = ()
+    _role_name = "ORM mapped or aliased entity"
+
+
+class ORMFromClauseRole(roles.StrictFromClauseRole):
+    __slots__ = ()
+    _role_name = "ORM mapped entity, aliased entity, or FROM expression"
+
+
+class ORMColumnDescription(TypedDict):
+    name: str
+    # TODO: add python_type and sql_type here; combining them
+    # into "type" is a bad idea
+    type: Union[Type[Any], TypeEngine[Any]]
+    aliased: bool
+    expr: _ColumnsClauseArgument[Any]
+    entity: Optional[_ColumnsClauseArgument[Any]]
+
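+
+# --- editorial note (not part of SQLAlchemy) ----------------------------
+# This is the shape of the dictionaries returned by
+# Query.column_descriptions and Select.column_descriptions.  For a
+# hypothetical mapped class ``User`` with a ``name`` column,
+# ``select(User, User.name).column_descriptions`` yields approximately:
+#
+#     [
+#         {"name": "User", "type": User, "aliased": False,
+#          "expr": User, "entity": User},
+#         {"name": "name", "type": String(), "aliased": False,
+#          "expr": User.name, "entity": User},
+#     ]
+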
+
+class _IntrospectsAnnotations:
+    __slots__ = ()
+
+    @classmethod
+    def _mapper_property_name(cls) -> str:
+        return cls.__name__
+
+    def found_in_pep593_annotated(self) -> Any:
+        """return a copy of this object to use in declarative when the
+        object is found inside of an Annotated object."""
+
+        raise NotImplementedError(
+            f"Use of the {self._mapper_property_name()!r} "
+            "construct inside of an Annotated object is not yet supported."
+        )
+
+    def declarative_scan(
+        self,
+        decl_scan: _ClassScanMapperConfig,
+        registry: RegistryType,
+        cls: Type[Any],
+        originating_module: Optional[str],
+        key: str,
+        mapped_container: Optional[Type[Mapped[Any]]],
+        annotation: Optional[_AnnotationScanType],
+        extracted_mapped_annotation: Optional[_AnnotationScanType],
+        is_dataclass_field: bool,
+    ) -> None:
+        """Perform class-specific initializaton at early declarative scanning
+        time.
+
+        .. versionadded:: 2.0
+
+        """
+
+    def _raise_for_required(self, key: str, cls: Type[Any]) -> NoReturn:
+        raise sa_exc.ArgumentError(
+            f"Python typing annotation is required for attribute "
+            f'"{cls.__name__}.{key}" when primary argument(s) for '
+            f'"{self._mapper_property_name()}" '
+            "construct are None or not present"
+        )
+
+
+class _AttributeOptions(NamedTuple):
+    """define Python-local attribute behavior options common to all
+    :class:`.MapperProperty` objects.
+
+    Currently this includes dataclass-generation arguments.
+
+    .. versionadded:: 2.0
+
+    """
+
+    dataclasses_init: Union[_NoArg, bool]
+    dataclasses_repr: Union[_NoArg, bool]
+    dataclasses_default: Union[_NoArg, Any]
+    dataclasses_default_factory: Union[_NoArg, Callable[[], Any]]
+    dataclasses_compare: Union[_NoArg, bool]
+    dataclasses_kw_only: Union[_NoArg, bool]
+    dataclasses_hash: Union[_NoArg, bool, None]
+
+    def _as_dataclass_field(self, key: str) -> Any:
+        """Return a ``dataclasses.Field`` object given these arguments."""
+
+        kw: Dict[str, Any] = {}
+        if self.dataclasses_default_factory is not _NoArg.NO_ARG:
+            kw["default_factory"] = self.dataclasses_default_factory
+        if self.dataclasses_default is not _NoArg.NO_ARG:
+            kw["default"] = self.dataclasses_default
+        if self.dataclasses_init is not _NoArg.NO_ARG:
+            kw["init"] = self.dataclasses_init
+        if self.dataclasses_repr is not _NoArg.NO_ARG:
+            kw["repr"] = self.dataclasses_repr
+        if self.dataclasses_compare is not _NoArg.NO_ARG:
+            kw["compare"] = self.dataclasses_compare
+        if self.dataclasses_kw_only is not _NoArg.NO_ARG:
+            kw["kw_only"] = self.dataclasses_kw_only
+        if self.dataclasses_hash is not _NoArg.NO_ARG:
+            kw["hash"] = self.dataclasses_hash
+
+        if "default" in kw and callable(kw["default"]):
+            # callable defaults are ambiguous. deprecate them in favour of
+            # insert_default or default_factory. #9936
+            warn_deprecated(
+                f"Callable object passed to the ``default`` parameter for "
+                f"attribute {key!r} in a ORM-mapped Dataclasses context is "
+                "ambiguous, "
+                "and this use will raise an error in a future release.  "
+                "If this callable is intended to produce Core level INSERT "
+                "default values for an underlying ``Column``, use "
+                "the ``mapped_column.insert_default`` parameter instead.  "
+                "To establish this callable as providing a default value "
+                "for instances of the dataclass itself, use the "
+                "``default_factory`` dataclasses parameter.",
+                "2.0",
+            )
+
+        if (
+            "init" in kw
+            and not kw["init"]
+            and "default" in kw
+            and not callable(kw["default"])  # ignore callable defaults. #9936
+            and "default_factory" not in kw  # illegal but let dc.field raise
+        ):
+            # fix for #9879
+            default = kw.pop("default")
+            kw["default_factory"] = lambda: default
+
+        return dataclasses.field(**kw)
+
+    @classmethod
+    def _get_arguments_for_make_dataclass(
+        cls,
+        key: str,
+        annotation: _AnnotationScanType,
+        mapped_container: Optional[Any],
+        elem: _T,
+    ) -> Union[
+        Tuple[str, _AnnotationScanType],
+        Tuple[str, _AnnotationScanType, dataclasses.Field[Any]],
+    ]:
+        """given attribute key, annotation, and value from a class, return
+        the argument tuple we would pass to dataclasses.make_dataclass()
+        for this attribute.
+
+        """
+        if isinstance(elem, _DCAttributeOptions):
+            dc_field = elem._attribute_options._as_dataclass_field(key)
+
+            return (key, annotation, dc_field)
+        elif elem is not _NoArg.NO_ARG:
+            # why is typing not erroring on this?
+            return (key, annotation, elem)
+        elif mapped_container is not None:
+            # it's Mapped[], but there's no "element", which means declarative
+            # did not actually do anything for this field.  this shouldn't
+            # happen.
+            # previously, this would occur because _scan_attributes would
+            # skip a field that's on an already mapped superclass, but it
+            # would still include it in the annotations, leading
+            # to issue #8718
+
+            assert False, "Mapped[] received without a mapping declaration"
+
+        else:
+            # plain dataclass field, not mapped.  Is only possible
+            # if __allow_unmapped__ is set up.  I can see this mode causing
+            # problems...
+            return (key, annotation)
+
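+
+# --- illustrative sketch (editorial addition, not part of SQLAlchemy) ---
+# How _AttributeOptions renders into a ``dataclasses.field()``: options
+# left as NO_ARG are simply omitted from the field() call.  The values
+# below are arbitrary; the function is defined but never called here.
+def _demo_attribute_options_as_field() -> None:
+    opts = _AttributeOptions(
+        True,  # dataclasses_init
+        True,  # dataclasses_repr
+        5,  # dataclasses_default
+        _NoArg.NO_ARG,  # dataclasses_default_factory
+        _NoArg.NO_ARG,  # dataclasses_compare
+        _NoArg.NO_ARG,  # dataclasses_kw_only
+        _NoArg.NO_ARG,  # dataclasses_hash
+    )
+    field_ = opts._as_dataclass_field("some_attr")
+    assert field_.default == 5 and field_.init and field_.repr
+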
+
+_DEFAULT_ATTRIBUTE_OPTIONS = _AttributeOptions(
+    _NoArg.NO_ARG,
+    _NoArg.NO_ARG,
+    _NoArg.NO_ARG,
+    _NoArg.NO_ARG,
+    _NoArg.NO_ARG,
+    _NoArg.NO_ARG,
+    _NoArg.NO_ARG,
+)
+
+_DEFAULT_READONLY_ATTRIBUTE_OPTIONS = _AttributeOptions(
+    False,
+    _NoArg.NO_ARG,
+    _NoArg.NO_ARG,
+    _NoArg.NO_ARG,
+    _NoArg.NO_ARG,
+    _NoArg.NO_ARG,
+    _NoArg.NO_ARG,
+)
+
+
+class _DCAttributeOptions:
+    """mixin for descriptors or configurational objects that include dataclass
+    field options.
+
+    This includes :class:`.MapperProperty`, :class:`._MapsColumn` within
+    the ORM, but also includes :class:`.AssociationProxy` within ext.
+    Can in theory be used for other descriptors that serve a similar role
+    as association proxy.   (*maybe* hybrids, not sure yet.)
+
+    """
+
+    __slots__ = ()
+
+    _attribute_options: _AttributeOptions
+    """behavioral options for ORM-enabled Python attributes
+
+    .. versionadded:: 2.0
+
+    """
+
+    _has_dataclass_arguments: bool
+
+
+class _MapsColumns(_DCAttributeOptions, _MappedAttribute[_T]):
+    """interface for declarative-capable construct that delivers one or more
+    Column objects to the declarative process to be part of a Table.
+    """
+
+    __slots__ = ()
+
+    @property
+    def mapper_property_to_assign(self) -> Optional[MapperProperty[_T]]:
+        """return a MapperProperty to be assigned to the declarative mapping"""
+        raise NotImplementedError()
+
+    @property
+    def columns_to_assign(self) -> List[Tuple[Column[_T], int]]:
+        """A list of Column objects that should be declaratively added to the
+        new Table object.
+
+        """
+        raise NotImplementedError()
+
+
+# NOTE: MapperProperty needs to extend _MappedAttribute so that declarative
+# typing works, i.e. "Mapped[A] = relationship()".   This introduces an
+# inconvenience which is that all the MapperProperty objects are treated
+# as descriptors by typing tools, which are misled by this as assignment /
+# access to a descriptor attribute wants to move through __get__.
+# Therefore, references to MapperProperty as an instance variable, such
+# as in PropComparator, may have some special typing workarounds such as the
+# use of sqlalchemy.util.typing.DescriptorReference to avoid mis-interpretation
+# by typing tools
+@inspection._self_inspects
+class MapperProperty(
+    HasCacheKey,
+    _DCAttributeOptions,
+    _MappedAttribute[_T],
+    InspectionAttrInfo,
+    util.MemoizedSlots,
+):
+    """Represent a particular class attribute mapped by :class:`_orm.Mapper`.
+
+    The most common occurrences of :class:`.MapperProperty` are the
+    mapped :class:`_schema.Column`, which is represented in a mapping as
+    an instance of :class:`.ColumnProperty`,
+    and a reference to another class produced by :func:`_orm.relationship`,
+    represented in the mapping as an instance of
+    :class:`.Relationship`.
+
+    """
+
+    __slots__ = (
+        "_configure_started",
+        "_configure_finished",
+        "_attribute_options",
+        "_has_dataclass_arguments",
+        "parent",
+        "key",
+        "info",
+        "doc",
+    )
+
+    _cache_key_traversal: _TraverseInternalsType = [
+        ("parent", visitors.ExtendedInternalTraversal.dp_has_cache_key),
+        ("key", visitors.ExtendedInternalTraversal.dp_string),
+    ]
+
+    if not TYPE_CHECKING:
+        cascade = None
+
+    is_property = True
+    """Part of the InspectionAttr interface; states this object is a
+    mapper property.
+
+    """
+
+    comparator: PropComparator[_T]
+    """The :class:`_orm.PropComparator` instance that implements SQL
+    expression construction on behalf of this mapped attribute."""
+
+    key: str
+    """name of class attribute"""
+
+    parent: Mapper[Any]
+    """the :class:`.Mapper` managing this property."""
+
+    _is_relationship = False
+
+    _links_to_entity: bool
+    """True if this MapperProperty refers to a mapped entity.
+
+    Should only be True for Relationship, False for all others.
+
+    """
+
+    doc: Optional[str]
+    """optional documentation string"""
+
+    info: _InfoType
+    """Info dictionary associated with the object, allowing user-defined
+    data to be associated with this :class:`.InspectionAttr`.
+
+    The dictionary is generated when first accessed.  Alternatively,
+    it can be specified as a constructor argument to the
+    :func:`.column_property`, :func:`_orm.relationship`, or :func:`.composite`
+    functions.
+
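+    E.g.::
+
+        some_column = column_property(Column(String), info={"sensitive": True})
+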
+    .. seealso::
+
+        :attr:`.QueryableAttribute.info`
+
+        :attr:`.SchemaItem.info`
+
+    """
+
+    def _memoized_attr_info(self) -> _InfoType:
+        """Info dictionary associated with the object, allowing user-defined
+        data to be associated with this :class:`.InspectionAttr`.
+
+        The dictionary is generated when first accessed.  Alternatively,
+        it can be specified as a constructor argument to the
+        :func:`.column_property`, :func:`_orm.relationship`, or
+        :func:`.composite`
+        functions.
+
+        .. seealso::
+
+            :attr:`.QueryableAttribute.info`
+
+            :attr:`.SchemaItem.info`
+
+        """
+        return {}
+
+    def setup(
+        self,
+        context: ORMCompileState,
+        query_entity: _MapperEntity,
+        path: AbstractEntityRegistry,
+        adapter: Optional[ORMAdapter],
+        **kwargs: Any,
+    ) -> None:
+        """Called by Query for the purposes of constructing a SQL statement.
+
+        Each MapperProperty associated with the target mapper processes the
+        statement referenced by the query context, adding columns and/or
+        criterion as appropriate.
+
+        """
+
+    def create_row_processor(
+        self,
+        context: ORMCompileState,
+        query_entity: _MapperEntity,
+        path: AbstractEntityRegistry,
+        mapper: Mapper[Any],
+        result: Result[Any],
+        adapter: Optional[ORMAdapter],
+        populators: _PopulatorDict,
+    ) -> None:
+        """Produce row processing functions and append to the given
+        set of populators lists.
+
+        """
+
+    def cascade_iterator(
+        self,
+        type_: str,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        visited_states: Set[InstanceState[Any]],
+        halt_on: Optional[Callable[[InstanceState[Any]], bool]] = None,
+    ) -> Iterator[
+        Tuple[object, Mapper[Any], InstanceState[Any], _InstanceDict]
+    ]:
+        """Iterate through instances related to the given instance for
+        a particular 'cascade', starting with this MapperProperty.
+
+        Return an iterator of ``(instance, mapper, state, dict)`` tuples.
+
+        Note that the 'cascade' collection on this MapperProperty is
+        checked first for the given type before cascade_iterator is called.
+
+        This method typically only applies to Relationship.
+
+        """
+
+        return iter(())
+
+    def set_parent(self, parent: Mapper[Any], init: bool) -> None:
+        """Set the parent mapper that references this MapperProperty.
+
+        This method is overridden by some subclasses to perform extra
+        setup when the mapper is first known.
+
+        """
+        self.parent = parent
+
+    def instrument_class(self, mapper: Mapper[Any]) -> None:
+        """Hook called by the Mapper to the property to initiate
+        instrumentation of the class attribute managed by this
+        MapperProperty.
+
+        The MapperProperty here will typically call out to the
+        attributes module to set up an InstrumentedAttribute.
+
+        This step is the first of two steps to set up an InstrumentedAttribute,
+        and is called early in the mapper setup process.
+
+        The second step is typically the init_class_attribute step,
+        called from StrategizedProperty via the post_instrument_class()
+        hook.  This step assigns additional state to the InstrumentedAttribute
+        (specifically the "impl") which has been determined after the
+        MapperProperty has determined what kind of persistence
+        management it needs to do (e.g. scalar, object, collection, etc).
+
+        """
+
+    def __init__(
+        self,
+        attribute_options: Optional[_AttributeOptions] = None,
+        _assume_readonly_dc_attributes: bool = False,
+    ) -> None:
+        self._configure_started = False
+        self._configure_finished = False
+
+        if _assume_readonly_dc_attributes:
+            default_attrs = _DEFAULT_READONLY_ATTRIBUTE_OPTIONS
+        else:
+            default_attrs = _DEFAULT_ATTRIBUTE_OPTIONS
+
+        if attribute_options and attribute_options != default_attrs:
+            self._has_dataclass_arguments = True
+            self._attribute_options = attribute_options
+        else:
+            self._has_dataclass_arguments = False
+            self._attribute_options = default_attrs
+
+    def init(self) -> None:
+        """Called after all mappers are created to assemble
+        relationships between mappers and perform other post-mapper-creation
+        initialization steps.
+
+        """
+        self._configure_started = True
+        self.do_init()
+        self._configure_finished = True
+
+    @property
+    def class_attribute(self) -> InstrumentedAttribute[_T]:
+        """Return the class-bound descriptor corresponding to this
+        :class:`.MapperProperty`.
+
+        This is basically a ``getattr()`` call::
+
+            return getattr(self.parent.class_, self.key)
+
+        I.e. if this :class:`.MapperProperty` were named ``addresses``,
+        and the class to which it is mapped is ``User``, this sequence
+        is possible::
+
+            >>> from sqlalchemy import inspect
+            >>> mapper = inspect(User)
+            >>> addresses_property = mapper.attrs.addresses
+            >>> addresses_property.class_attribute is User.addresses
+            True
+            >>> User.addresses.property is addresses_property
+            True
+
+        """
+
+        return getattr(self.parent.class_, self.key)  # type: ignore
+
+    def do_init(self) -> None:
+        """Perform subclass-specific initialization post-mapper-creation
+        steps.
+
+        This is a template method called by the ``MapperProperty``
+        object's init() method.
+
+        """
+
+    def post_instrument_class(self, mapper: Mapper[Any]) -> None:
+        """Perform instrumentation adjustments that need to occur
+        after init() has completed.
+
+        The given Mapper is the Mapper invoking the operation, which
+        may not be the same Mapper as self.parent in an inheritance
+        scenario; however, Mapper will always at least be a sub-mapper of
+        self.parent.
+
+        This method is typically used by StrategizedProperty, which delegates
+        it to LoaderStrategy.init_class_attribute() to perform final setup
+        on the class-bound InstrumentedAttribute.
+
+        """
+
+    def merge(
+        self,
+        session: Session,
+        source_state: InstanceState[Any],
+        source_dict: _InstanceDict,
+        dest_state: InstanceState[Any],
+        dest_dict: _InstanceDict,
+        load: bool,
+        _recursive: Dict[Any, object],
+        _resolve_conflict_map: Dict[_IdentityKeyType[Any], object],
+    ) -> None:
+        """Merge the attribute represented by this ``MapperProperty``
+        from source to destination object.
+
+        """
+
+    def __repr__(self) -> str:
+        return "<%s at 0x%x; %s>" % (
+            self.__class__.__name__,
+            id(self),
+            getattr(self, "key", "no key"),
+        )
+
+
+@inspection._self_inspects
+class PropComparator(SQLORMOperations[_T_co], Generic[_T_co], ColumnOperators):
+    r"""Defines SQL operations for ORM mapped attributes.
+
+    SQLAlchemy allows for operators to
+    be redefined at both the Core and ORM level.  :class:`.PropComparator`
+    is the base class of operator redefinition for ORM-level operations,
+    including those of :class:`.ColumnProperty`,
+    :class:`.Relationship`, and :class:`.Composite`.
+
+    User-defined subclasses of :class:`.PropComparator` may be created. The
+    built-in Python comparison and math operator methods, such as
+    :meth:`.operators.ColumnOperators.__eq__`,
+    :meth:`.operators.ColumnOperators.__lt__`, and
+    :meth:`.operators.ColumnOperators.__add__`, can be overridden to provide
+    new operator behavior. The custom :class:`.PropComparator` is passed to
+    the :class:`.MapperProperty` instance via the ``comparator_factory``
+    argument. In each case,
+    the appropriate subclass of :class:`.PropComparator` should be used::
+
+        # definition of custom PropComparator subclasses
+
+        from sqlalchemy.orm.properties import (
+            ColumnProperty,
+            Composite,
+            Relationship,
+        )
+
+
+        class MyColumnComparator(ColumnProperty.Comparator):
+            def __eq__(self, other):
+                return self.__clause_element__() == other
+
+
+        class MyRelationshipComparator(Relationship.Comparator):
+            def any(self, expression):
+                "define the 'any' operation"
+                # ...
+
+
+        class MyCompositeComparator(Composite.Comparator):
+            def __gt__(self, other):
+                "redefine the 'greater than' operation"
+
+                return sql.and_(
+                    *[
+                        a > b
+                        for a, b in zip(
+                            self.__clause_element__().clauses,
+                            other.__composite_values__(),
+                        )
+                    ]
+                )
+
+
+        # application of custom PropComparator subclasses
+
+        from sqlalchemy.orm import column_property, relationship, composite
+        from sqlalchemy import Column, String
+
+
+        class SomeMappedClass(Base):
+            some_column = column_property(
+                Column("some_column", String),
+                comparator_factory=MyColumnComparator,
+            )
+
+            some_relationship = relationship(
+                SomeOtherClass, comparator_factory=MyRelationshipComparator
+            )
+
+            some_composite = composite(
+                Column("a", String),
+                Column("b", String),
+                comparator_factory=MyCompositeComparator,
+            )
+
+    Note that for column-level operator redefinition, it's usually
+    simpler to define the operators at the Core level, using the
+    :attr:`.TypeEngine.comparator_factory` attribute.  See
+    :ref:`types_operators` for more detail.
+
+    .. seealso::
+
+        :class:`.ColumnProperty.Comparator`
+
+        :class:`.Relationship.Comparator`
+
+        :class:`.Composite.Comparator`
+
+        :class:`.ColumnOperators`
+
+        :ref:`types_operators`
+
+        :attr:`.TypeEngine.comparator_factory`
+
+    """
+
+    __slots__ = "prop", "_parententity", "_adapt_to_entity"
+
+    __visit_name__ = "orm_prop_comparator"
+
+    _parententity: _InternalEntityType[Any]
+    _adapt_to_entity: Optional[AliasedInsp[Any]]
+    prop: RODescriptorReference[MapperProperty[_T_co]]
+
+    def __init__(
+        self,
+        prop: MapperProperty[_T],
+        parentmapper: _InternalEntityType[Any],
+        adapt_to_entity: Optional[AliasedInsp[Any]] = None,
+    ):
+        self.prop = prop
+        self._parententity = adapt_to_entity or parentmapper
+        self._adapt_to_entity = adapt_to_entity
+
+    @util.non_memoized_property
+    def property(self) -> MapperProperty[_T_co]:
+        """Return the :class:`.MapperProperty` associated with this
+        :class:`.PropComparator`.
+
+        Return values here will commonly be instances of
+        :class:`.ColumnProperty` or :class:`.Relationship`.
+
+        """
+        return self.prop
+
+    def __clause_element__(self) -> roles.ColumnsClauseRole:
+        raise NotImplementedError("%r" % self)
+
+    def _bulk_update_tuples(
+        self, value: Any
+    ) -> Sequence[Tuple[_DMLColumnArgument, Any]]:
+        """Receive a SQL expression that represents a value in the SET
+        clause of an UPDATE statement.
+
+        Return a tuple that can be passed to a :class:`_expression.Update`
+        construct.
+
+        """
+
+        return [(cast("_DMLColumnArgument", self.__clause_element__()), value)]
+
+    def adapt_to_entity(
+        self, adapt_to_entity: AliasedInsp[Any]
+    ) -> PropComparator[_T_co]:
+        """Return a copy of this PropComparator which will use the given
+        :class:`.AliasedInsp` to produce corresponding expressions.
+        """
+        return self.__class__(self.prop, self._parententity, adapt_to_entity)
+
+    @util.ro_non_memoized_property
+    def _parentmapper(self) -> Mapper[Any]:
+        """legacy; this is renamed to _parententity to be
+        compatible with QueryableAttribute."""
+        return self._parententity.mapper
+
+    def _criterion_exists(
+        self,
+        criterion: Optional[_ColumnExpressionArgument[bool]] = None,
+        **kwargs: Any,
+    ) -> ColumnElement[Any]:
+        return self.prop.comparator._criterion_exists(criterion, **kwargs)
+
+    @util.ro_non_memoized_property
+    def adapter(self) -> Optional[_ORMAdapterProto]:
+        """Produce a callable that adapts column expressions
+        to suit an aliased version of this comparator.
+
+        """
+        if self._adapt_to_entity is None:
+            return None
+        else:
+            return self._adapt_to_entity._orm_adapt_element
+
+    @util.ro_non_memoized_property
+    def info(self) -> _InfoType:
+        return self.prop.info
+
+    @staticmethod
+    def _any_op(a: Any, b: Any, **kwargs: Any) -> Any:
+        return a.any(b, **kwargs)
+
+    @staticmethod
+    def _has_op(left: Any, other: Any, **kwargs: Any) -> Any:
+        return left.has(other, **kwargs)
+
+    @staticmethod
+    def _of_type_op(a: Any, class_: Any) -> Any:
+        return a.of_type(class_)
+
+    any_op = cast(operators.OperatorType, _any_op)
+    has_op = cast(operators.OperatorType, _has_op)
+    of_type_op = cast(operators.OperatorType, _of_type_op)
+
+    if typing.TYPE_CHECKING:
+
+        def operate(
+            self, op: OperatorType, *other: Any, **kwargs: Any
+        ) -> ColumnElement[Any]: ...
+
+        def reverse_operate(
+            self, op: OperatorType, other: Any, **kwargs: Any
+        ) -> ColumnElement[Any]: ...
+
+    def of_type(self, class_: _EntityType[Any]) -> PropComparator[_T_co]:
+        r"""Redefine this object in terms of a polymorphic subclass,
+        :func:`_orm.with_polymorphic` construct, or :func:`_orm.aliased`
+        construct.
+
+        Returns a new PropComparator from which further criterion can be
+        evaluated.
+
+        e.g.::
+
+            query.join(Company.employees.of_type(Engineer)).filter(
+                Engineer.name == "foo"
+            )
+
+        :param class_: a class or mapper indicating that criterion will be
+            against this specific subclass.
+
+        .. seealso::
+
+            :ref:`orm_queryguide_joining_relationships_aliased` - in the
+            :ref:`queryguide_toplevel`
+
+            :ref:`inheritance_of_type`
+
+        """
+
+        return self.operate(PropComparator.of_type_op, class_)  # type: ignore
+
+    def and_(
+        self, *criteria: _ColumnExpressionArgument[bool]
+    ) -> PropComparator[bool]:
+        """Add additional criteria to the ON clause that's represented by this
+        relationship attribute.
+
+        E.g.::
+
+
+            stmt = select(User).join(
+                User.addresses.and_(Address.email_address != "foo")
+            )
+
+            stmt = select(User).options(
+                joinedload(User.addresses.and_(Address.email_address != "foo"))
+            )
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :ref:`orm_queryguide_join_on_augmented`
+
+            :ref:`loader_option_criteria`
+
+            :func:`.with_loader_criteria`
+
+        """
+        return self.operate(operators.and_, *criteria)  # type: ignore
+
+    def any(
+        self,
+        criterion: Optional[_ColumnExpressionArgument[bool]] = None,
+        **kwargs: Any,
+    ) -> ColumnElement[bool]:
+        r"""Return a SQL expression representing true if this element
+        references a member which meets the given criterion.
+
+        The usual implementation of ``any()`` is
+        :meth:`.Relationship.Comparator.any`.
+
+        :param criterion: an optional ClauseElement formulated against the
+          member class' table or attributes.
+
+        :param \**kwargs: key/value pairs corresponding to member class
+          attribute names which will be compared via equality to the
+          corresponding values.
+
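+        E.g., assuming a ``User.addresses`` collection relationship::
+
+            stmt = select(User).where(
+                User.addresses.any(Address.email_address == "foo")
+            )
+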
+        """
+
+        return self.operate(PropComparator.any_op, criterion, **kwargs)
+
+    def has(
+        self,
+        criterion: Optional[_ColumnExpressionArgument[bool]] = None,
+        **kwargs: Any,
+    ) -> ColumnElement[bool]:
+        r"""Return a SQL expression representing true if this element
+        references a member which meets the given criterion.
+
+        The usual implementation of ``has()`` is
+        :meth:`.Relationship.Comparator.has`.
+
+        :param criterion: an optional ClauseElement formulated against the
+          member class' table or attributes.
+
+        :param \**kwargs: key/value pairs corresponding to member class
+          attribute names which will be compared via equality to the
+          corresponding values.
+
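+        E.g., assuming a many-to-one ``Address.user`` relationship::
+
+            stmt = select(Address).where(Address.user.has(User.name == "ed"))
+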
+        """
+
+        return self.operate(PropComparator.has_op, criterion, **kwargs)
+
+
+class StrategizedProperty(MapperProperty[_T]):
+    """A MapperProperty which uses selectable strategies to affect
+    loading behavior.
+
+    There is a single strategy selected by default.  Alternate
+    strategies can be selected at Query time through the usage of
+    loader option objects via the Query.options() method.
+
+    The mechanics of StrategizedProperty are used for every Query
+    invocation for every mapped attribute participating in that Query,
+    to determine first how the attribute will be rendered in SQL
+    and secondly how the attribute will retrieve a value from a result
+    row and apply it to a mapped object.  The routines here are very
+    performance-critical.
+
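+    E.g., the strategy in use for an attribute may be changed for a single
+    statement via a loader option (illustrative)::
+
+        stmt = select(User).options(selectinload(User.addresses))
+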
+    """
+
+    __slots__ = (
+        "_strategies",
+        "strategy",
+        "_wildcard_token",
+        "_default_path_loader_key",
+        "strategy_key",
+    )
+    inherit_cache = True
+    strategy_wildcard_key: ClassVar[str]
+
+    strategy_key: _StrategyKey
+
+    _strategies: Dict[_StrategyKey, LoaderStrategy]
+
+    def _memoized_attr__wildcard_token(self) -> Tuple[str]:
+        return (
+            f"{self.strategy_wildcard_key}:{path_registry._WILDCARD_TOKEN}",
+        )
+
+    def _memoized_attr__default_path_loader_key(
+        self,
+    ) -> Tuple[str, Tuple[str]]:
+        return (
+            "loader",
+            (f"{self.strategy_wildcard_key}:{path_registry._DEFAULT_TOKEN}",),
+        )
+
+    def _get_context_loader(
+        self, context: ORMCompileState, path: AbstractEntityRegistry
+    ) -> Optional[_LoadElement]:
+        load: Optional[_LoadElement] = None
+
+        search_path = path[self]
+
+        # search among: exact match, "attr.*", "default" strategy
+        # if any.
+        for path_key in (
+            search_path._loader_key,
+            search_path._wildcard_path_loader_key,
+            search_path._default_path_loader_key,
+        ):
+            if path_key in context.attributes:
+                load = context.attributes[path_key]
+                break
+
+                # note that if strategy_options.Load is placing non-actionable
+                # objects in the context like defaultload(), we would
+                # need to continue the loop here if we got such an
+                # option as below.
+                # if load.strategy or load.local_opts:
+                #    break
+
+        return load
+
+    def _get_strategy(self, key: _StrategyKey) -> LoaderStrategy:
+        try:
+            return self._strategies[key]
+        except KeyError:
+            pass
+
+        # run outside to prevent transfer of exception context
+        cls = self._strategy_lookup(self, *key)
+        # this previously was setting self._strategies[cls], that's
+        # a bad idea; should use strategy key at all times because every
+        # strategy has multiple keys at this point
+        self._strategies[key] = strategy = cls(self, key)
+        return strategy
+
+    def setup(
+        self,
+        context: ORMCompileState,
+        query_entity: _MapperEntity,
+        path: AbstractEntityRegistry,
+        adapter: Optional[ORMAdapter],
+        **kwargs: Any,
+    ) -> None:
+        loader = self._get_context_loader(context, path)
+        if loader and loader.strategy:
+            strat = self._get_strategy(loader.strategy)
+        else:
+            strat = self.strategy
+        strat.setup_query(
+            context, query_entity, path, loader, adapter, **kwargs
+        )
+
+    def create_row_processor(
+        self,
+        context: ORMCompileState,
+        query_entity: _MapperEntity,
+        path: AbstractEntityRegistry,
+        mapper: Mapper[Any],
+        result: Result[Any],
+        adapter: Optional[ORMAdapter],
+        populators: _PopulatorDict,
+    ) -> None:
+        loader = self._get_context_loader(context, path)
+        if loader and loader.strategy:
+            strat = self._get_strategy(loader.strategy)
+        else:
+            strat = self.strategy
+        strat.create_row_processor(
+            context,
+            query_entity,
+            path,
+            loader,
+            mapper,
+            result,
+            adapter,
+            populators,
+        )
+
+    def do_init(self) -> None:
+        self._strategies = {}
+        self.strategy = self._get_strategy(self.strategy_key)
+
+    def post_instrument_class(self, mapper: Mapper[Any]) -> None:
+        if (
+            not self.parent.non_primary
+            and not mapper.class_manager._attr_has_impl(self.key)
+        ):
+            self.strategy.init_class_attribute(mapper)
+
+    _all_strategies: collections.defaultdict[
+        Type[MapperProperty[Any]], Dict[_StrategyKey, Type[LoaderStrategy]]
+    ] = collections.defaultdict(dict)
+
+    @classmethod
+    def strategy_for(cls, **kw: Any) -> Callable[[_TLS], _TLS]:
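+        """Class decorator that registers a :class:`.LoaderStrategy` subclass
+        under the strategy key built from the given keyword arguments.
+
+        A sketch of how the built-in strategies in ``strategies.py``
+        register themselves (decorator arguments illustrative)::
+
+            @ColumnProperty.strategy_for(instrument=True, deferred=False)
+            class ColumnLoader(LoaderStrategy): ...
+
+        """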
+        def decorate(dec_cls: _TLS) -> _TLS:
+            # ensure each subclass of the strategy has its
+            # own _strategy_keys collection
+            if "_strategy_keys" not in dec_cls.__dict__:
+                dec_cls._strategy_keys = []
+            key = tuple(sorted(kw.items()))
+            cls._all_strategies[cls][key] = dec_cls
+            dec_cls._strategy_keys.append(key)
+            return dec_cls
+
+        return decorate
+
+    @classmethod
+    def _strategy_lookup(
+        cls, requesting_property: MapperProperty[Any], *key: Any
+    ) -> Type[LoaderStrategy]:
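+        # the attribute access below is for its side effect: it ensures the
+        # parent mapper's configure-time memoizations are initialized before
+        # strategies are looked up; the value itself is unused here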
+        requesting_property.parent._with_polymorphic_mappers
+
+        for prop_cls in cls.__mro__:
+            if prop_cls in cls._all_strategies:
+                if TYPE_CHECKING:
+                    assert issubclass(prop_cls, MapperProperty)
+                strategies = cls._all_strategies[prop_cls]
+                try:
+                    return strategies[key]
+                except KeyError:
+                    pass
+
+        for property_type, strats in cls._all_strategies.items():
+            if key in strats:
+                intended_property_type = property_type
+                actual_strategy = strats[key]
+                break
+        else:
+            intended_property_type = None
+            actual_strategy = None
+
+        raise orm_exc.LoaderStrategyException(
+            cls,
+            requesting_property,
+            intended_property_type,
+            actual_strategy,
+            key,
+        )
+
+
+class ORMOption(ExecutableOption):
+    """Base class for option objects that are passed to ORM queries.
+
+    These options may be consumed by :meth:`.Query.options`,
+    :meth:`.Select.options`, or in a more general sense by any
+    :meth:`.Executable.options` method.   They are interpreted at
+    statement compile time or execution time in modern use.  The
+    deprecated :class:`.MapperOption` is consumed at ORM query construction
+    time.
+
+    .. versionadded:: 1.4
+
+    """
+
+    __slots__ = ()
+
+    _is_legacy_option = False
+
+    propagate_to_loaders = False
+    """if True, indicate this option should be carried along
+    to "secondary" SELECT statements that occur for relationship
+    lazy loaders as well as attribute load / refresh operations.
+
+    """
+
+    _is_core = False
+
+    _is_user_defined = False
+
+    _is_compile_state = False
+
+    _is_criteria_option = False
+
+    _is_strategy_option = False
+
+    def _adapt_cached_option_to_uncached_option(
+        self, context: QueryContext, uncached_opt: ORMOption
+    ) -> ORMOption:
+        """adapt this option to the "uncached" version of itself in a
+        loader strategy context.
+
+        given "self" which is an option from a cached query, as well as the
+        corresponding option from the uncached version of the same query,
+        return the option we should use in a new query, in the context of a
+        loader strategy being asked to load related rows on behalf of that
+        cached query, which is assumed to be building a new query based on
+        entities passed to us from the cached query.
+
+        Currently this routine chooses between "self" and "uncached" without
+        manufacturing anything new. If the option is itself a loader strategy
+        option which has a path, that path needs to match to the entities being
+        passed to us by the cached query, so the :class:`_orm.Load` subclass
+        overrides this to return "self". For all other options, we return the
+        uncached form which may have changing state, such as a
+        with_loader_criteria() option which will very often have new state.
+
+        This routine could in the future involve
+        generating a new option based on both inputs if use cases arise,
+        such as if with_loader_criteria() needed to match up to
+        ``AliasedClass`` instances given in the parent query.
+
+        However, longer term it might be better to restructure things such that
+        ``AliasedClass`` entities are always matched up on their cache key,
+        instead of identity, in things like paths and such, so that this whole
+        issue of "the uncached option does not match the entities" goes away.
+        However this would make ``PathRegistry`` more complicated and difficult
+        to debug as well as potentially less performant in that it would be
+        hashing enormous cache keys rather than a simple AliasedInsp. UNLESS,
+        we could get cache keys overall to be reliably hashed into something
+        like an md5 key.
+
+        .. versionadded:: 1.4.41
+
+        """
+        if uncached_opt is not None:
+            return uncached_opt
+        else:
+            return self
+
+
+class CompileStateOption(HasCacheKey, ORMOption):
+    """base for :class:`.ORMOption` classes that affect the compilation of
+    a SQL query and therefore need to be part of the cache key.
+
+    .. note::  :class:`.CompileStateOption` is generally non-public and
+       should not be used as a base class for user-defined options; instead,
+       use :class:`.UserDefinedOption`, which is easier to use as it does not
+       interact with ORM compilation internals or caching.
+
+    :class:`.CompileStateOption` defines an internal attribute
+    ``_is_compile_state=True``, which has the effect that the ORM compilation
+    routines for SELECT and other statements will call upon these options when
+    a SQL string is being compiled. As such, these classes implement
+    :class:`.HasCacheKey` and need to provide robust ``_cache_key_traversal``
+    structures.
+
+    The :class:`.CompileStateOption` class is used to implement the ORM
+    :class:`.LoaderOption` and :class:`.CriteriaOption` classes.
+
+    .. versionadded:: 1.4.28
+
+
+    """
+
+    __slots__ = ()
+
+    _is_compile_state = True
+
+    def process_compile_state(self, compile_state: ORMCompileState) -> None:
+        """Apply a modification to a given :class:`.ORMCompileState`.
+
+        This method is part of the implementation of a particular
+        :class:`.CompileStateOption` and is only invoked internally
+        when an ORM query is compiled.
+
+        """
+
+    def process_compile_state_replaced_entities(
+        self,
+        compile_state: ORMCompileState,
+        mapper_entities: Sequence[_MapperEntity],
+    ) -> None:
+        """Apply a modification to a given :class:`.ORMCompileState`,
+        given entities that were replaced by with_only_columns() or
+        with_entities().
+
+        This method is part of the implementation of a particular
+        :class:`.CompileStateOption` and is only invoked internally
+        when an ORM query is compiled.
+
+        .. versionadded:: 1.4.19
+
+        """
+
+
+class LoaderOption(CompileStateOption):
+    """Describe a loader modification to an ORM statement at compilation time.
+
+    .. versionadded:: 1.4
+
+    """
+
+    __slots__ = ()
+
+    def process_compile_state_replaced_entities(
+        self,
+        compile_state: ORMCompileState,
+        mapper_entities: Sequence[_MapperEntity],
+    ) -> None:
+        self.process_compile_state(compile_state)
+
+
+class CriteriaOption(CompileStateOption):
+    """Describe a WHERE criteria modification to an ORM statement at
+    compilation time.
+
+    .. versionadded:: 1.4
+
+    """
+
+    __slots__ = ()
+
+    _is_criteria_option = True
+
+    def get_global_criteria(self, attributes: Dict[str, Any]) -> None:
+        """update additional entity criteria options in the given
+        attributes dictionary.
+
+        """
+
+
+class UserDefinedOption(ORMOption):
+    """Base class for a user-defined option that can be consumed from the
+    :meth:`.SessionEvents.do_orm_execute` event hook.
+
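+    E.g., a minimal sketch; ``ShardOption`` and its handler are
+    hypothetical::
+
+        from sqlalchemy import event
+
+
+        class ShardOption(UserDefinedOption):
+            pass
+
+
+        @event.listens_for(Session, "do_orm_execute")
+        def _use_shard(orm_execute_state):
+            for opt in orm_execute_state.user_defined_options:
+                ...  # act upon opt.payload
+
+
+        session.execute(select(User).options(ShardOption("shard_a")))
+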
+    """
+
+    __slots__ = ("payload",)
+
+    _is_legacy_option = False
+
+    _is_user_defined = True
+
+    propagate_to_loaders = False
+    """if True, indicate this option should be carried along
+    to "secondary" Query objects produced during lazy loads
+    or refresh operations.
+
+    """
+
+    def __init__(self, payload: Optional[Any] = None):
+        self.payload = payload
+
+
+@util.deprecated_cls(
+    "1.4",
+    "The :class:`.MapperOption class is deprecated and will be removed "
+    "in a future release.   For "
+    "modifications to queries on a per-execution basis, use the "
+    ":class:`.UserDefinedOption` class to establish state within a "
+    ":class:`.Query` or other Core statement, then use the "
+    ":meth:`.SessionEvents.before_orm_execute` hook to consume them.",
+    constructor=None,
+)
+class MapperOption(ORMOption):
+    """Describe a modification to a Query"""
+
+    __slots__ = ()
+
+    _is_legacy_option = True
+
+    propagate_to_loaders = False
+    """if True, indicate this option should be carried along
+    to "secondary" Query objects produced during lazy loads
+    or refresh operations.
+
+    """
+
+    def process_query(self, query: Query[Any]) -> None:
+        """Apply a modification to the given :class:`_query.Query`."""
+
+    def process_query_conditionally(self, query: Query[Any]) -> None:
+        """same as process_query(), except that this option may not
+        apply to the given query.
+
+        This is typically applied during a lazy load or scalar refresh
+        operation to propagate options stated in the original Query to the
+        new Query being used for the load.  It occurs for those options that
+        specify propagate_to_loaders=True.
+
+        """
+
+        self.process_query(query)
+
+
+class LoaderStrategy:
+    """Describe the loading behavior of a StrategizedProperty object.
+
+    The ``LoaderStrategy`` interacts with the querying process in three
+    ways:
+
+    * it controls the configuration of the ``InstrumentedAttribute``
+      placed on a class to handle the behavior of the attribute.  This
+      may involve setting up class-level callable functions to fire
+      off a select operation when the attribute is first accessed
+      (i.e. a lazy load).
+
+    * it processes the ``QueryContext`` at statement construction time,
+      where it can modify the SQL statement that is being produced.
+      For example, simple column attributes will add their represented
+      column to the list of selected columns, while a joined eager loader
+      may establish join clauses to add to the statement.
+
+    * It produces "row processor" functions at result fetching time.
+      These "row processor" functions populate a particular attribute
+      on a particular mapped instance.
+
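+    A custom strategy is typically a subclass that overrides some of these
+    hooks (sketch only; see the built-in strategies in ``strategies.py``
+    for complete examples)::
+
+        class MyLoader(LoaderStrategy):
+            def init_class_attribute(self, mapper): ...
+
+            def setup_query(self, *args, **kw): ...
+
+            def create_row_processor(self, *args, **kw): ...
+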
+    """
+
+    __slots__ = (
+        "parent_property",
+        "is_class_level",
+        "parent",
+        "key",
+        "strategy_key",
+        "strategy_opts",
+    )
+
+    _strategy_keys: ClassVar[List[_StrategyKey]]
+
+    def __init__(
+        self, parent: MapperProperty[Any], strategy_key: _StrategyKey
+    ):
+        self.parent_property = parent
+        self.is_class_level = False
+        self.parent = self.parent_property.parent
+        self.key = self.parent_property.key
+        self.strategy_key = strategy_key
+        self.strategy_opts = dict(strategy_key)
+
+    def init_class_attribute(self, mapper: Mapper[Any]) -> None:
+        pass
+
+    def setup_query(
+        self,
+        compile_state: ORMCompileState,
+        query_entity: _MapperEntity,
+        path: AbstractEntityRegistry,
+        loadopt: Optional[_LoadElement],
+        adapter: Optional[ORMAdapter],
+        **kwargs: Any,
+    ) -> None:
+        """Establish column and other state for a given QueryContext.
+
+        This method fulfills the contract specified by MapperProperty.setup().
+
+        StrategizedProperty delegates its setup() method
+        directly to this method.
+
+        """
+
+    def create_row_processor(
+        self,
+        context: ORMCompileState,
+        query_entity: _MapperEntity,
+        path: AbstractEntityRegistry,
+        loadopt: Optional[_LoadElement],
+        mapper: Mapper[Any],
+        result: Result[Any],
+        adapter: Optional[ORMAdapter],
+        populators: _PopulatorDict,
+    ) -> None:
+        """Establish row processing functions for a given QueryContext.
+
+        This method fulfills the contract specified by
+        MapperProperty.create_row_processor().
+
+        StrategizedProperty delegates its create_row_processor() method
+        directly to this method.
+
+        """
+
+    def __str__(self) -> str:
+        return str(self.parent_property)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/loading.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/loading.py
new file mode 100644
index 00000000..679286f5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/loading.py
@@ -0,0 +1,1682 @@
+# orm/loading.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+"""private module containing functions used to convert database
+rows into object instances and associated state.
+
+the functions here are called primarily by Query, Mapper,
+as well as some of the attribute loading strategies.
+
+"""
+
+from __future__ import annotations
+
+from typing import Any
+from typing import Dict
+from typing import Iterable
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import attributes
+from . import exc as orm_exc
+from . import path_registry
+from .base import _DEFER_FOR_STATE
+from .base import _RAISE_FOR_STATE
+from .base import _SET_DEFERRED_EXPIRED
+from .base import PassiveFlag
+from .context import FromStatement
+from .context import ORMCompileState
+from .context import QueryContext
+from .util import _none_set
+from .util import state_str
+from .. import exc as sa_exc
+from .. import util
+from ..engine import result_tuple
+from ..engine.result import ChunkedIteratorResult
+from ..engine.result import FrozenResult
+from ..engine.result import SimpleResultMetaData
+from ..sql import select
+from ..sql import util as sql_util
+from ..sql.selectable import ForUpdateArg
+from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
+from ..sql.selectable import SelectState
+from ..util import EMPTY_DICT
+
+if TYPE_CHECKING:
+    from ._typing import _IdentityKeyType
+    from .base import LoaderCallableStatus
+    from .interfaces import ORMOption
+    from .mapper import Mapper
+    from .query import Query
+    from .session import Session
+    from .state import InstanceState
+    from ..engine.cursor import CursorResult
+    from ..engine.interfaces import _ExecuteOptions
+    from ..engine.result import Result
+    from ..sql import Select
+
+_T = TypeVar("_T", bound=Any)
+_O = TypeVar("_O", bound=object)
+_new_runid = util.counter()
+
+
+_PopulatorDict = Dict[str, List[Tuple[str, Any]]]
+
+
+def instances(cursor: CursorResult[Any], context: QueryContext) -> Result[Any]:
+    """Return a :class:`.Result` given an ORM query context.
+
+    :param cursor: a :class:`.CursorResult`, generated by a statement
+     which came from :class:`.ORMCompileState`
+
+    :param context: a :class:`.QueryContext` object
+
+    :return: a :class:`.Result` object representing ORM results
+
+    .. versionchanged:: 1.4 The instances() function now uses
+       :class:`.Result` objects and has an all-new interface.
+
+    """
+
+    context.runid = _new_runid()
+
+    if context.top_level_context:
+        is_top_level = False
+        context.post_load_paths = context.top_level_context.post_load_paths
+    else:
+        is_top_level = True
+        context.post_load_paths = {}
+
+    compile_state = context.compile_state
+    filtered = compile_state._has_mapper_entities
+    single_entity = (
+        not context.load_options._only_return_tuples
+        and len(compile_state._entities) == 1
+        and compile_state._entities[0].supports_single_entity
+    )
+
+    try:
+        (process, labels, extra) = list(
+            zip(
+                *[
+                    query_entity.row_processor(context, cursor)
+                    for query_entity in context.compile_state._entities
+                ]
+            )
+        )
+
+        if context.yield_per and (
+            context.loaders_require_buffering
+            or context.loaders_require_uniquing
+        ):
+            raise sa_exc.InvalidRequestError(
+                "Can't use yield_per with eager loaders that require uniquing "
+                "or row buffering, e.g. joinedload() against collections "
+                "or subqueryload().  Consider the selectinload() strategy "
+                "for better flexibility in loading objects."
+            )
+
+    except Exception:
+        with util.safe_reraise():
+            cursor.close()
+
+    def _no_unique(entry):
+        raise sa_exc.InvalidRequestError(
+            "Can't use the ORM yield_per feature in conjunction with unique()"
+        )
+
+    def _not_hashable(datatype, *, legacy=False, uncertain=False):
+        if not legacy:
+
+            def go(obj):
+                if uncertain:
+                    try:
+                        return hash(obj)
+                    except Exception:
+                        pass
+
+                raise sa_exc.InvalidRequestError(
+                    "Can't apply uniqueness to row tuple containing value of "
+                    f"""type {datatype!r}; {
+                        'the values returned appear to be'
+                        if uncertain
+                        else 'this datatype produces'
+                    } non-hashable values"""
+                )
+
+            return go
+        elif not uncertain:
+            return id
+        else:
+            _use_id = False
+
+            def go(obj):
+                nonlocal _use_id
+
+                if not _use_id:
+                    try:
+                        return hash(obj)
+                    except Exception:
+                        pass
+
+                    # in #10459, we considered using a warning here, however
+                    # as legacy query uses result.unique() in all cases, this
+                    # would lead to too many warning cases.
+                    _use_id = True
+
+                return id(obj)
+
+            return go
+
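+    # Per-entity uniquing strategy for result.unique(): disallowed entirely
+    # under yield_per; id() for entities hashed by identity; an error (or a
+    # legacy fallback to id()) for values known or suspected non-hashable;
+    # None means plain hash() applies.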
+    unique_filters = [
+        (
+            _no_unique
+            if context.yield_per
+            else (
+                _not_hashable(
+                    ent.column.type,  # type: ignore
+                    legacy=context.load_options._legacy_uniquing,
+                    uncertain=ent._null_column_type,
+                )
+                if (
+                    not ent.use_id_for_hash
+                    and (ent._non_hashable_value or ent._null_column_type)
+                )
+                else id if ent.use_id_for_hash else None
+            )
+        )
+        for ent in context.compile_state._entities
+    ]
+
+    row_metadata = SimpleResultMetaData(
+        labels, extra, _unique_filters=unique_filters
+    )
+
+    def chunks(size):  # type: ignore
+        while True:
+            yield_per = size
+
+            context.partials = {}
+
+            if yield_per:
+                fetch = cursor.fetchmany(yield_per)
+
+                if not fetch:
+                    break
+            else:
+                fetch = cursor._raw_all_rows()
+
+            if single_entity:
+                proc = process[0]
+                rows = [proc(row) for row in fetch]
+            else:
+                rows = [
+                    tuple([proc(row) for proc in process]) for row in fetch
+                ]
+
+            # if we are the originating load from a query, meaning we
+            # aren't being called as a result of a nested "post load",
+            # iterate through all the collected post loaders and fire them
+            # off.  Previously this used to work recursively, however that
+            # prevented deeply nested structures from being loadable
+            if is_top_level:
+                if yield_per:
+                    # if using yield per, memoize the state of the
+                    # collection so that it can be restored
+                    top_level_post_loads = list(
+                        context.post_load_paths.items()
+                    )
+
+                while context.post_load_paths:
+                    post_loads = list(context.post_load_paths.items())
+                    context.post_load_paths.clear()
+                    for path, post_load in post_loads:
+                        post_load.invoke(context, path)
+
+                if yield_per:
+                    context.post_load_paths.clear()
+                    context.post_load_paths.update(top_level_post_loads)
+
+            yield rows
+
+            if not yield_per:
+                break
+
+    if context.execution_options.get("prebuffer_rows", False):
+        # this is a bit of a hack at the moment.
+        # I would rather have some option in the result to pre-buffer
+        # internally.
+        _prebuffered = list(chunks(None))
+
+        def chunks(size):
+            return iter(_prebuffered)
+
+    result = ChunkedIteratorResult(
+        row_metadata,
+        chunks,
+        source_supports_scalars=single_entity,
+        raw=cursor,
+        dynamic_yield_per=cursor.context._is_server_side,
+    )
+
+    # filtered and single_entity are used to indicate to legacy Query that the
+    # query has ORM entities, so legacy deduping and scalars should be called
+    # on the result.
+    result._attributes = result._attributes.union(
+        dict(filtered=filtered, is_single_entity=single_entity)
+    )
+
+    # multi_row_eager_loaders OTOH is specific to joinedload.
+    if context.compile_state.multi_row_eager_loaders:
+
+        def require_unique(obj):
+            raise sa_exc.InvalidRequestError(
+                "The unique() method must be invoked on this Result, "
+                "as it contains results that include joined eager loads "
+                "against collections"
+            )
+
+        result._unique_filter_state = (None, require_unique)
+
+    if context.yield_per:
+        result.yield_per(context.yield_per)
+
+    return result
+
+
+@util.preload_module("sqlalchemy.orm.context")
+def merge_frozen_result(session, statement, frozen_result, load=True):
+    """Merge a :class:`_engine.FrozenResult` back into a :class:`_orm.Session`,
+    returning a new :class:`_engine.FrozenResult` object with :term:`persistent`
+    objects.
+
+    See the section :ref:`do_orm_execute_re_executing` for an example.
+
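+    E.g., a condensed sketch of that pattern; note the returned
+    :class:`_engine.FrozenResult` is invoked to produce a usable
+    :class:`_engine.Result`::
+
+        frozen = session.execute(statement).freeze()
+        merged = merge_frozen_result(session, statement, frozen, load=False)
+        result = merged()
+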
+    .. seealso::
+
+        :ref:`do_orm_execute_re_executing`
+
+        :meth:`_engine.Result.freeze`
+
+        :class:`_engine.FrozenResult`
+
+    """
+    querycontext = util.preloaded.orm_context
+
+    if load:
+        # flush current contents if we expect to load data
+        session._autoflush()
+
+    ctx = querycontext.ORMSelectCompileState._create_entities_collection(
+        statement, legacy=False
+    )
+
+    autoflush = session.autoflush
+    try:
+        session.autoflush = False
+        mapped_entities = [
+            i
+            for i, e in enumerate(ctx._entities)
+            if isinstance(e, querycontext._MapperEntity)
+        ]
+        keys = [ent._label_name for ent in ctx._entities]
+
+        keyed_tuple = result_tuple(
+            keys, [ent._extra_entities for ent in ctx._entities]
+        )
+
+        result = []
+        for newrow in frozen_result.rewrite_rows():
+            for i in mapped_entities:
+                if newrow[i] is not None:
+                    newrow[i] = session._merge(
+                        attributes.instance_state(newrow[i]),
+                        attributes.instance_dict(newrow[i]),
+                        load=load,
+                        _recursive={},
+                        _resolve_conflict_map={},
+                    )
+
+            result.append(keyed_tuple(newrow))
+
+        return frozen_result.with_new_rows(result)
+    finally:
+        session.autoflush = autoflush
+
+
+@util.became_legacy_20(
+    ":func:`_orm.merge_result`",
+    alternative="The function as well as the method on :class:`_orm.Query` "
+    "is superseded by the :func:`_orm.merge_frozen_result` function.",
+)
+@util.preload_module("sqlalchemy.orm.context")
+def merge_result(
+    query: Query[Any],
+    iterator: Union[FrozenResult, Iterable[Sequence[Any]], Iterable[object]],
+    load: bool = True,
+) -> Union[FrozenResult, Iterable[Any]]:
+    """Merge a result into the given :class:`.Query` object's Session.
+
+    See :meth:`_orm.Query.merge_result` for top-level documentation on this
+    function.
+
+    """
+
+    querycontext = util.preloaded.orm_context
+
+    session = query.session
+    if load:
+        # flush current contents if we expect to load data
+        session._autoflush()
+
+    # TODO: need test coverage and documentation for the FrozenResult
+    # use case.
+    if isinstance(iterator, FrozenResult):
+        frozen_result = iterator
+        iterator = iter(frozen_result.data)
+    else:
+        frozen_result = None
+
+    ctx = querycontext.ORMSelectCompileState._create_entities_collection(
+        query, legacy=True
+    )
+
+    autoflush = session.autoflush
+    try:
+        session.autoflush = False
+        single_entity = not frozen_result and len(ctx._entities) == 1
+
+        if single_entity:
+            if isinstance(ctx._entities[0], querycontext._MapperEntity):
+                result = [
+                    session._merge(
+                        attributes.instance_state(instance),
+                        attributes.instance_dict(instance),
+                        load=load,
+                        _recursive={},
+                        _resolve_conflict_map={},
+                    )
+                    for instance in iterator
+                ]
+            else:
+                result = list(iterator)
+        else:
+            mapped_entities = [
+                i
+                for i, e in enumerate(ctx._entities)
+                if isinstance(e, querycontext._MapperEntity)
+            ]
+            result = []
+            keys = [ent._label_name for ent in ctx._entities]
+
+            keyed_tuple = result_tuple(
+                keys, [ent._extra_entities for ent in ctx._entities]
+            )
+
+            for row in iterator:
+                newrow = list(row)
+                for i in mapped_entities:
+                    if newrow[i] is not None:
+                        newrow[i] = session._merge(
+                            attributes.instance_state(newrow[i]),
+                            attributes.instance_dict(newrow[i]),
+                            load=load,
+                            _recursive={},
+                            _resolve_conflict_map={},
+                        )
+                result.append(keyed_tuple(newrow))
+
+        if frozen_result:
+            return frozen_result.with_new_rows(result)
+        else:
+            return iter(result)
+    finally:
+        session.autoflush = autoflush
+
+
+def get_from_identity(
+    session: Session,
+    mapper: Mapper[_O],
+    key: _IdentityKeyType[_O],
+    passive: PassiveFlag,
+) -> Union[LoaderCallableStatus, Optional[_O]]:
+    """Look up the given key in the given session's identity map,
+    check the object for expired state if found.
+
+    """
+    instance = session.identity_map.get(key)
+    if instance is not None:
+        state = attributes.instance_state(instance)
+
+        if mapper.inherits and not state.mapper.isa(mapper):
+            return attributes.PASSIVE_CLASS_MISMATCH
+
+        # expired - ensure it still exists
+        if state.expired:
+            if not passive & attributes.SQL_OK:
+                # TODO: no coverage here
+                return attributes.PASSIVE_NO_RESULT
+            elif not passive & attributes.RELATED_OBJECT_OK:
+                # this mode is used within a flush and the instance's
+                # expired state will be checked soon enough, if necessary.
+                # also used by immediateloader for a mutually-dependent
+                # o2m->m2m load, :ticket:`6301`
+                return instance
+            try:
+                state._load_expired(state, passive)
+            except orm_exc.ObjectDeletedError:
+                session._remove_newly_deleted([state])
+                return None
+        return instance
+    else:
+        return None
+
+
+def load_on_ident(
+    session: Session,
+    statement: Union[Select, FromStatement],
+    key: Optional[_IdentityKeyType],
+    *,
+    load_options: Optional[Sequence[ORMOption]] = None,
+    refresh_state: Optional[InstanceState[Any]] = None,
+    with_for_update: Optional[ForUpdateArg] = None,
+    only_load_props: Optional[Iterable[str]] = None,
+    no_autoflush: bool = False,
+    bind_arguments: Mapping[str, Any] = util.EMPTY_DICT,
+    execution_options: _ExecuteOptions = util.EMPTY_DICT,
+    require_pk_cols: bool = False,
+    is_user_refresh: bool = False,
+):
+    """Load the given identity key from the database."""
+    if key is not None:
+        ident = key[1]
+        identity_token = key[2]
+    else:
+        ident = identity_token = None
+
+    return load_on_pk_identity(
+        session,
+        statement,
+        ident,
+        load_options=load_options,
+        refresh_state=refresh_state,
+        with_for_update=with_for_update,
+        only_load_props=only_load_props,
+        identity_token=identity_token,
+        no_autoflush=no_autoflush,
+        bind_arguments=bind_arguments,
+        execution_options=execution_options,
+        require_pk_cols=require_pk_cols,
+        is_user_refresh=is_user_refresh,
+    )
+
+
+def load_on_pk_identity(
+    session: Session,
+    statement: Union[Select, FromStatement],
+    primary_key_identity: Optional[Tuple[Any, ...]],
+    *,
+    load_options: Optional[Sequence[ORMOption]] = None,
+    refresh_state: Optional[InstanceState[Any]] = None,
+    with_for_update: Optional[ForUpdateArg] = None,
+    only_load_props: Optional[Iterable[str]] = None,
+    identity_token: Optional[Any] = None,
+    no_autoflush: bool = False,
+    bind_arguments: Mapping[str, Any] = util.EMPTY_DICT,
+    execution_options: _ExecuteOptions = util.EMPTY_DICT,
+    require_pk_cols: bool = False,
+    is_user_refresh: bool = False,
+):
+    """Load the given primary key identity from the database."""
+
+    query = statement
+    q = query._clone()
+
+    assert not q._is_lambda_element
+
+    if load_options is None:
+        load_options = QueryContext.default_load_options
+
+    if (
+        statement._compile_options
+        is SelectState.default_select_compile_options
+    ):
+        compile_options = ORMCompileState.default_compile_options
+    else:
+        compile_options = statement._compile_options
+
+    if primary_key_identity is not None:
+        mapper = query._propagate_attrs["plugin_subject"]
+
+        (_get_clause, _get_params) = mapper._get_clause
+
+        # None present in ident - turn those comparisons
+        # into "IS NULL"
+        if None in primary_key_identity:
+            nones = {
+                _get_params[col].key
+                for col, value in zip(mapper.primary_key, primary_key_identity)
+                if value is None
+            }
+
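+            # e.g. (illustrative) for a composite primary key (a, b) with
+            # identity (None, 5), the criterion "a = :param AND b = :param"
+            # is rewritten as "a IS NULL AND b = :param"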
+            _get_clause = sql_util.adapt_criterion_to_null(_get_clause, nones)
+
+            if len(nones) == len(primary_key_identity):
+                util.warn(
+                    "fully NULL primary key identity cannot load any "
+                    "object.  This condition may raise an error in a future "
+                    "release."
+                )
+
+        q._where_criteria = (
+            sql_util._deep_annotate(_get_clause, {"_orm_adapt": True}),
+        )
+
+        params = {
+            _get_params[primary_key].key: id_val
+            for id_val, primary_key in zip(
+                primary_key_identity, mapper.primary_key
+            )
+        }
+    else:
+        params = None
+
+    if with_for_update is not None:
+        version_check = True
+        q._for_update_arg = with_for_update
+    elif query._for_update_arg is not None:
+        version_check = True
+        q._for_update_arg = query._for_update_arg
+    else:
+        version_check = False
+
+    if require_pk_cols and only_load_props:
+        if not refresh_state:
+            raise sa_exc.ArgumentError(
+                "refresh_state is required when require_pk_cols is present"
+            )
+
+        refresh_state_prokeys = refresh_state.mapper._primary_key_propkeys
+        has_changes = {
+            key
+            for key in refresh_state_prokeys.difference(only_load_props)
+            if refresh_state.attrs[key].history.has_changes()
+        }
+        if has_changes:
+            # raise if pending pk changes are present.
+            # technically, this could be limited to the case where we have
+            # relationships in the only_load_props collection to be refreshed
+            # also (and only ones that have a secondary eager loader, at that).
+            # however, the error is in place across the board so that behavior
+            # here is easier to predict.   The use case it prevents is one
+            # of mutating PK attrs, leaving them unflushed,
+            # calling session.refresh(), and expecting those attrs to
+            # remain unflushed.   Anyone doing all of that would likely
+            # be better off flushing the PK attributes to the database
+            # before tinkering like that (session.refresh() is
+            # tinkering).
+            raise sa_exc.InvalidRequestError(
+                f"Please flush pending primary key changes on "
+                "attributes "
+                f"{has_changes} for mapper {refresh_state.mapper} before "
+                "proceeding with a refresh"
+            )
+
+        # overall, the ORM has no internal flow right now for "don't load
+        # the primary row of an object at all, but fire off
+        # selectinload/subqueryload/immediateload for some relationships".
+        # It would probably be a pretty big effort to add such a flow.  So
+        # here, the case for #8703 is introduced: the user asks to refresh
+        # only some relationship attributes which are loaded via
+        # selectinload/subqueryload/immediateload etc. (not joinedload),
+        # and the ORM would otherwise complain there are no columns in the
+        # primary row to load.  So we just add the PK cols when that case
+        # is detected, so that a SELECT is emitted for the primary row.
+        #
+        # Let's state right up front: for this one little case, the ORM
+        # is adding a whole extra SELECT just to satisfy limitations in
+        # the internal flow.  Emitting unneeded SELECTs is something we
+        # otherwise work constantly to avoid.  We rationalize this for now
+        # based on 1. session.refresh() is not commonly used
+        # 2. session.refresh() with only relationship attrs is even less
+        # commonly used 3. the SELECT in question is very low latency.
+        #
+        # to add a flow that skips the SELECT, the quickest way might be
+        # to manufacture a single-row result set to send off to
+        # instances(), but that would have to be woven into context.py.
+        # For 2.0.0, we have enough big changes to navigate for now.
+        #
+        mp = refresh_state.mapper._props
+        for p in only_load_props:
+            if mp[p]._is_relationship:
+                only_load_props = refresh_state_prokeys.union(only_load_props)
+                break
+
+    if refresh_state and refresh_state.load_options:
+        compile_options += {"_current_path": refresh_state.load_path.parent}
+        q = q.options(*refresh_state.load_options)
+
+    new_compile_options, load_options = _set_get_options(
+        compile_options,
+        load_options,
+        version_check=version_check,
+        only_load_props=only_load_props,
+        refresh_state=refresh_state,
+        identity_token=identity_token,
+        is_user_refresh=is_user_refresh,
+    )
+
+    q._compile_options = new_compile_options
+    q._order_by = None
+
+    if no_autoflush:
+        load_options += {"_autoflush": False}
+
+    execution_options = util.EMPTY_DICT.merge_with(
+        execution_options, {"_sa_orm_load_options": load_options}
+    )
+    result = (
+        session.execute(
+            q,
+            params=params,
+            execution_options=execution_options,
+            bind_arguments=bind_arguments,
+        )
+        .unique()
+        .scalars()
+    )
+
+    try:
+        return result.one()
+    except orm_exc.NoResultFound:
+        return None
+
+
+def _set_get_options(
+    compile_opt,
+    load_opt,
+    populate_existing=None,
+    version_check=None,
+    only_load_props=None,
+    refresh_state=None,
+    identity_token=None,
+    is_user_refresh=None,
+):
+    compile_options = {}
+    load_options = {}
+    if version_check:
+        load_options["_version_check"] = version_check
+    if populate_existing:
+        load_options["_populate_existing"] = populate_existing
+    if refresh_state:
+        load_options["_refresh_state"] = refresh_state
+        compile_options["_for_refresh_state"] = True
+    if only_load_props:
+        compile_options["_only_load_props"] = frozenset(only_load_props)
+    if identity_token:
+        load_options["_identity_token"] = identity_token
+
+    if is_user_refresh:
+        load_options["_is_user_refresh"] = is_user_refresh
+    if load_options:
+        load_opt += load_options
+    if compile_options:
+        compile_opt += compile_options
+
+    return compile_opt, load_opt
+
+
+def _setup_entity_query(
+    compile_state,
+    mapper,
+    query_entity,
+    path,
+    adapter,
+    column_collection,
+    with_polymorphic=None,
+    only_load_props=None,
+    polymorphic_discriminator=None,
+    **kw,
+):
+    if with_polymorphic:
+        poly_properties = mapper._iterate_polymorphic_properties(
+            with_polymorphic
+        )
+    else:
+        poly_properties = mapper._polymorphic_properties
+
+    quick_populators = {}
+
+    path.set(compile_state.attributes, "memoized_setups", quick_populators)
+
+    # for the lead entities in the path, e.g. not eager loads, and
+    # assuming a user-passed aliased class, e.g. not a from_self() or any
+    # implicit aliasing, don't add columns to the SELECT that aren't
+    # in the thing that's aliased.
+    check_for_adapt = adapter and len(path) == 1 and path[-1].is_aliased_class
+
+    for value in poly_properties:
+        if only_load_props and value.key not in only_load_props:
+            continue
+        value.setup(
+            compile_state,
+            query_entity,
+            path,
+            adapter,
+            only_load_props=only_load_props,
+            column_collection=column_collection,
+            memoized_populators=quick_populators,
+            check_for_adapt=check_for_adapt,
+            **kw,
+        )
+
+    if (
+        polymorphic_discriminator is not None
+        and polymorphic_discriminator is not mapper.polymorphic_on
+    ):
+        if adapter:
+            pd = adapter.columns[polymorphic_discriminator]
+        else:
+            pd = polymorphic_discriminator
+        column_collection.append(pd)
+
+
+def _warn_for_runid_changed(state):
+    util.warn(
+        "Loading context for %s has changed within a load/refresh "
+        "handler, suggesting a row refresh operation took place. If this "
+        "event handler is expected to be "
+        "emitting row refresh operations within an existing load or refresh "
+        "operation, set restore_load_context=True when establishing the "
+        "listener to ensure the context remains unchanged when the event "
+        "handler completes." % (state_str(state),)
+    )
+
+
+def _instance_processor(
+    query_entity,
+    mapper,
+    context,
+    result,
+    path,
+    adapter,
+    only_load_props=None,
+    refresh_state=None,
+    polymorphic_discriminator=None,
+    _polymorphic_from=None,
+):
+    """Produce a mapper level row processor callable
+    which processes rows into mapped instances."""
+
+    # note that this method, most of which exists in a closure
+    # called _instance(), resists being broken out, as
+    # attempts to do so tend to add significant function
+    # call overhead.  _instance() is the most
+    # performance-critical section in the whole ORM.
+
+    identity_class = mapper._identity_class
+    compile_state = context.compile_state
+
+    # look for "row getter" functions that have been assigned along
+    # with the compile state that were cached from a previous load.
+    # these are operator.itemgetter() objects that each will extract a
+    # particular column from each row.
+
+    getter_key = ("getters", mapper)
+    getters = path.get(compile_state.attributes, getter_key, None)
+
+    if getters is None:
+        # no getters, so go through a list of attributes we are loading for,
+        # and the ones that are column based will have already put information
+        # for us in another collection "memoized_setups", which represents the
+        # output of the LoaderStrategy.setup_query() method.  We can just as
+        # easily call LoaderStrategy.create_row_processor for each, but by
+        # getting it all at once from setup_query we save another method call
+        # per attribute.
+        props = mapper._prop_set
+        if only_load_props is not None:
+            props = props.intersection(
+                mapper._props[k] for k in only_load_props
+            )
+
+        quick_populators = path.get(
+            context.attributes, "memoized_setups", EMPTY_DICT
+        )
+
+        todo = []
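+        # populator buckets: "quick" entries are plain row getters;
+        # "new" populators run the first time an identity is seen in a
+        # load; "existing" populators run for additional rows with the
+        # same identity; "expire" marks attributes as expired; "eager"
+        # applies eager-loader values onto already-present instances.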
+        cached_populators = {
+            "new": [],
+            "quick": [],
+            "deferred": [],
+            "expire": [],
+            "existing": [],
+            "eager": [],
+        }
+
+        if refresh_state is None:
+            # we can also get the "primary key" tuple getter function
+            pk_cols = mapper.primary_key
+
+            if adapter:
+                pk_cols = [adapter.columns[c] for c in pk_cols]
+            primary_key_getter = result._tuple_getter(pk_cols)
+        else:
+            primary_key_getter = None
+
+        getters = {
+            "cached_populators": cached_populators,
+            "todo": todo,
+            "primary_key_getter": primary_key_getter,
+        }
+        for prop in props:
+            if prop in quick_populators:
+                # this is an inlined path just for column-based attributes.
+                col = quick_populators[prop]
+                if col is _DEFER_FOR_STATE:
+                    cached_populators["new"].append(
+                        (prop.key, prop._deferred_column_loader)
+                    )
+                elif col is _SET_DEFERRED_EXPIRED:
+                    # note that in this path, we are no longer
+                    # searching in the result to see if the column might
+                    # be present in some unexpected way.
+                    cached_populators["expire"].append((prop.key, False))
+                elif col is _RAISE_FOR_STATE:
+                    cached_populators["new"].append(
+                        (prop.key, prop._raise_column_loader)
+                    )
+                else:
+                    getter = None
+                    if adapter:
+                        # this logic had been removed for all 1.4 releases
+                        # up until 1.4.18; the adapter here is particularly
+                        # the compound eager adapter which isn't accommodated
+                        # in the quick_populators right now.  The "fallback"
+                        # logic below instead took over in many more cases
+                        # until issue #6596 was identified.
+
+                        # note there is still an issue where this codepath
+                        # produces no "getter" for cases where a joined-inh
+                        # mapping includes a labeled column property, meaning
+                        # KeyError is caught internally and we fall back to
+                        # _getter(col), which works anyway.   The adapter
+                        # here for joined inh without any aliasing might not
+                        # be useful.  Tests which see this include
+                        # test.orm.inheritance.test_basic ->
+                        # EagerTargetingTest.test_adapt_stringency
+                        # OptimizedLoadTest.test_column_expression_joined
+                        # PolymorphicOnNotLocalTest.test_polymorphic_on_column_prop  # noqa: E501
+                        #
+
+                        adapted_col = adapter.columns[col]
+                        if adapted_col is not None:
+                            getter = result._getter(adapted_col, False)
+                    if not getter:
+                        getter = result._getter(col, False)
+                    if getter:
+                        cached_populators["quick"].append((prop.key, getter))
+                    else:
+                        # fall back to the ColumnProperty itself, which
+                        # will iterate through all of its columns
+                        # to see if one fits
+                        prop.create_row_processor(
+                            context,
+                            query_entity,
+                            path,
+                            mapper,
+                            result,
+                            adapter,
+                            cached_populators,
+                        )
+            else:
+                # loader strategies like subqueryload, selectinload,
+                # joinedload, basically relationships, these need to interact
+                # with the context each time to work correctly.
+                todo.append(prop)
+
+        path.set(compile_state.attributes, getter_key, getters)
+
+    cached_populators = getters["cached_populators"]
+
+    populators = {key: list(value) for key, value in cached_populators.items()}
+    for prop in getters["todo"]:
+        prop.create_row_processor(
+            context, query_entity, path, mapper, result, adapter, populators
+        )
+
+    propagated_loader_options = context.propagated_loader_options
+    load_path = (
+        context.compile_state.current_path + path
+        if context.compile_state.current_path.path
+        else path
+    )
+
+    session_identity_map = context.session.identity_map
+
+    populate_existing = context.populate_existing or mapper.always_refresh
+    load_evt = bool(mapper.class_manager.dispatch.load)
+    refresh_evt = bool(mapper.class_manager.dispatch.refresh)
+    persistent_evt = bool(context.session.dispatch.loaded_as_persistent)
+    if persistent_evt:
+        loaded_as_persistent = context.session.dispatch.loaded_as_persistent
+    instance_state = attributes.instance_state
+    instance_dict = attributes.instance_dict
+    session_id = context.session.hash_key
+    runid = context.runid
+    identity_token = context.identity_token
+
+    version_check = context.version_check
+    if version_check:
+        version_id_col = mapper.version_id_col
+        if version_id_col is not None:
+            if adapter:
+                version_id_col = adapter.columns[version_id_col]
+            version_id_getter = result._getter(version_id_col)
+        else:
+            version_id_getter = None
+
+    if not refresh_state and _polymorphic_from is not None:
+        key = ("loader", path.path)
+
+        if key in context.attributes and context.attributes[key].strategy == (
+            ("selectinload_polymorphic", True),
+        ):
+            option_entities = context.attributes[key].local_opts["entities"]
+        else:
+            option_entities = None
+        selectin_load_via = mapper._should_selectin_load(
+            option_entities,
+            _polymorphic_from,
+        )
+
+        if selectin_load_via and selectin_load_via is not _polymorphic_from:
+            # only_load_props goes w/ refresh_state only, and in a refresh
+            # we are a single row query for the exact entity; polymorphic
+            # loading does not apply
+            assert only_load_props is None
+
+            if selectin_load_via.is_mapper:
+                _load_supers = []
+                _endmost_mapper = selectin_load_via
+                while (
+                    _endmost_mapper
+                    and _endmost_mapper is not _polymorphic_from
+                ):
+                    _load_supers.append(_endmost_mapper)
+                    _endmost_mapper = _endmost_mapper.inherits
+            else:
+                _load_supers = [selectin_load_via]
+
+            for _selectinload_entity in _load_supers:
+                if PostLoad.path_exists(
+                    context, load_path, _selectinload_entity
+                ):
+                    continue
+                callable_ = _load_subclass_via_in(
+                    context,
+                    path,
+                    _selectinload_entity,
+                    _polymorphic_from,
+                    option_entities,
+                )
+                PostLoad.callable_for_path(
+                    context,
+                    load_path,
+                    _selectinload_entity.mapper,
+                    _selectinload_entity,
+                    callable_,
+                    _selectinload_entity,
+                )
+
+    post_load = PostLoad.for_context(context, load_path, only_load_props)
+
+    if refresh_state:
+        refresh_identity_key = refresh_state.key
+        if refresh_identity_key is None:
+            # super-rare condition; a refresh is being called
+            # on a non-instance-key instance; this is meant to only
+            # occur within a flush()
+            refresh_identity_key = mapper._identity_key_from_state(
+                refresh_state
+            )
+    else:
+        refresh_identity_key = None
+
+        primary_key_getter = getters["primary_key_getter"]
+
+    if mapper.allow_partial_pks:
+        is_not_primary_key = _none_set.issuperset
+    else:
+        is_not_primary_key = _none_set.intersection
+
+    def _instance(row):
+        # determine the state that we'll be populating
+        if refresh_identity_key:
+            # fixed state that we're refreshing
+            state = refresh_state
+            instance = state.obj()
+            dict_ = instance_dict(instance)
+            isnew = state.runid != runid
+            currentload = True
+            loaded_instance = False
+        else:
+            # look at the row, see if that identity is in the
+            # session, or we have to create a new one
+            identitykey = (
+                identity_class,
+                primary_key_getter(row),
+                identity_token,
+            )
+
+            instance = session_identity_map.get(identitykey)
+
+            if instance is not None:
+                # existing instance
+                state = instance_state(instance)
+                dict_ = instance_dict(instance)
+
+                isnew = state.runid != runid
+                currentload = not isnew
+                loaded_instance = False
+
+                if version_check and version_id_getter and not currentload:
+                    _validate_version_id(
+                        mapper, state, dict_, row, version_id_getter
+                    )
+
+            else:
+                # create a new instance
+
+                # check for non-NULL values in the primary key columns,
+                # else no entity is returned for the row
+                if is_not_primary_key(identitykey[1]):
+                    return None
+
+                isnew = True
+                currentload = True
+                loaded_instance = True
+
+                instance = mapper.class_manager.new_instance()
+
+                dict_ = instance_dict(instance)
+                state = instance_state(instance)
+                state.key = identitykey
+                state.identity_token = identity_token
+
+                # attach instance to session.
+                state.session_id = session_id
+                session_identity_map._add_unpresent(state, identitykey)
+
+        effective_populate_existing = populate_existing
+        if refresh_state is state:
+            effective_populate_existing = True
+
+        # populate.  this looks at whether this state is new
+        # for this load or was existing, and whether or not this
+        # row is the first row with this identity.
+        if currentload or effective_populate_existing:
+            # full population routines.  Objects here are either
+            # just created, or we are doing a populate_existing
+
+            # be conservative about setting load_path when populate_existing
+            # is in effect; want to maintain options from the original
+            # load.  see test_expire->test_refresh_maintains_deferred_options
+            if isnew and (
+                propagated_loader_options or not effective_populate_existing
+            ):
+                state.load_options = propagated_loader_options
+                state.load_path = load_path
+
+            _populate_full(
+                context,
+                row,
+                state,
+                dict_,
+                isnew,
+                load_path,
+                loaded_instance,
+                effective_populate_existing,
+                populators,
+            )
+
+            if isnew:
+                # state.runid should be equal to context.runid / runid
+                # here, however for event checks we are being more conservative
+                # and checking against existing run id
+                # assert state.runid == runid
+
+                existing_runid = state.runid
+
+                if loaded_instance:
+                    if load_evt:
+                        state.manager.dispatch.load(state, context)
+                        if state.runid != existing_runid:
+                            _warn_for_runid_changed(state)
+                    if persistent_evt:
+                        loaded_as_persistent(context.session, state)
+                        if state.runid != existing_runid:
+                            _warn_for_runid_changed(state)
+                elif refresh_evt:
+                    state.manager.dispatch.refresh(
+                        state, context, only_load_props
+                    )
+                    if state.runid != runid:
+                        _warn_for_runid_changed(state)
+
+                if effective_populate_existing or state.modified:
+                    if refresh_state and only_load_props:
+                        state._commit(dict_, only_load_props)
+                    else:
+                        state._commit_all(dict_, session_identity_map)
+
+            if post_load:
+                post_load.add_state(state, True)
+
+        else:
+            # partial population routines, for objects that were already
+            # in the Session, but a row matches them; apply eager loaders
+            # on existing objects, etc.
+            unloaded = state.unloaded
+            isnew = state not in context.partials
+
+            if not isnew or unloaded or populators["eager"]:
+                # state is having a partial set of its attributes
+                # refreshed.  Populate those attributes,
+                # and add to the "context.partials" collection.
+
+                to_load = _populate_partial(
+                    context,
+                    row,
+                    state,
+                    dict_,
+                    isnew,
+                    load_path,
+                    unloaded,
+                    populators,
+                )
+
+                if isnew:
+                    if refresh_evt:
+                        existing_runid = state.runid
+                        state.manager.dispatch.refresh(state, context, to_load)
+                        if state.runid != existing_runid:
+                            _warn_for_runid_changed(state)
+
+                    state._commit(dict_, to_load)
+
+            if post_load and context.invoke_all_eagers:
+                post_load.add_state(state, False)
+
+        return instance
+
+    if mapper.polymorphic_map and not _polymorphic_from and not refresh_state:
+        # if we are doing polymorphic, dispatch to a different _instance()
+        # method specific to the subclass mapper
+        def ensure_no_pk(row):
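+            # returns the identity key if the row contains a usable
+            # primary key, else None; lets the error paths below
+            # distinguish "no entity in this row" from "entity present
+            # but discriminator is invalid or NULL"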
+            identitykey = (
+                identity_class,
+                primary_key_getter(row),
+                identity_token,
+            )
+            if not is_not_primary_key(identitykey[1]):
+                return identitykey
+            else:
+                return None
+
+        _instance = _decorate_polymorphic_switch(
+            _instance,
+            context,
+            query_entity,
+            mapper,
+            result,
+            path,
+            polymorphic_discriminator,
+            adapter,
+            ensure_no_pk,
+        )
+
+    return _instance
+
+
+def _load_subclass_via_in(
+    context, path, entity, polymorphic_from, option_entities
+):
+    mapper = entity.mapper
+
+    # TODO: polymorphic_from seems to be a Mapper in all cases.
+    # this is likely not needed, but as we don't have typing in loading.py
+    # yet, err on the safe side
+    polymorphic_from_mapper = polymorphic_from.mapper
+    not_against_basemost = polymorphic_from_mapper.inherits is not None
+
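+    # with a single-column primary key on the base mapper, scalar PK
+    # values are sent to the IN loader rather than one-element tuples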
+    zero_idx = len(mapper.base_mapper.primary_key) == 1
+
+    if entity.is_aliased_class or not_against_basemost:
+        q, enable_opt, disable_opt = mapper._subclass_load_via_in(
+            entity, polymorphic_from
+        )
+    else:
+        q, enable_opt, disable_opt = mapper._subclass_load_via_in_mapper
+
+    def do_load(context, path, states, load_only, effective_entity):
+        if not option_entities:
+            # filter out states for those that would have selectinloaded
+            # from another loader
+            # TODO: we are currently ignoring the case where the
+            # "selectin_polymorphic" option is used, as this is much more
+            # complex / specific / very uncommon API use
+            states = [
+                (s, v)
+                for s, v in states
+                if s.mapper._would_selectin_load_only_from_given_mapper(mapper)
+            ]
+
+            if not states:
+                return
+
+        orig_query = context.query
+
+        if path.parent:
+            enable_opt_lcl = enable_opt._prepend_path(path)
+            disable_opt_lcl = disable_opt._prepend_path(path)
+        else:
+            enable_opt_lcl = enable_opt
+            disable_opt_lcl = disable_opt
+        options = (
+            (enable_opt_lcl,) + orig_query._with_options + (disable_opt_lcl,)
+        )
+
+        q2 = q.options(*options)
+
+        q2._compile_options = context.compile_state.default_compile_options
+        q2._compile_options += {"_current_path": path.parent}
+
+        if context.populate_existing:
+            q2 = q2.execution_options(populate_existing=True)
+
+        context.session.execute(
+            q2,
+            dict(
+                primary_keys=[
+                    state.key[1][0] if zero_idx else state.key[1]
+                    for state, load_attrs in states
+                ]
+            ),
+        ).unique().scalars().all()
+
+    return do_load
+
+
+def _populate_full(
+    context,
+    row,
+    state,
+    dict_,
+    isnew,
+    load_path,
+    loaded_instance,
+    populate_existing,
+    populators,
+):
+    if isnew:
+        # first time we are seeing a row with this identity.
+        state.runid = context.runid
+
+        for key, getter in populators["quick"]:
+            dict_[key] = getter(row)
+        if populate_existing:
+            for key, set_callable in populators["expire"]:
+                dict_.pop(key, None)
+                if set_callable:
+                    state.expired_attributes.add(key)
+        else:
+            for key, set_callable in populators["expire"]:
+                if set_callable:
+                    state.expired_attributes.add(key)
+
+        for key, populator in populators["new"]:
+            populator(state, dict_, row)
+
+    elif load_path != state.load_path:
+        # new load path, e.g. object is present in more than one
+        # column position in a series of rows
+        state.load_path = load_path
+
+        # if we have data, and the data isn't in the dict, OK, let's put
+        # it in.
+        for key, getter in populators["quick"]:
+            if key not in dict_:
+                dict_[key] = getter(row)
+
+        # otherwise treat like an "already seen" row
+        for key, populator in populators["existing"]:
+            populator(state, dict_, row)
+            # TODO:  allow "existing" populator to know this is
+            # a new path for the state:
+            # populator(state, dict_, row, new_path=True)
+
+    else:
+        # have already seen rows with this identity in this same path.
+        for key, populator in populators["existing"]:
+            populator(state, dict_, row)
+
+            # TODO: same path
+            # populator(state, dict_, row, new_path=False)
+
+
+def _populate_partial(
+    context, row, state, dict_, isnew, load_path, unloaded, populators
+):
+    if not isnew:
+        if unloaded:
+            # extra pass, see #8166
+            for key, getter in populators["quick"]:
+                if key in unloaded:
+                    dict_[key] = getter(row)
+
+        to_load = context.partials[state]
+        for key, populator in populators["existing"]:
+            if key in to_load:
+                populator(state, dict_, row)
+    else:
+        to_load = unloaded
+        context.partials[state] = to_load
+
+        for key, getter in populators["quick"]:
+            if key in to_load:
+                dict_[key] = getter(row)
+        for key, set_callable in populators["expire"]:
+            if key in to_load:
+                dict_.pop(key, None)
+                if set_callable:
+                    state.expired_attributes.add(key)
+        for key, populator in populators["new"]:
+            if key in to_load:
+                populator(state, dict_, row)
+
+    for key, populator in populators["eager"]:
+        if key not in unloaded:
+            populator(state, dict_, row)
+
+    return to_load
+
+
+def _validate_version_id(mapper, state, dict_, row, getter):
+    if mapper._get_state_attr_by_column(
+        state, dict_, mapper.version_id_col
+    ) != getter(row):
+        raise orm_exc.StaleDataError(
+            "Instance '%s' has version id '%s' which "
+            "does not match database-loaded version id '%s'."
+            % (
+                state_str(state),
+                mapper._get_state_attr_by_column(
+                    state, dict_, mapper.version_id_col
+                ),
+                getter(row),
+            )
+        )
+
+
+def _decorate_polymorphic_switch(
+    instance_fn,
+    context,
+    query_entity,
+    mapper,
+    result,
+    path,
+    polymorphic_discriminator,
+    adapter,
+    ensure_no_pk,
+):
+    if polymorphic_discriminator is not None:
+        polymorphic_on = polymorphic_discriminator
+    else:
+        polymorphic_on = mapper.polymorphic_on
+    if polymorphic_on is None:
+        return instance_fn
+
+    if adapter:
+        polymorphic_on = adapter.columns[polymorphic_on]
+
+    def configure_subclass_mapper(discriminator):
+        try:
+            sub_mapper = mapper.polymorphic_map[discriminator]
+        except KeyError:
+            raise AssertionError(
+                "No such polymorphic_identity %r is defined" % discriminator
+            )
+        else:
+            if sub_mapper is mapper:
+                return None
+            elif not sub_mapper.isa(mapper):
+                return False
+
+            return _instance_processor(
+                query_entity,
+                sub_mapper,
+                context,
+                result,
+                path,
+                adapter,
+                _polymorphic_from=mapper,
+            )
+
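+    # PopulateDict calls configure_subclass_mapper() once per distinct
+    # discriminator value, memoizing the per-subclass _instance()
+    # callable (or the None / False sentinels) for subsequent rows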
+    polymorphic_instances = util.PopulateDict(configure_subclass_mapper)
+
+    getter = result._getter(polymorphic_on)
+
+    def polymorphic_instance(row):
+        discriminator = getter(row)
+        if discriminator is not None:
+            _instance = polymorphic_instances[discriminator]
+            if _instance:
+                return _instance(row)
+            elif _instance is False:
+                identitykey = ensure_no_pk(row)
+
+                if identitykey:
+                    raise sa_exc.InvalidRequestError(
+                        "Row with identity key %s can't be loaded into an "
+                        "object; the polymorphic discriminator column '%s' "
+                        "refers to %s, which is not a sub-mapper of "
+                        "the requested %s"
+                        % (
+                            identitykey,
+                            polymorphic_on,
+                            mapper.polymorphic_map[discriminator],
+                            mapper,
+                        )
+                    )
+                else:
+                    return None
+            else:
+                return instance_fn(row)
+        else:
+            identitykey = ensure_no_pk(row)
+
+            if identitykey:
+                raise sa_exc.InvalidRequestError(
+                    "Row with identity key %s can't be loaded into an "
+                    "object; the polymorphic discriminator column '%s' is "
+                    "NULL" % (identitykey, polymorphic_on)
+                )
+            else:
+                return None
+
+    return polymorphic_instance
+
+
+class PostLoad:
+    """Track loaders and states for "post load" operations."""
+
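+    # states accumulate as (state, overwrite) pairs during row
+    # processing; once the primary rows are consumed, invoke() fires
+    # each loader registered via callable_for_path() against the
+    # collected states, then clears them so the context can be reused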
+    __slots__ = "loaders", "states", "load_keys"
+
+    def __init__(self):
+        self.loaders = {}
+        self.states = util.OrderedDict()
+        self.load_keys = None
+
+    def add_state(self, state, overwrite):
+        # the states for a polymorphic load here are all shared
+        # within a single PostLoad object among multiple subtypes.
+        # Filtering of callables on a per-subclass basis needs to be done at
+        # the invocation level
+        self.states[state] = overwrite
+
+    def invoke(self, context, path):
+        if not self.states:
+            return
+        path = path_registry.PathRegistry.coerce(path)
+        for (
+            effective_context,
+            token,
+            limit_to_mapper,
+            loader,
+            arg,
+            kw,
+        ) in self.loaders.values():
+            states = [
+                (state, overwrite)
+                for state, overwrite in self.states.items()
+                if state.manager.mapper.isa(limit_to_mapper)
+            ]
+            if states:
+                loader(
+                    effective_context, path, states, self.load_keys, *arg, **kw
+                )
+        self.states.clear()
+
+    @classmethod
+    def for_context(cls, context, path, only_load_props):
+        pl = context.post_load_paths.get(path.path)
+        if pl is not None and only_load_props:
+            pl.load_keys = only_load_props
+        return pl
+
+    @classmethod
+    def path_exists(cls, context, path, key):
+        return (
+            path.path in context.post_load_paths
+            and key in context.post_load_paths[path.path].loaders
+        )
+
+    @classmethod
+    def callable_for_path(
+        cls, context, path, limit_to_mapper, token, loader_callable, *arg, **kw
+    ):
+        if path.path in context.post_load_paths:
+            pl = context.post_load_paths[path.path]
+        else:
+            pl = context.post_load_paths[path.path] = PostLoad()
+        pl.loaders[token] = (
+            context,
+            token,
+            limit_to_mapper,
+            loader_callable,
+            arg,
+            kw,
+        )
+
+
+def load_scalar_attributes(mapper, state, attribute_names, passive):
+    """initiate a column-based attribute refresh operation."""
+
+    # assert mapper is _state_mapper(state)
+    session = state.session
+    if not session:
+        raise orm_exc.DetachedInstanceError(
+            "Instance %s is not bound to a Session; "
+            "attribute refresh operation cannot proceed" % (state_str(state))
+        )
+
+    no_autoflush = bool(passive & attributes.NO_AUTOFLUSH)
+
+    # in the case of inheritance, particularly concrete and abstract
+    # concrete inheritance, the class manager might have some keys
+    # of attributes on the superclass that we didn't actually map.
+    # These could be mapped as "concrete, don't load" or could be completely
+    # excluded from the mapping and we know nothing about them.  Filter them
+    # here to prevent them from coming through.
+    if attribute_names:
+        attribute_names = attribute_names.intersection(mapper.attrs.keys())
+
+    if mapper.inherits and not mapper.concrete:
+        # load based on committed attributes in the object, formed into
+        # a truncated SELECT that only includes relevant tables.  does not
+        # currently use state.key
+        statement = mapper._optimized_get_statement(state, attribute_names)
+        if statement is not None:
+            # undefer() isn't needed here because statement has the
+            # columns needed already, this implicitly undefers that column
+            stmt = FromStatement(mapper, statement)
+
+            return load_on_ident(
+                session,
+                stmt,
+                None,
+                only_load_props=attribute_names,
+                refresh_state=state,
+                no_autoflush=no_autoflush,
+            )
+
+    # normal load, use state.key as the identity to SELECT
+    has_key = bool(state.key)
+
+    if has_key:
+        identity_key = state.key
+    else:
+        # this codepath is rare - only valid when inside a flush, and the
+        # object is becoming persistent but hasn't yet been assigned
+        # an identity_key.
+        # check here to ensure we have the attrs we need.
+        pk_attrs = [
+            mapper._columntoproperty[col].key for col in mapper.primary_key
+        ]
+        if state.expired_attributes.intersection(pk_attrs):
+            raise sa_exc.InvalidRequestError(
+                "Instance %s cannot be refreshed - it's not "
+                " persistent and does not "
+                "contain a full primary key." % state_str(state)
+            )
+        identity_key = mapper._identity_key_from_state(state)
+
+    if (
+        _none_set.issubset(identity_key) and not mapper.allow_partial_pks
+    ) or _none_set.issuperset(identity_key):
+        util.warn_limited(
+            "Instance %s to be refreshed doesn't "
+            "contain a full primary key - can't be refreshed "
+            "(and shouldn't be expired, either).",
+            state_str(state),
+        )
+        return
+
+    result = load_on_ident(
+        session,
+        select(mapper).set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL),
+        identity_key,
+        refresh_state=state,
+        only_load_props=attribute_names,
+        no_autoflush=no_autoflush,
+    )
+
+    # if instance is pending, a refresh operation
+    # may not complete (even if PK attributes are assigned)
+    if has_key and result is None:
+        raise orm_exc.ObjectDeletedError(state)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/mapped_collection.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/mapped_collection.py
new file mode 100644
index 00000000..ca085c40
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/mapped_collection.py
@@ -0,0 +1,557 @@
+# orm/mapped_collection.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+import operator
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Generic
+from typing import List
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import base
+from .collections import collection
+from .collections import collection_adapter
+from .. import exc as sa_exc
+from .. import util
+from ..sql import coercions
+from ..sql import expression
+from ..sql import roles
+from ..util.langhelpers import Missing
+from ..util.langhelpers import MissingOr
+from ..util.typing import Literal
+
+if TYPE_CHECKING:
+    from . import AttributeEventToken
+    from . import Mapper
+    from .collections import CollectionAdapter
+    from ..sql.elements import ColumnElement
+
+_KT = TypeVar("_KT", bound=Any)
+_VT = TypeVar("_VT", bound=Any)
+
+
+class _PlainColumnGetter(Generic[_KT]):
+    """Plain column getter, stores collection of Column objects
+    directly.
+
+    Serializes to a :class:`._SerializableColumnGetterV2`
+    which has more expensive __call__() performance
+    and some rare caveats.
+
+    """
+
+    __slots__ = ("cols", "composite")
+
+    def __init__(self, cols: Sequence[ColumnElement[_KT]]) -> None:
+        self.cols = cols
+        self.composite = len(cols) > 1
+
+    def __reduce__(
+        self,
+    ) -> Tuple[
+        Type[_SerializableColumnGetterV2[_KT]],
+        Tuple[Sequence[Tuple[Optional[str], Optional[str]]]],
+    ]:
+        return _SerializableColumnGetterV2._reduce_from_cols(self.cols)
+
+    def _cols(self, mapper: Mapper[_KT]) -> Sequence[ColumnElement[_KT]]:
+        return self.cols
+
+    def __call__(self, value: _KT) -> MissingOr[Union[_KT, Tuple[_KT, ...]]]:
+        state = base.instance_state(value)
+        m = base._state_mapper(state)
+
+        key: List[_KT] = [
+            m._get_state_attr_by_column(state, state.dict, col)
+            for col in self._cols(m)
+        ]
+        if self.composite:
+            return tuple(key)
+        else:
+            obj = key[0]
+            if obj is None:
+                return Missing
+            else:
+                return obj
+
+
+class _SerializableColumnGetterV2(_PlainColumnGetter[_KT]):
+    """Updated serializable getter which deals with
+    multi-table mapped classes.
+
+    Two extremely unusual cases are not supported.
+    Mappings which have tables across multiple metadata
+    objects, or which are mapped to non-Table selectables
+    linked across inheriting mappers may fail to function
+    here.
+
+    """
+
+    __slots__ = ("colkeys",)
+
+    def __init__(
+        self, colkeys: Sequence[Tuple[Optional[str], Optional[str]]]
+    ) -> None:
+        self.colkeys = colkeys
+        self.composite = len(colkeys) > 1
+
+    def __reduce__(
+        self,
+    ) -> Tuple[
+        Type[_SerializableColumnGetterV2[_KT]],
+        Tuple[Sequence[Tuple[Optional[str], Optional[str]]]],
+    ]:
+        return self.__class__, (self.colkeys,)
+
+    @classmethod
+    def _reduce_from_cols(cls, cols: Sequence[ColumnElement[_KT]]) -> Tuple[
+        Type[_SerializableColumnGetterV2[_KT]],
+        Tuple[Sequence[Tuple[Optional[str], Optional[str]]]],
+    ]:
+        def _table_key(c: ColumnElement[_KT]) -> Optional[str]:
+            if not isinstance(c.table, expression.TableClause):
+                return None
+            else:
+                return c.table.key  # type: ignore
+
+        colkeys = [(c.key, _table_key(c)) for c in cols]
+        return _SerializableColumnGetterV2, (colkeys,)
+
+    def _cols(self, mapper: Mapper[_KT]) -> Sequence[ColumnElement[_KT]]:
+        cols: List[ColumnElement[_KT]] = []
+        metadata = getattr(mapper.local_table, "metadata", None)
+        for ckey, tkey in self.colkeys:
+            if tkey is None or metadata is None or tkey not in metadata:
+                cols.append(mapper.local_table.c[ckey])  # type: ignore
+            else:
+                cols.append(metadata.tables[tkey].c[ckey])
+        return cols
+
+
+def column_keyed_dict(
+    mapping_spec: Union[Type[_KT], Callable[[_KT], _VT]],
+    *,
+    ignore_unpopulated_attribute: bool = False,
+) -> Type[KeyFuncDict[_KT, _KT]]:
+    """A dictionary-based collection type with column-based keying.
+
+    .. versionchanged:: 2.0 Renamed :data:`.column_mapped_collection` to
+       :class:`.column_keyed_dict`.
+
+    Returns a :class:`.KeyFuncDict` factory which will produce new
+    dictionary keys based on the value of a particular :class:`.Column`-mapped
+    attribute on ORM mapped instances to be added to the dictionary.
+
+    .. note:: the value of the target attribute must be assigned with its
+       value at the time that the object is being added to the
+       dictionary collection.   Additionally, changes to the key attribute
+       are **not tracked**, which means the key in the dictionary is not
+       automatically synchronized with the key value on the target object
+       itself.  See :ref:`key_collections_mutations` for further details.
+
+    .. seealso::
+
+        :ref:`orm_dictionary_collection` - background on use
+
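+    A minimal sketch (the ``Note`` / ``User`` mappings here are
+    hypothetical, for illustration only)::
+
+        # hypothetical mappings, for illustration only
+        class Note(Base):
+            __tablename__ = "note"
+
+            id = mapped_column(Integer, primary_key=True)
+            user_id = mapped_column(ForeignKey("user.id"))
+            keyword = mapped_column(String)
+
+        class User(Base):
+            __tablename__ = "user"
+
+            id = mapped_column(Integer, primary_key=True)
+            notes = relationship(
+                Note,
+                collection_class=column_keyed_dict(
+                    Note.__table__.c.keyword
+                ),
+            )
+
+    with the above, each ``Note`` is keyed in ``User.notes`` by the
+    value of its ``keyword`` column at the time it is added.
+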
+    :param mapping_spec: a :class:`_schema.Column` object that is expected
+     to be mapped by the target mapper to a particular attribute on the
+     mapped class, the value of which on a particular instance is to be used
+     as the key for a new dictionary entry for that instance.
+    :param ignore_unpopulated_attribute:  if True, and the mapped attribute
+     indicated by the given :class:`_schema.Column` target attribute
+     on an object is not populated at all, the operation will be silently
+     skipped.  By default, an error is raised.
+
+     .. versionadded:: 2.0 an error is raised by default if the attribute
+        being used for the dictionary key is determined to have never been
+        populated with any value.  The
+        :paramref:`_orm.column_keyed_dict.ignore_unpopulated_attribute`
+        parameter may be set which will instead indicate that this condition
+        should be ignored, and the append operation silently skipped.
+        This is in contrast to the behavior of the 1.x series which would
+        erroneously populate the value in the dictionary with an arbitrary key
+        value of ``None``.
+
+
+    """
+    cols = [
+        coercions.expect(roles.ColumnArgumentRole, q, argname="mapping_spec")
+        for q in util.to_list(mapping_spec)
+    ]
+    keyfunc = _PlainColumnGetter(cols)
+    return _mapped_collection_cls(
+        keyfunc,
+        ignore_unpopulated_attribute=ignore_unpopulated_attribute,
+    )
+
+
+class _AttrGetter:
+    __slots__ = ("attr_name", "getter")
+
+    def __init__(self, attr_name: str):
+        self.attr_name = attr_name
+        self.getter = operator.attrgetter(attr_name)
+
+    def __call__(self, mapped_object: Any) -> Any:
+        obj = self.getter(mapped_object)
+        if obj is None:
+            state = base.instance_state(mapped_object)
+            mp = state.mapper
+            if self.attr_name in mp.attrs:
+                dict_ = state.dict
+                obj = dict_.get(self.attr_name, base.NO_VALUE)
+                if obj is None:
+                    return Missing
+            else:
+                return Missing
+
+        return obj
+
+    def __reduce__(self) -> Tuple[Type[_AttrGetter], Tuple[str]]:
+        return _AttrGetter, (self.attr_name,)
+
+
+def attribute_keyed_dict(
+    attr_name: str, *, ignore_unpopulated_attribute: bool = False
+) -> Type[KeyFuncDict[Any, Any]]:
+    """A dictionary-based collection type with attribute-based keying.
+
+    .. versionchanged:: 2.0 Renamed :data:`.attribute_mapped_collection` to
+       :func:`.attribute_keyed_dict`.
+
+    Returns a :class:`.KeyFuncDict` factory which will produce new
+    dictionary keys based on the value of a particular named attribute on
+    ORM mapped instances to be added to the dictionary.
+
+    .. note:: the value of the target attribute must be assigned with its
+       value at the time that the object is being added to the
+       dictionary collection.   Additionally, changes to the key attribute
+       are **not tracked**, which means the key in the dictionary is not
+       automatically synchronized with the key value on the target object
+       itself.  See :ref:`key_collections_mutations` for further details.
+
+    .. seealso::
+
+        :ref:`orm_dictionary_collection` - background on use
+
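+    A minimal sketch (the ``Note`` mapping referenced here is
+    hypothetical, for illustration only)::
+
+        # hypothetical mapping, for illustration only
+        class User(Base):
+            __tablename__ = "user"
+
+            id = mapped_column(Integer, primary_key=True)
+            notes = relationship(
+                "Note",
+                collection_class=attribute_keyed_dict("keyword"),
+            )
+
+    with the above, ``user.notes["best"]`` refers to the ``Note`` whose
+    ``.keyword`` attribute was ``"best"`` when it was added to the
+    collection.
+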
+    :param attr_name: string name of an ORM-mapped attribute
+     on the mapped class, the value of which on a particular instance
+     is to be used as the key for a new dictionary entry for that instance.
+    :param ignore_unpopulated_attribute:  if True, and the target attribute
+     on an object is not populated at all, the operation will be silently
+     skipped.  By default, an error is raised.
+
+     .. versionadded:: 2.0 an error is raised by default if the attribute
+        being used for the dictionary key is determined to have never been
+        populated with any value.  The
+        :paramref:`_orm.attribute_keyed_dict.ignore_unpopulated_attribute`
+        parameter may be set which will instead indicate that this condition
+        should be ignored, and the append operation silently skipped.
+        This is in contrast to the behavior of the 1.x series which would
+        erroneously populate the value in the dictionary with an arbitrary key
+        value of ``None``.
+
+
+    """
+
+    return _mapped_collection_cls(
+        _AttrGetter(attr_name),
+        ignore_unpopulated_attribute=ignore_unpopulated_attribute,
+    )
+
+
+def keyfunc_mapping(
+    keyfunc: Callable[[Any], Any],
+    *,
+    ignore_unpopulated_attribute: bool = False,
+) -> Type[KeyFuncDict[_KT, Any]]:
+    """A dictionary-based collection type with arbitrary keying.
+
+    .. versionchanged:: 2.0 Renamed :data:`.mapped_collection` to
+       :func:`.keyfunc_mapping`.
+
+    Returns a :class:`.KeyFuncDict` factory with a keying function
+    generated from keyfunc, a callable that takes an entity and returns a
+    key value.
+
+    .. note:: the given keyfunc is called only once at the time that the
+       target object is being added to the collection.   Changes to the
+       effective value returned by the function are not tracked.
+
+
+    .. seealso::
+
+        :ref:`orm_dictionary_collection` - background on use
+
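+    A minimal sketch using a plain callable as the key function (the
+    ``Note`` mapping referenced here is hypothetical)::
+
+        # hypothetical key function, for illustration only
+        def note_key(note):
+            return note.keyword.lower()
+
+        class User(Base):
+            __tablename__ = "user"
+
+            id = mapped_column(Integer, primary_key=True)
+            notes = relationship(
+                "Note",
+                collection_class=keyfunc_mapping(note_key),
+            )
+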
+    :param keyfunc: a callable that will be passed the ORM-mapped instance
+     which should then generate a new key to use in the dictionary.
+     If the value returned is :attr:`.LoaderCallableStatus.NO_VALUE`, an error
+     is raised.
+    :param ignore_unpopulated_attribute:  if True, and the callable returns
+     :attr:`.LoaderCallableStatus.NO_VALUE` for a particular instance, the
+     operation will be silently skipped.  By default, an error is raised.
+
+     .. versionadded:: 2.0 an error is raised by default if the callable
+        being used for the dictionary key returns
+        :attr:`.LoaderCallableStatus.NO_VALUE`, which in an ORM attribute
+        context indicates an attribute that was never populated with any value.
+        The :paramref:`_orm.mapped_collection.ignore_unpopulated_attribute`
+        parameter may be set which will instead indicate that this condition
+        should be ignored, and the append operation silently skipped. This is
+        in contrast to the behavior of the 1.x series which would erroneously
+        populate the value in the dictionary with an arbitrary key value of
+        ``None``.
+
+
+    """
+    return _mapped_collection_cls(
+        keyfunc, ignore_unpopulated_attribute=ignore_unpopulated_attribute
+    )
+
+
+class KeyFuncDict(Dict[_KT, _VT]):
+    """Base for ORM mapped dictionary classes.
+
+    Extends the ``dict`` type with additional methods needed by SQLAlchemy ORM
+    collection classes. Use of :class:`_orm.KeyFuncDict` is most directly
+    by using the :func:`.attribute_keyed_dict` or
+    :func:`.column_keyed_dict` class factories.
+    :class:`_orm.KeyFuncDict` may also serve as the base for user-defined
+    custom dictionary classes.
+
+    .. versionchanged:: 2.0 Renamed :class:`.MappedCollection` to
+       :class:`.KeyFuncDict`.
+
+    .. seealso::
+
+        :func:`_orm.attribute_keyed_dict`
+
+        :func:`_orm.column_keyed_dict`
+
+        :ref:`orm_dictionary_collection`
+
+        :ref:`orm_custom_collection`
+
+
+    """
+
+    def __init__(
+        self,
+        keyfunc: Callable[[Any], Any],
+        *dict_args: Any,
+        ignore_unpopulated_attribute: bool = False,
+    ) -> None:
+        """Create a new collection with keying provided by keyfunc.
+
+        keyfunc may be any callable that takes an object and returns an object
+        for use as a dictionary key.
+
+        The keyfunc will be called every time the ORM needs to add a member
+        by value only (such as when loading instances from the database) or
+        remove a member.  The usual cautions about dictionary keying apply:
+        ``keyfunc(object)`` should return the same output for the life of the
+        collection.  Keying based on mutable properties can result in
+        unreachable instances "lost" in the collection.
+
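+        A hedged sketch of direct subclassing (the ``Note`` objects
+        here are hypothetical)::
+
+            # hypothetical subclass, for illustration only
+            class LowercaseKeyedDict(KeyFuncDict):
+                def __init__(self, *args, **kw):
+                    super().__init__(
+                        lambda note: note.keyword.lower(), *args, **kw
+                    )
+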
+        """
+        self.keyfunc = keyfunc
+        self.ignore_unpopulated_attribute = ignore_unpopulated_attribute
+        super().__init__(*dict_args)
+
+    @classmethod
+    def _unreduce(
+        cls,
+        keyfunc: Callable[[Any], Any],
+        values: Dict[_KT, _KT],
+        adapter: Optional[CollectionAdapter] = None,
+    ) -> "KeyFuncDict[_KT, _KT]":
+        mp: KeyFuncDict[_KT, _KT] = KeyFuncDict(keyfunc)
+        mp.update(values)
+        # note that the adapter sets itself up onto this collection
+        # when its `__setstate__` method is called
+        return mp
+
+    def __reduce__(
+        self,
+    ) -> Tuple[
+        Callable[[_KT, _KT], KeyFuncDict[_KT, _KT]],
+        Tuple[Any, Dict[_KT, _KT], CollectionAdapter],
+    ]:
+        return (
+            KeyFuncDict._unreduce,
+            (
+                self.keyfunc,
+                dict(self),
+                collection_adapter(self),
+            ),
+        )
+
+    @util.preload_module("sqlalchemy.orm.attributes")
+    def _raise_for_unpopulated(
+        self,
+        value: _KT,
+        initiator: Union[AttributeEventToken, Literal[None, False]] = None,
+        *,
+        warn_only: bool,
+    ) -> None:
+        mapper = base.instance_state(value).mapper
+
+        attributes = util.preloaded.orm_attributes
+
+        if not isinstance(initiator, attributes.AttributeEventToken):
+            relationship = "unknown relationship"
+        elif initiator.key in mapper.attrs:
+            relationship = f"{mapper.attrs[initiator.key]}"
+        else:
+            relationship = initiator.key
+
+        if warn_only:
+            util.warn(
+                f"Attribute keyed dictionary value for "
+                f"attribute '{relationship}' was None; this will raise "
+                "in a future release. "
+                f"To skip this assignment entirely, "
+                f'Set the "ignore_unpopulated_attribute=True" '
+                f"parameter on the mapped collection factory."
+            )
+        else:
+            raise sa_exc.InvalidRequestError(
+                "In event triggered from population of "
+                f"attribute '{relationship}' "
+                "(potentially from a backref), "
+                f"can't populate value in KeyFuncDict; "
+                "dictionary key "
+                f"derived from {base.instance_str(value)} is not "
+                f"populated. Ensure appropriate state is set up on "
+                f"the {base.instance_str(value)} object "
+                f"before assigning to the {relationship} attribute. "
+                f"To skip this assignment entirely, "
+                f'set the "ignore_unpopulated_attribute=True" '
+                f"parameter on the mapped collection factory."
+            )
+
+    @collection.appender  # type: ignore[misc]
+    @collection.internally_instrumented  # type: ignore[misc]
+    def set(
+        self,
+        value: _KT,
+        _sa_initiator: Union[AttributeEventToken, Literal[None, False]] = None,
+    ) -> None:
+        """Add an item by value, consulting the keyfunc for the key."""
+
+        key = self.keyfunc(value)
+
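+        # base.NO_VALUE indicates the keyed attribute was never populated
+        # on the instance; Missing indicates it was populated with None.
+        # The former raises (unless ignored); the latter currently warns
+        # and falls back to None as the key.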
+        if key is base.NO_VALUE:
+            if not self.ignore_unpopulated_attribute:
+                self._raise_for_unpopulated(
+                    value, _sa_initiator, warn_only=False
+                )
+            else:
+                return
+        elif key is Missing:
+            if not self.ignore_unpopulated_attribute:
+                self._raise_for_unpopulated(
+                    value, _sa_initiator, warn_only=True
+                )
+                key = None
+            else:
+                return
+
+        self.__setitem__(key, value, _sa_initiator)  # type: ignore[call-arg]
+
+    @collection.remover  # type: ignore[misc]
+    @collection.internally_instrumented  # type: ignore[misc]
+    def remove(
+        self,
+        value: _KT,
+        _sa_initiator: Union[AttributeEventToken, Literal[None, False]] = None,
+    ) -> None:
+        """Remove an item by value, consulting the keyfunc for the key."""
+
+        key = self.keyfunc(value)
+
+        if key is base.NO_VALUE:
+            if not self.ignore_unpopulated_attribute:
+                self._raise_for_unpopulated(
+                    value, _sa_initiator, warn_only=False
+                )
+            return
+        elif key is Missing:
+            if not self.ignore_unpopulated_attribute:
+                self._raise_for_unpopulated(
+                    value, _sa_initiator, warn_only=True
+                )
+                key = None
+            else:
+                return
+
+        # Let self[key] raise if key is not in this collection
+        # testlib.pragma exempt:__ne__
+        if self[key] != value:
+            raise sa_exc.InvalidRequestError(
+                "Can not remove '%s': collection holds '%s' for key '%s'. "
+                "Possible cause: is the KeyFuncDict key function "
+                "based on mutable properties or properties that only obtain "
+                "values after flush?" % (value, self[key], key)
+            )
+        self.__delitem__(key, _sa_initiator)  # type: ignore[call-arg]
+
+
+def _mapped_collection_cls(
+    keyfunc: Callable[[Any], Any], ignore_unpopulated_attribute: bool
+) -> Type[KeyFuncDict[_KT, _KT]]:
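+    # bake the keyfunc and flag into a subclass constructible with no
+    # arguments, as required for use as a relationship() collection_class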
+    class _MKeyfuncMapped(KeyFuncDict[_KT, _KT]):
+        def __init__(self, *dict_args: Any) -> None:
+            super().__init__(
+                keyfunc,
+                *dict_args,
+                ignore_unpopulated_attribute=ignore_unpopulated_attribute,
+            )
+
+    return _MKeyfuncMapped
+
+
+MappedCollection = KeyFuncDict
+"""A synonym for :class:`.KeyFuncDict`.
+
+.. versionchanged:: 2.0 Renamed :class:`.MappedCollection` to
+   :class:`.KeyFuncDict`.
+
+"""
+
+mapped_collection = keyfunc_mapping
+"""A synonym for :func:`_orm.keyfunc_mapping`.
+
+.. versionchanged:: 2.0 Renamed :data:`.mapped_collection` to
+   :func:`_orm.keyfunc_mapping`
+
+"""
+
+attribute_mapped_collection = attribute_keyed_dict
+"""A synonym for :func:`_orm.attribute_keyed_dict`.
+
+.. versionchanged:: 2.0 Renamed :data:`.attribute_mapped_collection` to
+   :func:`_orm.attribute_keyed_dict`
+
+"""
+
+column_mapped_collection = column_keyed_dict
+"""A synonym for :func:`_orm.column_keyed_dict.
+
+.. versionchanged:: 2.0 Renamed :func:`.column_mapped_collection` to
+   :func:`_orm.column_keyed_dict`
+
+"""
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/mapper.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/mapper.py
new file mode 100644
index 00000000..eab2be55
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/mapper.py
@@ -0,0 +1,4431 @@
+# orm/mapper.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+"""Logic to map Python classes to and from selectables.
+
+Defines the :class:`~sqlalchemy.orm.mapper.Mapper` class, the central
+configurational unit which associates a class with a database table.
+
+This is a semi-private module; the main configurational API of the ORM is
+available in the :mod:`sqlalchemy.orm` namespace.
+
+"""
+from __future__ import annotations
+
+from collections import deque
+from functools import reduce
+from itertools import chain
+import sys
+import threading
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Collection
+from typing import Deque
+from typing import Dict
+from typing import FrozenSet
+from typing import Generic
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+import weakref
+
+from . import attributes
+from . import exc as orm_exc
+from . import instrumentation
+from . import loading
+from . import properties
+from . import util as orm_util
+from ._typing import _O
+from .base import _class_to_mapper
+from .base import _parse_mapper_argument
+from .base import _state_mapper
+from .base import PassiveFlag
+from .base import state_str
+from .interfaces import _MappedAttribute
+from .interfaces import EXT_SKIP
+from .interfaces import InspectionAttr
+from .interfaces import MapperProperty
+from .interfaces import ORMEntityColumnsClauseRole
+from .interfaces import ORMFromClauseRole
+from .interfaces import StrategizedProperty
+from .path_registry import PathRegistry
+from .. import event
+from .. import exc as sa_exc
+from .. import inspection
+from .. import log
+from .. import schema
+from .. import sql
+from .. import util
+from ..event import dispatcher
+from ..event import EventTarget
+from ..sql import base as sql_base
+from ..sql import coercions
+from ..sql import expression
+from ..sql import operators
+from ..sql import roles
+from ..sql import TableClause
+from ..sql import util as sql_util
+from ..sql import visitors
+from ..sql.cache_key import MemoizedHasCacheKey
+from ..sql.elements import KeyedColumnElement
+from ..sql.schema import Column
+from ..sql.schema import Table
+from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
+from ..util import HasMemoized
+from ..util import HasMemoized_ro_memoized_attribute
+from ..util.typing import Literal
+
+if TYPE_CHECKING:
+    from ._typing import _IdentityKeyType
+    from ._typing import _InstanceDict
+    from ._typing import _ORMColumnExprArgument
+    from ._typing import _RegistryType
+    from .decl_api import registry
+    from .dependency import DependencyProcessor
+    from .descriptor_props import CompositeProperty
+    from .descriptor_props import SynonymProperty
+    from .events import MapperEvents
+    from .instrumentation import ClassManager
+    from .path_registry import CachingEntityRegistry
+    from .properties import ColumnProperty
+    from .relationships import RelationshipProperty
+    from .state import InstanceState
+    from .util import ORMAdapter
+    from ..engine import Row
+    from ..engine import RowMapping
+    from ..sql._typing import _ColumnExpressionArgument
+    from ..sql._typing import _EquivalentColumnMap
+    from ..sql.base import ReadOnlyColumnCollection
+    from ..sql.elements import ColumnClause
+    from ..sql.elements import ColumnElement
+    from ..sql.selectable import FromClause
+    from ..util import OrderedSet
+
+
+_T = TypeVar("_T", bound=Any)
+_MP = TypeVar("_MP", bound="MapperProperty[Any]")
+_Fn = TypeVar("_Fn", bound="Callable[..., Any]")
+
+
+_WithPolymorphicArg = Union[
+    Literal["*"],
+    Tuple[
+        Union[Literal["*"], Sequence[Union["Mapper[Any]", Type[Any]]]],
+        Optional["FromClause"],
+    ],
+    Sequence[Union["Mapper[Any]", Type[Any]]],
+]
+
+
+_mapper_registries: weakref.WeakKeyDictionary[_RegistryType, bool] = (
+    weakref.WeakKeyDictionary()
+)
+
+
+def _all_registries() -> Set[registry]:
+    with _CONFIGURE_MUTEX:
+        return set(_mapper_registries)
+
+
+def _unconfigured_mappers() -> Iterator[Mapper[Any]]:
+    for reg in _all_registries():
+        yield from reg._mappers_to_configure()
+
+
+_already_compiling = False
+
+
+# a constant returned by _get_attr_by_column to indicate
+# this mapper is not handling an attribute for a particular
+# column
+NO_ATTRIBUTE = util.symbol("NO_ATTRIBUTE")
+
+# lock used to synchronize the "mapper configure" step
+_CONFIGURE_MUTEX = threading.RLock()
+
+
+@inspection._self_inspects
+@log.class_logger
+class Mapper(
+    ORMFromClauseRole,
+    ORMEntityColumnsClauseRole[_O],
+    MemoizedHasCacheKey,
+    InspectionAttr,
+    log.Identified,
+    inspection.Inspectable["Mapper[_O]"],
+    EventTarget,
+    Generic[_O],
+):
+    """Defines an association between a Python class and a database table or
+    other relational structure, so that ORM operations against the class may
+    proceed.
+
+    The :class:`_orm.Mapper` object is instantiated using mapping methods
+    present on the :class:`_orm.registry` object.  For information
+    about instantiating new :class:`_orm.Mapper` objects, see
+    :ref:`orm_mapping_classes_toplevel`.
+
+    """
+
+    dispatch: dispatcher[Mapper[_O]]
+
+    _dispose_called = False
+    _configure_failed: Any = False
+    _ready_for_configure = False
+
+    @util.deprecated_params(
+        non_primary=(
+            "1.3",
+            "The :paramref:`.mapper.non_primary` parameter is deprecated, "
+            "and will be removed in a future release.  The functionality "
+            "of non primary mappers is now better suited using the "
+            ":class:`.AliasedClass` construct, which can also be used "
+            "as the target of a :func:`_orm.relationship` in 1.3.",
+        ),
+    )
+    def __init__(
+        self,
+        class_: Type[_O],
+        local_table: Optional[FromClause] = None,
+        properties: Optional[Mapping[str, MapperProperty[Any]]] = None,
+        primary_key: Optional[Iterable[_ORMColumnExprArgument[Any]]] = None,
+        non_primary: bool = False,
+        inherits: Optional[Union[Mapper[Any], Type[Any]]] = None,
+        inherit_condition: Optional[_ColumnExpressionArgument[bool]] = None,
+        inherit_foreign_keys: Optional[
+            Sequence[_ORMColumnExprArgument[Any]]
+        ] = None,
+        always_refresh: bool = False,
+        version_id_col: Optional[_ORMColumnExprArgument[Any]] = None,
+        version_id_generator: Optional[
+            Union[Literal[False], Callable[[Any], Any]]
+        ] = None,
+        polymorphic_on: Optional[
+            Union[_ORMColumnExprArgument[Any], str, MapperProperty[Any]]
+        ] = None,
+        _polymorphic_map: Optional[Dict[Any, Mapper[Any]]] = None,
+        polymorphic_identity: Optional[Any] = None,
+        concrete: bool = False,
+        with_polymorphic: Optional[_WithPolymorphicArg] = None,
+        polymorphic_abstract: bool = False,
+        polymorphic_load: Optional[Literal["selectin", "inline"]] = None,
+        allow_partial_pks: bool = True,
+        batch: bool = True,
+        column_prefix: Optional[str] = None,
+        include_properties: Optional[Sequence[str]] = None,
+        exclude_properties: Optional[Sequence[str]] = None,
+        passive_updates: bool = True,
+        passive_deletes: bool = False,
+        confirm_deleted_rows: bool = True,
+        eager_defaults: Literal[True, False, "auto"] = "auto",
+        legacy_is_orphan: bool = False,
+        _compiled_cache_size: int = 100,
+    ):
+        r"""Direct constructor for a new :class:`_orm.Mapper` object.
+
+        The :class:`_orm.Mapper` constructor is not called directly, and
+        is normally invoked through the
+        use of the :class:`_orm.registry` object through either the
+        :ref:`Declarative <orm_declarative_mapping>` or
+        :ref:`Imperative <orm_imperative_mapping>` mapping styles.
+
+        .. versionchanged:: 2.0 The public facing ``mapper()`` function is
+           removed; for a classical mapping configuration, use the
+           :meth:`_orm.registry.map_imperatively` method.
+
+        Parameters documented below may be passed to either the
+        :meth:`_orm.registry.map_imperatively` method, or may be passed in the
+        ``__mapper_args__`` declarative class attribute described at
+        :ref:`orm_declarative_mapper_options`.
+
+        :param class\_: The class to be mapped.  When using Declarative,
+          this argument is automatically passed as the declared class
+          itself.
+
+        :param local_table: The :class:`_schema.Table` or other
+           :class:`_sql.FromClause` (i.e. selectable) to which the class is
+           mapped. May be ``None`` if this mapper inherits from another mapper
+           using single-table inheritance. When using Declarative, this
+           argument is automatically passed by the extension, based on what is
+           configured via the :attr:`_orm.DeclarativeBase.__table__` attribute
+           or via the :class:`_schema.Table` produced as a result of
+           the :attr:`_orm.DeclarativeBase.__tablename__` attribute being
+           present.
+
+        :param polymorphic_abstract: Indicates this class will be mapped in a
+            polymorphic hierarchy, but not directly instantiated. The class is
+            mapped normally, except that it has no requirement for a
+            :paramref:`_orm.Mapper.polymorphic_identity` within an inheritance
+            hierarchy. The class however must be part of a polymorphic
+            inheritance scheme which uses
+            :paramref:`_orm.Mapper.polymorphic_on` at the base.
+
+            .. versionadded:: 2.0
+
+            .. seealso::
+
+                :ref:`orm_inheritance_abstract_poly`
+
+        :param always_refresh: If True, all query operations for this mapped
+           class will overwrite all data within object instances that already
+           exist within the session, erasing any in-memory changes with
+           whatever information was loaded from the database. Usage of this
+           flag is highly discouraged; as an alternative, see the method
+           :meth:`_query.Query.populate_existing`.
+
+        :param allow_partial_pks: Defaults to True.  Indicates that a
+           composite primary key with some NULL values should be considered as
+           possibly existing within the database. This affects whether a
+           mapper will assign an incoming row to an existing identity, as well
+           as if :meth:`.Session.merge` will check the database first for a
+           particular primary key value. A "partial primary key" can occur if
+           one has mapped to an OUTER JOIN, for example.
+
+           The :paramref:`_orm.Mapper.allow_partial_pks` parameter also
+           indicates to the ORM relationship lazy loader, when loading a
+           many-to-one related object, if a composite primary key that has
+           partial NULL values should result in an attempt to load from the
+           database, or if a load attempt is not necessary.
+
+           .. versionadded:: 2.0.36 :paramref:`_orm.Mapper.allow_partial_pks`
+              is consulted by the relationship lazy loader strategy, such that
+              when set to False, a SELECT for a composite primary key that
+              has partial NULL values will not be emitted.
+
+        :param batch: Defaults to ``True``, indicating that save operations
+           of multiple entities can be batched together for efficiency.
+           Setting to False indicates
+           that an instance will be fully saved before saving the next
+           instance.  This is used in the extremely rare case that a
+           :class:`.MapperEvents` listener requires being called
+           in between individual row persistence operations.
+
+        :param column_prefix: A string which will be prepended
+           to the mapped attribute name when :class:`_schema.Column`
+           objects are automatically assigned as attributes to the
+           mapped class.  Does not affect :class:`.Column` objects that
+           are mapped explicitly in the :paramref:`.Mapper.properties`
+           dictionary.
+
+           This parameter is typically useful with imperative mappings
+           that keep the :class:`.Table` object separate.  Below, assuming
+           the ``user_table`` :class:`.Table` object has columns named
+           ``user_id``, ``user_name``, and ``password``::
+
+                class User(Base):
+                    __table__ = user_table
+                    __mapper_args__ = {"column_prefix": "_"}
+
+           The above mapping will assign the ``user_id``, ``user_name``, and
+           ``password`` columns to attributes named ``_user_id``,
+           ``_user_name``, and ``_password`` on the mapped ``User`` class.
+
+           The :paramref:`.Mapper.column_prefix` parameter is uncommon in
+           modern use. For dealing with reflected tables, a more flexible
+           approach to automating a naming scheme is to intercept the
+           :class:`.Column` objects as they are reflected; see the section
+           :ref:`mapper_automated_reflection_schemes` for notes on this usage
+           pattern.
+
+        :param concrete: If True, indicates this mapper should use concrete
+           table inheritance with its parent mapper.
+
+           See the section :ref:`concrete_inheritance` for an example.
+
+        :param confirm_deleted_rows: defaults to True; when a DELETE occurs
+          of one or more rows based on specific primary keys, a warning is
+          emitted when the number of rows matched does not equal the number
+          of rows expected.  This parameter may be set to False to handle the
+          case where database ON DELETE CASCADE rules may be deleting some of
+          those rows automatically.  The warning may be changed to an
+          exception in a future release.
+
+        :param eager_defaults: if True, the ORM will immediately fetch the
+          value of server-generated default values after an INSERT or UPDATE,
+          rather than leaving them as expired to be fetched on next access.
+          This can be used for event schemes where the server-generated values
+          are needed immediately before the flush completes.
+
+          The fetch of values occurs either by using ``RETURNING`` inline
+          with the ``INSERT`` or ``UPDATE`` statement, or by adding an
+          additional ``SELECT`` statement subsequent to the ``INSERT`` or
+          ``UPDATE``, if the backend does not support ``RETURNING``.
+
+          The use of ``RETURNING`` is extremely performant in particular for
+          ``INSERT`` statements where SQLAlchemy can take advantage of
+          :ref:`insertmanyvalues <engine_insertmanyvalues>`, whereas the use of
+          an additional ``SELECT`` is relatively poor performing, adding
+          additional SQL round trips which would be unnecessary if these new
+          attributes are not to be accessed in any case.
+
+          For this reason, :paramref:`.Mapper.eager_defaults` defaults to the
+          string value ``"auto"``, which indicates that server defaults for
+          INSERT should be fetched using ``RETURNING`` if the backing database
+          supports it and if the dialect in use supports "insertmanyreturning"
+          for an INSERT statement. If the backing database does not support
+          ``RETURNING`` or "insertmanyreturning" is not available, server
+          defaults will not be fetched.
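+
+          A minimal sketch forcing the fetch unconditionally::
+
+              __mapper_args__ = {"eager_defaults": True}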
+
+          .. versionchanged:: 2.0.0rc1 added the "auto" option for
+             :paramref:`.Mapper.eager_defaults`
+
+          .. seealso::
+
+                :ref:`orm_server_defaults`
+
+          .. versionchanged:: 2.0.0  RETURNING now works with multiple rows
+             INSERTed at once using the
+             :ref:`insertmanyvalues <engine_insertmanyvalues>` feature, which
+             among other things allows the :paramref:`.Mapper.eager_defaults`
+             feature to be very performant on supporting backends.
+
+        :param exclude_properties: A list or set of string column names to
+          be excluded from mapping.
+
+          .. seealso::
+
+            :ref:`include_exclude_cols`
+
+        :param include_properties: An inclusive list or set of string column
+          names to map.
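+
+          For example, restricting an imperative mapping to two columns
+          (a minimal sketch; ``User`` and ``user_table`` are hypothetical)::
+
+              registry.map_imperatively(
+                  User,
+                  user_table,
+                  include_properties=["user_id", "user_name"],
+              )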
+
+          .. seealso::
+
+            :ref:`include_exclude_cols`
+
+        :param inherits: A mapped class or the corresponding
+          :class:`_orm.Mapper`
+          of one indicating a superclass to which this :class:`_orm.Mapper`
+          should *inherit* from.   The mapped class here must be a subclass
+          of the other mapper's class.   When using Declarative, this argument
+          is passed automatically as a result of the natural class
+          hierarchy of the declared classes.
+
+          .. seealso::
+
+            :ref:`inheritance_toplevel`
+
+        :param inherit_condition: For joined table inheritance, a SQL
+           expression which will
+           define how the two tables are joined; defaults to a natural join
+           between the two tables.
+
+        :param inherit_foreign_keys: When ``inherit_condition`` is used and
+           the columns present are missing a :class:`_schema.ForeignKey`
+           configuration, this parameter can be used to specify which columns
+           are "foreign".  In most cases can be left as ``None``.
+
+        :param legacy_is_orphan: Boolean, defaults to ``False``.
+          When ``True``, specifies that "legacy" orphan consideration
+          is to be applied to objects mapped by this mapper, which means
+          that a pending (that is, not persistent) object is auto-expunged
+          from an owning :class:`.Session` only when it is de-associated
+          from *all* parents that specify a ``delete-orphan`` cascade towards
+          this mapper.  The new default behavior is that the object is
+          auto-expunged when it is de-associated with *any* of its parents
+          that specify ``delete-orphan`` cascade.  This behavior is more
+          consistent with that of a persistent object, and allows behavior to
+          be consistent in more scenarios independently of whether or not an
+          orphan object has been flushed yet or not.
+
+          See the change note and example at :ref:`legacy_is_orphan_addition`
+          for more detail on this change.
+
+        :param non_primary: Specify that this :class:`_orm.Mapper`
+          is in addition
+          to the "primary" mapper, that is, the one used for persistence.
+          The :class:`_orm.Mapper` created here may be used for ad-hoc
+          mapping of the class to an alternate selectable, for loading
+          only.
+
+          .. seealso::
+
+            :ref:`relationship_aliased_class` - the new pattern that removes
+            the need for the :paramref:`_orm.Mapper.non_primary` flag.
+
+        :param passive_deletes: Indicates DELETE behavior of foreign key
+           columns when a joined-table inheritance entity is being deleted.
+           Defaults to ``False`` for a base mapper; for an inheriting mapper,
+           defaults to ``False`` unless the value is set to ``True``
+           on the superclass mapper.
+
+           When ``True``, it is assumed that ON DELETE CASCADE is configured
+           on the foreign key relationships that link this mapper's table
+           to its superclass table, so that when the unit of work attempts
+           to delete the entity, it need only emit a DELETE statement for the
+           superclass table, and not this table.
+
+           When ``False``, a DELETE statement is emitted for this mapper's
+           table individually.  If the primary key attributes local to this
+           table are unloaded, then a SELECT must be emitted in order to
+           validate these attributes; note that the primary key columns
+           of a joined-table subclass are not part of the "primary key" of
+           the object as a whole.
+
+           Note that a value of ``True`` is **always** forced onto the
+           subclass mappers; that is, it's not possible for a superclass
+           to specify passive_deletes without this taking effect for
+           all subclass mappers.
+
+           .. seealso::
+
+               :ref:`passive_deletes` - description of similar feature as
+               used with :func:`_orm.relationship`
+
+               :paramref:`.mapper.passive_updates` - supporting ON UPDATE
+               CASCADE for joined-table inheritance mappers
+
+        :param passive_updates: Indicates UPDATE behavior of foreign key
+           columns when a primary key column changes on a joined-table
+           inheritance mapping.   Defaults to ``True``.
+
+           When True, it is assumed that ON UPDATE CASCADE is configured on
+           the foreign key in the database, and that the database will handle
+           propagation of an UPDATE from a source column to dependent columns
+           on joined-table rows.
+
+           When False, it is assumed that the database does not enforce
+           referential integrity and will not be issuing its own CASCADE
+           operation for an update.  The unit of work process will
+           emit an UPDATE statement for the dependent columns during a
+           primary key change.
+
+           .. seealso::
+
+               :ref:`passive_updates` - description of a similar feature as
+               used with :func:`_orm.relationship`
+
+               :paramref:`.mapper.passive_deletes` - supporting ON DELETE
+               CASCADE for joined-table inheritance mappers
+
+        :param polymorphic_load: Specifies "polymorphic loading" behavior
+         for a subclass in an inheritance hierarchy (joined and single
+         table inheritance only).   Valid values are:
+
+          * "'inline'" - specifies this class should be part of
+            the "with_polymorphic" mappers, e.g. its columns will be included
+            in a SELECT query against the base.
+
+          * "'selectin'" - specifies that when instances of this class
+            are loaded, an additional SELECT will be emitted to retrieve
+            the columns specific to this subclass.  The SELECT uses
+            IN to fetch multiple subclasses at once.
+
+         .. versionadded:: 1.2
+
+         .. seealso::
+
+            :ref:`with_polymorphic_mapper_config`
+
+            :ref:`polymorphic_selectin`
+
+        :param polymorphic_on: Specifies the column, attribute, or
+          SQL expression used to determine the target class for an
+          incoming row, when inheriting classes are present.
+
+          May be specified as a string attribute name, or as a SQL
+          expression such as a :class:`_schema.Column` or in a Declarative
+          mapping a :func:`_orm.mapped_column` object.  It is typically
+          expected that the SQL expression corresponds to a column in the
+          base-most mapped :class:`.Table`::
+
+            class Employee(Base):
+                __tablename__ = "employee"
+
+                id: Mapped[int] = mapped_column(primary_key=True)
+                discriminator: Mapped[str] = mapped_column(String(50))
+
+                __mapper_args__ = {
+                    "polymorphic_on": discriminator,
+                    "polymorphic_identity": "employee",
+                }
+
+          It may also be specified
+          as a SQL expression, as in this example where we
+          use the :func:`.case` construct to provide a conditional
+          approach::
+
+            class Employee(Base):
+                __tablename__ = "employee"
+
+                id: Mapped[int] = mapped_column(primary_key=True)
+                discriminator: Mapped[str] = mapped_column(String(50))
+
+                __mapper_args__ = {
+                    "polymorphic_on": case(
+                        (discriminator == "EN", "engineer"),
+                        (discriminator == "MA", "manager"),
+                        else_="employee",
+                    ),
+                    "polymorphic_identity": "employee",
+                }
+
+          It may also refer to any attribute using its string name,
+          which is of particular use when using annotated column
+          configurations::
+
+                class Employee(Base):
+                    __tablename__ = "employee"
+
+                    id: Mapped[int] = mapped_column(primary_key=True)
+                    discriminator: Mapped[str]
+
+                    __mapper_args__ = {
+                        "polymorphic_on": "discriminator",
+                        "polymorphic_identity": "employee",
+                    }
+
+          When setting ``polymorphic_on`` to reference an
+          attribute or expression that's not present in the
+          locally mapped :class:`_schema.Table`, yet the value
+          of the discriminator should be persisted to the database,
+          the value of the
+          discriminator is not automatically set on new
+          instances; this must be handled by the user,
+          either through manual means or via event listeners.
+          A typical approach to establishing such a listener
+          looks like::
+
+                from sqlalchemy import event
+                from sqlalchemy.orm import object_mapper
+
+
+                @event.listens_for(Employee, "init", propagate=True)
+                def set_identity(instance, *arg, **kw):
+                    mapper = object_mapper(instance)
+                    instance.discriminator = mapper.polymorphic_identity
+
+          Where above, we assign the value of ``polymorphic_identity``
+          for the mapped class to the ``discriminator`` attribute,
+          thus persisting the value to the ``discriminator`` column
+          in the database.
+
+          .. warning::
+
+             Currently, **only one discriminator column may be set**, typically
+             on the base-most class in the hierarchy. "Cascading" polymorphic
+             columns are not yet supported.
+
+          .. seealso::
+
+            :ref:`inheritance_toplevel`
+
+        :param polymorphic_identity: Specifies the value which
+          identifies this particular class as returned by the column expression
+          referred to by the :paramref:`_orm.Mapper.polymorphic_on` setting. As
+          rows are received, the value corresponding to the
+          :paramref:`_orm.Mapper.polymorphic_on` column expression is compared
+          to this value, indicating which subclass should be used for the newly
+          reconstructed object.
+
+          .. seealso::
+
+            :ref:`inheritance_toplevel`
+
+        :param properties: A dictionary mapping the string names of object
+           attributes to :class:`.MapperProperty` instances, which define the
+           persistence behavior of that attribute.  Note that
+           :class:`_schema.Column`
+           objects present in
+           the mapped :class:`_schema.Table` are automatically placed into
+           ``ColumnProperty`` instances upon mapping, unless overridden.
+           When using Declarative, this argument is passed automatically,
+           based on all those :class:`.MapperProperty` instances declared
+           in the declared class body.
+
+           .. seealso::
+
+               :ref:`orm_mapping_properties` - in the
+               :ref:`orm_mapping_classes_toplevel`
+
+        :param primary_key: A list of :class:`_schema.Column`
+           objects, or alternatively string names of attribute names which
+           refer to :class:`_schema.Column`, which define
+           the primary key to be used against this mapper's selectable unit.
+           This is normally simply the primary key of the ``local_table``, but
+           can be overridden here.
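+
+           For example, when mapping to a join where a single table's
+           column should act as the key (a sketch; names are
+           hypothetical)::
+
+               __mapper_args__ = {"primary_key": [user_table.c.id]}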
+
+           .. versionchanged:: 2.0.2 :paramref:`_orm.Mapper.primary_key`
+              arguments may be indicated as string attribute names as well.
+
+           .. seealso::
+
+                :ref:`mapper_primary_key` - background and example use
+
+        :param version_id_col: A :class:`_schema.Column`
+           that will be used to keep a running version id of rows
+           in the table.  This is used to detect concurrent updates or
+           the presence of stale data in a flush.  If an UPDATE statement
+           does not match the last known version id, a
+           :class:`~sqlalchemy.orm.exc.StaleDataError` exception is
+           raised.
+           By default, the column must be of :class:`.Integer` type,
+           unless ``version_id_generator`` specifies an alternative version
+           generator.
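+
+           A minimal declarative sketch (``version_id`` here is a
+           hypothetical column)::
+
+               class User(Base):
+                   __tablename__ = "user"
+
+                   id: Mapped[int] = mapped_column(primary_key=True)
+                   version_id: Mapped[int] = mapped_column(nullable=False)
+
+                   __mapper_args__ = {"version_id_col": version_id}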
+
+           .. seealso::
+
+              :ref:`mapper_version_counter` - discussion of version counting
+              and rationale.
+
+        :param version_id_generator: Define how new version ids should
+          be generated.  Defaults to ``None``, which indicates that
+          a simple integer counting scheme be employed.  To provide a custom
+          versioning scheme, provide a callable function of the form::
+
+              def generate_version(version):
+                  return next_version
+
+          Alternatively, server-side versioning functions such as triggers,
+          or programmatic versioning schemes outside of the version id
+          generator may be used, by specifying the value ``False``.
+          Please see :ref:`server_side_version_counter` for a discussion
+          of important points when using this option.
+
+          .. seealso::
+
+             :ref:`custom_version_counter`
+
+             :ref:`server_side_version_counter`
+
+
+        :param with_polymorphic: A tuple in the form ``(<classes>,
+            <selectable>)`` indicating the default style of "polymorphic"
+            loading, that is, which tables are queried at once. ``<classes>``
+            is a single mapper or class, or a list of mappers and/or classes,
+            indicating the inherited classes that should be loaded at once.
+            The special value ``'*'`` may be used to indicate that all
+            descending classes should be loaded immediately. The second
+            tuple argument ``<selectable>`` indicates a selectable that
+            will be used to query for multiple classes.
+
+            The :paramref:`_orm.Mapper.polymorphic_load` parameter may be
+            preferable over the use of :paramref:`_orm.Mapper.with_polymorphic`
+            in modern mappings to indicate a per-subclass technique of
+            indicating polymorphic loading styles.
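+
+            A minimal sketch selecting all subclass tables at once via
+            ``__mapper_args__``::
+
+                __mapper_args__ = {"with_polymorphic": "*"}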
+
+            .. seealso::
+
+                :ref:`with_polymorphic_mapper_config`
+
+        """
+        self.class_ = util.assert_arg_type(class_, type, "class_")
+        self._sort_key = "%s.%s" % (
+            self.class_.__module__,
+            self.class_.__name__,
+        )
+
+        self._primary_key_argument = util.to_list(primary_key)
+        self.non_primary = non_primary
+
+        self.always_refresh = always_refresh
+
+        if isinstance(version_id_col, MapperProperty):
+            self.version_id_prop = version_id_col
+            self.version_id_col = None
+        else:
+            self.version_id_col = (
+                coercions.expect(
+                    roles.ColumnArgumentOrKeyRole,
+                    version_id_col,
+                    argname="version_id_col",
+                )
+                if version_id_col is not None
+                else None
+            )
+
+        if version_id_generator is False:
+            self.version_id_generator = False
+        elif version_id_generator is None:
+            self.version_id_generator = lambda x: (x or 0) + 1
+        else:
+            self.version_id_generator = version_id_generator
+
+        self.concrete = concrete
+        self.single = False
+
+        if inherits is not None:
+            self.inherits = _parse_mapper_argument(inherits)
+        else:
+            self.inherits = None
+
+        if local_table is not None:
+            self.local_table = coercions.expect(
+                roles.StrictFromClauseRole,
+                local_table,
+                disable_inspection=True,
+                argname="local_table",
+            )
+        elif self.inherits:
+            # note this is a new flow as of 2.0 so that
+            # .local_table need not be Optional
+            self.local_table = self.inherits.local_table
+            self.single = True
+        else:
+            raise sa_exc.ArgumentError(
+                f"Mapper[{self.class_.__name__}(None)] has None for a "
+                "primary table argument and does not specify 'inherits'"
+            )
+
+        if inherit_condition is not None:
+            self.inherit_condition = coercions.expect(
+                roles.OnClauseRole, inherit_condition
+            )
+        else:
+            self.inherit_condition = None
+
+        self.inherit_foreign_keys = inherit_foreign_keys
+        self._init_properties = dict(properties) if properties else {}
+        self._delete_orphans = []
+        self.batch = batch
+        self.eager_defaults = eager_defaults
+        self.column_prefix = column_prefix
+
+        # interim - polymorphic_on is further refined in
+        # _configure_polymorphic_setter
+        self.polymorphic_on = (
+            coercions.expect(  # type: ignore
+                roles.ColumnArgumentOrKeyRole,
+                polymorphic_on,
+                argname="polymorphic_on",
+            )
+            if polymorphic_on is not None
+            else None
+        )
+        self.polymorphic_abstract = polymorphic_abstract
+        self._dependency_processors = []
+        self.validators = util.EMPTY_DICT
+        self.passive_updates = passive_updates
+        self.passive_deletes = passive_deletes
+        self.legacy_is_orphan = legacy_is_orphan
+        self._clause_adapter = None
+        self._requires_row_aliasing = False
+        self._inherits_equated_pairs = None
+        self._memoized_values = {}
+        self._compiled_cache_size = _compiled_cache_size
+        self._reconstructor = None
+        self.allow_partial_pks = allow_partial_pks
+
+        if self.inherits and not self.concrete:
+            self.confirm_deleted_rows = False
+        else:
+            self.confirm_deleted_rows = confirm_deleted_rows
+
+        self._set_with_polymorphic(with_polymorphic)
+        self.polymorphic_load = polymorphic_load
+
+        # our 'polymorphic identity', a value that when located in a
+        # result set row indicates this Mapper should be used to construct
+        # the object instance for that row.
+        self.polymorphic_identity = polymorphic_identity
+
+        # a dictionary of 'polymorphic identity' names, associating those
+        # names with Mappers that will be used to construct object instances
+        # upon a select operation.
+        if _polymorphic_map is None:
+            self.polymorphic_map = {}
+        else:
+            self.polymorphic_map = _polymorphic_map
+
+        if include_properties is not None:
+            self.include_properties = util.to_set(include_properties)
+        else:
+            self.include_properties = None
+        if exclude_properties:
+            self.exclude_properties = util.to_set(exclude_properties)
+        else:
+            self.exclude_properties = None
+
+        # prevent this mapper from being constructed
+        # while a configure_mappers() is occurring (and defer a
+        # configure_mappers() until construction succeeds)
+        with _CONFIGURE_MUTEX:
+            cast("MapperEvents", self.dispatch._events)._new_mapper_instance(
+                class_, self
+            )
+            self._configure_inheritance()
+            self._configure_class_instrumentation()
+            self._configure_properties()
+            self._configure_polymorphic_setter()
+            self._configure_pks()
+            self.registry._flag_new_mapper(self)
+            self._log("constructed")
+            self._expire_memoizations()
+
+        self.dispatch.after_mapper_constructed(self, self.class_)
+
+    def _prefer_eager_defaults(self, dialect, table):
+        if self.eager_defaults == "auto":
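+            # "auto" enables eager defaults only when the fetch is cheap:
+            # the table supports implicit RETURNING, it has server-generated
+            # default columns, and the dialect supports RETURNING with
+            # executemany (insertmanyvalues)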
+            if not table.implicit_returning:
+                return False
+
+            return (
+                table in self._server_default_col_keys
+                and dialect.insert_executemany_returning
+            )
+        else:
+            return self.eager_defaults
+
+    def _gen_cache_key(self, anon_map, bindparams):
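+        # a Mapper is a per-class singleton; its identity alone suffices
+        # as a SQL cache key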
+        return (self,)
+
+    # ### BEGIN
+    # ATTRIBUTE DECLARATIONS START HERE
+
+    is_mapper = True
+    """Part of the inspection API."""
+
+    represents_outer_join = False
+
+    registry: _RegistryType
+
+    @property
+    def mapper(self) -> Mapper[_O]:
+        """Part of the inspection API.
+
+        Returns self.
+
+        """
+        return self
+
+    @property
+    def entity(self):
+        r"""Part of the inspection API.
+
+        Returns self.class\_.
+
+        """
+        return self.class_
+
+    class_: Type[_O]
+    """The class to which this :class:`_orm.Mapper` is mapped."""
+
+    _identity_class: Type[_O]
+
+    _delete_orphans: List[Tuple[str, Type[Any]]]
+    _dependency_processors: List[DependencyProcessor]
+    _memoized_values: Dict[Any, Callable[[], Any]]
+    _inheriting_mappers: util.WeakSequence[Mapper[Any]]
+    _all_tables: Set[TableClause]
+    _polymorphic_attr_key: Optional[str]
+
+    _pks_by_table: Dict[FromClause, OrderedSet[ColumnClause[Any]]]
+    _cols_by_table: Dict[FromClause, OrderedSet[ColumnElement[Any]]]
+
+    _props: util.OrderedDict[str, MapperProperty[Any]]
+    _init_properties: Dict[str, MapperProperty[Any]]
+
+    _columntoproperty: _ColumnMapping
+
+    _set_polymorphic_identity: Optional[Callable[[InstanceState[_O]], None]]
+    _validate_polymorphic_identity: Optional[
+        Callable[[Mapper[_O], InstanceState[_O], _InstanceDict], None]
+    ]
+
+    tables: Sequence[TableClause]
+    """A sequence containing the collection of :class:`_schema.Table`
+    or :class:`_schema.TableClause` objects which this :class:`_orm.Mapper`
+    is aware of.
+
+    If the mapper is mapped to a :class:`_expression.Join`, or an
+    :class:`_expression.Alias`
+    representing a :class:`_expression.Select`, the individual
+    :class:`_schema.Table`
+    objects that comprise the full construct will be represented here.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    validators: util.immutabledict[str, Tuple[str, Dict[str, Any]]]
+    """An immutable dictionary of attributes which have been decorated
+    using the :func:`_orm.validates` decorator.
+
+    The dictionary contains string attribute names as keys
+    mapped to the actual validation method.
+
+    """
+
+    always_refresh: bool
+    allow_partial_pks: bool
+    version_id_col: Optional[ColumnElement[Any]]
+
+    with_polymorphic: Optional[
+        Tuple[
+            Union[Literal["*"], Sequence[Union[Mapper[Any], Type[Any]]]],
+            Optional[FromClause],
+        ]
+    ]
+
+    version_id_generator: Optional[Union[Literal[False], Callable[[Any], Any]]]
+
+    local_table: FromClause
+    """The immediate :class:`_expression.FromClause` to which this
+    :class:`_orm.Mapper` refers.
+
+    Typically an instance of :class:`_schema.Table`; may be any
+    :class:`.FromClause`.
+
+    The "local" table is the
+    selectable that the :class:`_orm.Mapper` is directly responsible for
+    managing from an attribute access and flush perspective.   For
+    non-inheriting mappers, :attr:`.Mapper.local_table` will be the same
+    as :attr:`.Mapper.persist_selectable`.  For inheriting mappers,
+    :attr:`.Mapper.local_table` refers to the specific portion of
+    :attr:`.Mapper.persist_selectable` that includes the columns to which
+    this :class:`.Mapper` is loading/persisting, such as a particular
+    :class:`.Table` within a join.
+
+    .. seealso::
+
+        :attr:`_orm.Mapper.persist_selectable`
+
+        :attr:`_orm.Mapper.selectable`
+
+    """
+
+    persist_selectable: FromClause
+    """The :class:`_expression.FromClause` to which this :class:`_orm.Mapper`
+    is mapped.
+
+    Typically an instance of :class:`_schema.Table`; may be any
+    :class:`.FromClause`.
+
+    The :attr:`_orm.Mapper.persist_selectable` is similar to
+    :attr:`.Mapper.local_table`, but represents the :class:`.FromClause` that
+    represents the inheriting class hierarchy overall in an inheritance
+    scenario.
+
+    :attr:`.Mapper.persist_selectable` is also separate from the
+    :attr:`.Mapper.selectable` attribute, the latter of which may be an
+    alternate subquery used for selecting columns.
+    :attr:`.Mapper.persist_selectable` is oriented towards columns that
+    will be written on a persist operation.
+
+    .. seealso::
+
+        :attr:`_orm.Mapper.selectable`
+
+        :attr:`_orm.Mapper.local_table`
+
+    """
+
+    inherits: Optional[Mapper[Any]]
+    """References the :class:`_orm.Mapper` which this :class:`_orm.Mapper`
+    inherits from, if any.
+
+    """
+
+    inherit_condition: Optional[ColumnElement[bool]]
+
+    configured: bool = False
+    """Represent ``True`` if this :class:`_orm.Mapper` has been configured.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    .. seealso::
+
+        :func:`.configure_mappers`.
+
+    """
+
+    concrete: bool
+    """Represent ``True`` if this :class:`_orm.Mapper` is a concrete
+    inheritance mapper.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    primary_key: Tuple[Column[Any], ...]
+    """An iterable containing the collection of :class:`_schema.Column`
+    objects
+    which comprise the 'primary key' of the mapped table, from the
+    perspective of this :class:`_orm.Mapper`.
+
+    This collection is computed against the selectable in
+    :attr:`_orm.Mapper.persist_selectable`.
+    In the case of inheriting mappers, some columns may be managed by a
+    superclass mapper.  For example, in the case of a
+    :class:`_expression.Join`, the
+    primary key is determined by all of the primary key columns across all
+    tables referenced by the :class:`_expression.Join`.
+
+    The list is also not necessarily the same as the primary key column
+    collection associated with the underlying tables; the :class:`_orm.Mapper`
+    features a ``primary_key`` argument that can override what the
+    :class:`_orm.Mapper` considers as primary key columns.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    class_manager: ClassManager[_O]
+    """The :class:`.ClassManager` which maintains event listeners
+    and class-bound descriptors for this :class:`_orm.Mapper`.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    single: bool
+    """Represent ``True`` if this :class:`_orm.Mapper` is a single table
+    inheritance mapper.
+
+    As of 2.0, :attr:`_orm.Mapper.local_table` refers to the inherited
+    mapper's table when this flag is set, rather than being ``None``.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    non_primary: bool
+    """Represent ``True`` if this :class:`_orm.Mapper` is a "non-primary"
+    mapper, e.g. a mapper that is used only to select rows but not for
+    persistence management.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    polymorphic_on: Optional[KeyedColumnElement[Any]]
+    """The :class:`_schema.Column` or SQL expression specified as the
+    ``polymorphic_on`` argument
+    for this :class:`_orm.Mapper`, within an inheritance scenario.
+
+    This attribute is normally a :class:`_schema.Column` instance but
+    may also be an expression, such as one derived from
+    :func:`.cast`.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    polymorphic_map: Dict[Any, Mapper[Any]]
+    """A mapping of "polymorphic identity" identifiers mapped to
+    :class:`_orm.Mapper` instances, within an inheritance scenario.
+
+    The identifiers can be of any type which is comparable to the
+    type of column represented by :attr:`_orm.Mapper.polymorphic_on`.
+
+    An inheritance chain of mappers will all reference the same
+    polymorphic map object.  The object is used to correlate incoming
+    result rows to target mappers.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    polymorphic_identity: Optional[Any]
+    """Represent an identifier which is matched against the
+    :attr:`_orm.Mapper.polymorphic_on` column during result row loading.
+
+    Used only with inheritance, this object can be of any type which is
+    comparable to the type of column represented by
+    :attr:`_orm.Mapper.polymorphic_on`.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    base_mapper: Mapper[Any]
+    """The base-most :class:`_orm.Mapper` in an inheritance chain.
+
+    In a non-inheriting scenario, this attribute will always be this
+    :class:`_orm.Mapper`.   In an inheritance scenario, it references
+    the :class:`_orm.Mapper` which is parent to all other :class:`_orm.Mapper`
+    objects in the inheritance chain.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    columns: ReadOnlyColumnCollection[str, Column[Any]]
+    """A collection of :class:`_schema.Column` or other scalar expression
+    objects maintained by this :class:`_orm.Mapper`.
+
+    The collection behaves the same as that of the ``c`` attribute on
+    any :class:`_schema.Table` object,
+    except that only those columns included in
+    this mapping are present, and are keyed based on the attribute name
+    defined in the mapping, not necessarily the ``key`` attribute of the
+    :class:`_schema.Column` itself.   Additionally, scalar expressions mapped
+    by :func:`.column_property` are also present here.
+
+    This is a *read only* attribute determined during mapper construction.
+    Behavior is undefined if directly modified.
+
+    """
+
+    c: ReadOnlyColumnCollection[str, Column[Any]]
+    """A synonym for :attr:`_orm.Mapper.columns`."""
+
+    @util.non_memoized_property
+    @util.deprecated("1.3", "Use .persist_selectable")
+    def mapped_table(self):
+        return self.persist_selectable
+
+    @util.memoized_property
+    def _path_registry(self) -> CachingEntityRegistry:
+        return PathRegistry.per_mapper(self)
+
+    def _configure_inheritance(self):
+        """Configure settings related to inheriting and/or inherited mappers
+        being present."""
+
+        # a set of all mappers which inherit from this one.
+        self._inheriting_mappers = util.WeakSequence()
+
+        if self.inherits:
+            if not issubclass(self.class_, self.inherits.class_):
+                raise sa_exc.ArgumentError(
+                    "Class '%s' does not inherit from '%s'"
+                    % (self.class_.__name__, self.inherits.class_.__name__)
+                )
+
+            self.dispatch._update(self.inherits.dispatch)
+
+            if self.non_primary != self.inherits.non_primary:
+                np = "primary" if not self.non_primary else "non-primary"
+                raise sa_exc.ArgumentError(
+                    "Inheritance of %s mapper for class '%s' is "
+                    "only allowed from a %s mapper"
+                    % (np, self.class_.__name__, np)
+                )
+
+            if self.single:
+                self.persist_selectable = self.inherits.persist_selectable
+            elif self.local_table is not self.inherits.local_table:
+                if self.concrete:
+                    self.persist_selectable = self.local_table
+                    for mapper in self.iterate_to_root():
+                        if mapper.polymorphic_on is not None:
+                            mapper._requires_row_aliasing = True
+                else:
+                    if self.inherit_condition is None:
+                        # figure out inherit condition from our table to the
+                        # immediate table of the inherited mapper, not its
+                        # full table which could pull in other stuff we don't
+                        # want (allows test/inheritance.InheritTest4 to pass)
+                        try:
+                            self.inherit_condition = sql_util.join_condition(
+                                self.inherits.local_table, self.local_table
+                            )
+                        except sa_exc.NoForeignKeysError as nfe:
+                            assert self.inherits.local_table is not None
+                            assert self.local_table is not None
+                            raise sa_exc.NoForeignKeysError(
+                                "Can't determine the inherit condition "
+                                "between inherited table '%s' and "
+                                "inheriting "
+                                "table '%s'; tables have no "
+                                "foreign key relationships established.  "
+                                "Please ensure the inheriting table has "
+                                "a foreign key relationship to the "
+                                "inherited "
+                                "table, or provide an "
+                                "'on clause' using "
+                                "the 'inherit_condition' mapper argument."
+                                % (
+                                    self.inherits.local_table.description,
+                                    self.local_table.description,
+                                )
+                            ) from nfe
+                        except sa_exc.AmbiguousForeignKeysError as afe:
+                            assert self.inherits.local_table is not None
+                            assert self.local_table is not None
+                            raise sa_exc.AmbiguousForeignKeysError(
+                                "Can't determine the inherit condition "
+                                "between inherited table '%s' and "
+                                "inheriting "
+                                "table '%s'; tables have more than one "
+                                "foreign key relationship established.  "
+                                "Please specify the 'on clause' using "
+                                "the 'inherit_condition' mapper argument."
+                                % (
+                                    self.inherits.local_table.description,
+                                    self.local_table.description,
+                                )
+                            ) from afe
+                    assert self.inherits.persist_selectable is not None
+                    self.persist_selectable = sql.join(
+                        self.inherits.persist_selectable,
+                        self.local_table,
+                        self.inherit_condition,
+                    )
+
+                    fks = util.to_set(self.inherit_foreign_keys)
+                    self._inherits_equated_pairs = sql_util.criterion_as_pairs(
+                        self.persist_selectable.onclause,
+                        consider_as_foreign_keys=fks,
+                    )
+            else:
+                self.persist_selectable = self.local_table
+
+            if self.polymorphic_identity is None:
+                self._identity_class = self.class_
+
+                if (
+                    not self.polymorphic_abstract
+                    and self.inherits.base_mapper.polymorphic_on is not None
+                ):
+                    util.warn(
+                        f"{self} does not indicate a 'polymorphic_identity', "
+                        "yet is part of an inheritance hierarchy that has a "
+                        f"'polymorphic_on' column of "
+                        f"'{self.inherits.base_mapper.polymorphic_on}'. "
+                        "If this is an intermediary class that should not be "
+                        "instantiated, the class may either be left unmapped, "
+                        "or may include the 'polymorphic_abstract=True' "
+                        "parameter in its Mapper arguments. To leave the "
+                        "class unmapped when using Declarative, set the "
+                        "'__abstract__ = True' attribute on the class."
+                    )
+            elif self.concrete:
+                self._identity_class = self.class_
+            else:
+                self._identity_class = self.inherits._identity_class
+
+            if self.version_id_col is None:
+                self.version_id_col = self.inherits.version_id_col
+                self.version_id_generator = self.inherits.version_id_generator
+            elif (
+                self.inherits.version_id_col is not None
+                and self.version_id_col is not self.inherits.version_id_col
+            ):
+                util.warn(
+                    "Inheriting version_id_col '%s' does not match inherited "
+                    "version_id_col '%s' and will not automatically populate "
+                    "the inherited versioning column. "
+                    "version_id_col should only be specified on "
+                    "the base-most mapper that includes versioning."
+                    % (
+                        self.version_id_col.description,
+                        self.inherits.version_id_col.description,
+                    )
+                )
+
+            self.polymorphic_map = self.inherits.polymorphic_map
+            self.batch = self.inherits.batch
+            self.inherits._inheriting_mappers.append(self)
+            self.base_mapper = self.inherits.base_mapper
+            self.passive_updates = self.inherits.passive_updates
+            self.passive_deletes = (
+                self.inherits.passive_deletes or self.passive_deletes
+            )
+            self._all_tables = self.inherits._all_tables
+
+            if self.polymorphic_identity is not None:
+                if self.polymorphic_identity in self.polymorphic_map:
+                    util.warn(
+                        "Reassigning polymorphic association for identity %r "
+                        "from %r to %r: Check for duplicate use of %r as "
+                        "value for polymorphic_identity."
+                        % (
+                            self.polymorphic_identity,
+                            self.polymorphic_map[self.polymorphic_identity],
+                            self,
+                            self.polymorphic_identity,
+                        )
+                    )
+                self.polymorphic_map[self.polymorphic_identity] = self
+
+            if self.polymorphic_load and self.concrete:
+                raise sa_exc.ArgumentError(
+                    "polymorphic_load is not currently supported "
+                    "with concrete table inheritance"
+                )
+            if self.polymorphic_load == "inline":
+                self.inherits._add_with_polymorphic_subclass(self)
+            elif self.polymorphic_load == "selectin":
+                pass
+            elif self.polymorphic_load is not None:
+                raise sa_exc.ArgumentError(
+                    "unknown argument for polymorphic_load: %r"
+                    % self.polymorphic_load
+                )
+
+        else:
+            self._all_tables = set()
+            self.base_mapper = self
+            assert self.local_table is not None
+            self.persist_selectable = self.local_table
+            if self.polymorphic_identity is not None:
+                self.polymorphic_map[self.polymorphic_identity] = self
+            self._identity_class = self.class_
+
+        if self.persist_selectable is None:
+            raise sa_exc.ArgumentError(
+                "Mapper '%s' does not have a persist_selectable specified."
+                % self
+            )
+
+    def _set_with_polymorphic(
+        self, with_polymorphic: Optional[_WithPolymorphicArg]
+    ) -> None:
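+        # accepted forms (sketch; Manager / Engineer are hypothetical
+        # subclasses):
+        #   "*"                         -> all subclass mappers
+        #   [Manager, Engineer]         -> specific classes, no selectable
+        #   ([Manager, Engineer], sel)  -> classes plus explicit FROM clause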
+        if with_polymorphic == "*":
+            self.with_polymorphic = ("*", None)
+        elif isinstance(with_polymorphic, (tuple, list)):
+            if isinstance(with_polymorphic[0], (str, tuple, list)):
+                self.with_polymorphic = cast(
+                    """Tuple[
+                        Union[
+                            Literal["*"],
+                            Sequence[Union["Mapper[Any]", Type[Any]]],
+                        ],
+                        Optional["FromClause"],
+                    ]""",
+                    with_polymorphic,
+                )
+            else:
+                self.with_polymorphic = (with_polymorphic, None)
+        elif with_polymorphic is not None:
+            raise sa_exc.ArgumentError(
+                f"Invalid setting for with_polymorphic: {with_polymorphic!r}"
+            )
+        else:
+            self.with_polymorphic = None
+
+        if self.with_polymorphic and self.with_polymorphic[1] is not None:
+            self.with_polymorphic = (
+                self.with_polymorphic[0],
+                coercions.expect(
+                    roles.StrictFromClauseRole,
+                    self.with_polymorphic[1],
+                    allow_select=True,
+                ),
+            )
+
+        if self.configured:
+            self._expire_memoizations()
+
+    def _add_with_polymorphic_subclass(self, mapper):
+        subcl = mapper.class_
+        if self.with_polymorphic is None:
+            self._set_with_polymorphic((subcl,))
+        elif self.with_polymorphic[0] != "*":
+            assert isinstance(self.with_polymorphic[0], tuple)
+            self._set_with_polymorphic(
+                (self.with_polymorphic[0] + (subcl,), self.with_polymorphic[1])
+            )
+
+    def _set_concrete_base(self, mapper):
+        """Set the given :class:`_orm.Mapper` as the 'inherits' for this
+        :class:`_orm.Mapper`, assuming this :class:`_orm.Mapper` is concrete
+        and does not already have an inherits."""
+
+        assert self.concrete
+        assert not self.inherits
+        assert isinstance(mapper, Mapper)
+        self.inherits = mapper
+        self.inherits.polymorphic_map.update(self.polymorphic_map)
+        self.polymorphic_map = self.inherits.polymorphic_map
+        for mapper in self.iterate_to_root():
+            if mapper.polymorphic_on is not None:
+                mapper._requires_row_aliasing = True
+        self.batch = self.inherits.batch
+        for mp in self.self_and_descendants:
+            mp.base_mapper = self.inherits.base_mapper
+        self.inherits._inheriting_mappers.append(self)
+        self.passive_updates = self.inherits.passive_updates
+        self._all_tables = self.inherits._all_tables
+
+        for key, prop in mapper._props.items():
+            if key not in self._props and not self._should_exclude(
+                key, key, local=False, column=None
+            ):
+                self._adapt_inherited_property(key, prop, False)
+
+    def _set_polymorphic_on(self, polymorphic_on):
+        self.polymorphic_on = polymorphic_on
+        self._configure_polymorphic_setter(True)
+
+    def _configure_class_instrumentation(self):
+        """If this mapper is to be a primary mapper (i.e. the
+        non_primary flag is not set), associate this Mapper with the
+        given class and entity name.
+
+        Subsequent calls to ``class_mapper()`` for the ``class_`` / ``entity``
+        name combination will return this mapper.  Also decorate the
+        `__init__` method on the mapped class to include optional
+        auto-session attachment logic.
+
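+        E.g., once this step completes, inspection lookups resolve to this
+        mapper (a sketch; ``User`` is a hypothetical mapped class)::
+
+            from sqlalchemy.orm import class_mapper
+
+            mapper = class_mapper(User)  # returns this primary Mapper
+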
+        """
+
+        # we expect that declarative has applied the class manager
+        # already and set up a registry.  if this is None,
+        # this raises as of 2.0.
+        manager = attributes.opt_manager_of_class(self.class_)
+
+        if self.non_primary:
+            if not manager or not manager.is_mapped:
+                raise sa_exc.InvalidRequestError(
+                    "Class %s has no primary mapper configured.  Configure "
+                    "a primary mapper first before setting up a non primary "
+                    "Mapper." % self.class_
+                )
+            self.class_manager = manager
+
+            assert manager.registry is not None
+            self.registry = manager.registry
+            self._identity_class = manager.mapper._identity_class
+            manager.registry._add_non_primary_mapper(self)
+            return
+
+        if manager is None or not manager.registry:
+            raise sa_exc.InvalidRequestError(
+                "The _mapper() function and Mapper() constructor may not be "
+                "invoked directly outside of a declarative registry."
+                " Please use the sqlalchemy.orm.registry.map_imperatively() "
+                "function for a classical mapping."
+            )
+
+        self.dispatch.instrument_class(self, self.class_)
+
+        # this invokes the class_instrument event and sets up
+        # the __init__ method.  documented behavior is that this must
+        # occur after the instrument_class event above.
+        # yes two events with the same two words reversed and different APIs.
+        # :(
+
+        manager = instrumentation.register_class(
+            self.class_,
+            mapper=self,
+            expired_attribute_loader=util.partial(
+                loading.load_scalar_attributes, self
+            ),
+            # finalize flag means instrument the __init__ method
+            # and call the class_instrument event
+            finalize=True,
+        )
+
+        self.class_manager = manager
+
+        assert manager.registry is not None
+        self.registry = manager.registry
+
+        # The remaining members can be added by any mapper,
+        # whether or not an entity name ("e_name") is present.
+        if manager.mapper is None:
+            return
+
+        event.listen(manager, "init", _event_on_init, raw=True)
+
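+        # scan class attributes for @reconstructor / @validates markers,
+        # e.g. (sketch; method names hypothetical):
+        #
+        #     @validates("email")
+        #     def check_email(self, key, address):  # sets __sa_validators__
+        #         return address
+        #
+        #     @reconstructor
+        #     def init_on_load(self):  # sets __sa_reconstructor__
+        #         ...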
+        for key, method in util.iterate_attributes(self.class_):
+            if key == "__init__" and hasattr(method, "_sa_original_init"):
+                method = method._sa_original_init
+                if hasattr(method, "__func__"):
+                    method = method.__func__
+            if callable(method):
+                if hasattr(method, "__sa_reconstructor__"):
+                    self._reconstructor = method
+                    event.listen(manager, "load", _event_on_load, raw=True)
+                elif hasattr(method, "__sa_validators__"):
+                    validation_opts = method.__sa_validation_opts__
+                    for name in method.__sa_validators__:
+                        if name in self.validators:
+                            raise sa_exc.InvalidRequestError(
+                                "A validation function for mapped "
+                                "attribute %r on mapper %s already exists."
+                                % (name, self)
+                            )
+                        self.validators = self.validators.union(
+                            {name: (method, validation_opts)}
+                        )
+
+    def _set_dispose_flags(self) -> None:
+        self.configured = True
+        self._ready_for_configure = True
+        self._dispose_called = True
+
+        self.__dict__.pop("_configure_failed", None)
+
+    def _str_arg_to_mapped_col(self, argname: str, key: str) -> Column[Any]:
+        try:
+            prop = self._props[key]
+        except KeyError as err:
+            raise sa_exc.ArgumentError(
+                f"Can't determine {argname} column '{key}' - "
+                "no attribute is mapped to this name."
+            ) from err
+        try:
+            expr = prop.expression
+        except AttributeError as ae:
+            raise sa_exc.ArgumentError(
+                f"Can't determine {argname} column '{key}'; "
+                "property does not refer to a single mapped Column"
+            ) from ae
+        if not isinstance(expr, Column):
+            raise sa_exc.ArgumentError(
+                f"Can't determine {argname} column '{key}'; "
+                "property does not refer to a single "
+                "mapped Column"
+            )
+        return expr
+
+    def _configure_pks(self) -> None:
+        self.tables = sql_util.find_tables(self.persist_selectable)
+
+        self._all_tables.update(t for t in self.tables)
+
+        self._pks_by_table = {}
+        self._cols_by_table = {}
+
+        all_cols = util.column_set(
+            chain(*[col.proxy_set for col in self._columntoproperty])
+        )
+
+        pk_cols = util.column_set(c for c in all_cols if c.primary_key)
+
+        # identify primary key columns which are also mapped by this mapper.
+        for fc in set(self.tables).union([self.persist_selectable]):
+            if fc.primary_key and pk_cols.issuperset(fc.primary_key):
+                # ordering is important since it determines the ordering of
+                # mapper.primary_key (and therefore query.get())
+                self._pks_by_table[fc] = util.ordered_column_set(  # type: ignore  # noqa: E501
+                    fc.primary_key
+                ).intersection(
+                    pk_cols
+                )
+            self._cols_by_table[fc] = util.ordered_column_set(fc.c).intersection(  # type: ignore  # noqa: E501
+                all_cols
+            )
+
+        if self._primary_key_argument:
+            coerced_pk_arg = [
+                (
+                    self._str_arg_to_mapped_col("primary_key", c)
+                    if isinstance(c, str)
+                    else c
+                )
+                for c in (
+                    coercions.expect(
+                        roles.DDLConstraintColumnRole,
+                        coerce_pk,
+                        argname="primary_key",
+                    )
+                    for coerce_pk in self._primary_key_argument
+                )
+            ]
+        else:
+            coerced_pk_arg = None
+
+        # if explicit PK argument sent, add those columns to the
+        # primary key mappings
+        if coerced_pk_arg:
+            for k in coerced_pk_arg:
+                if k.table not in self._pks_by_table:
+                    self._pks_by_table[k.table] = util.OrderedSet()
+                self._pks_by_table[k.table].add(k)
+
+        # otherwise, see that we got a full PK for the mapped table
+        elif (
+            self.persist_selectable not in self._pks_by_table
+            or len(self._pks_by_table[self.persist_selectable]) == 0
+        ):
+            raise sa_exc.ArgumentError(
+                "Mapper %s could not assemble any primary "
+                "key columns for mapped table '%s'"
+                % (self, self.persist_selectable.description)
+            )
+        elif self.local_table not in self._pks_by_table and isinstance(
+            self.local_table, schema.Table
+        ):
+            util.warn(
+                "Could not assemble any primary "
+                "keys for locally mapped table '%s' - "
+                "no rows will be persisted in this Table."
+                % self.local_table.description
+            )
+
+        if (
+            self.inherits
+            and not self.concrete
+            and not self._primary_key_argument
+        ):
+            # if inheriting, the "primary key" for this mapper is
+            # that of the inherited mapper (unless concrete or explicit)
+            self.primary_key = self.inherits.primary_key
+        else:
+            # determine primary key from argument or persist_selectable pks
+            primary_key: Collection[ColumnElement[Any]]
+
+            if coerced_pk_arg:
+                primary_key = [
+                    cc if cc is not None else c
+                    for cc, c in (
+                        (self.persist_selectable.corresponding_column(c), c)
+                        for c in coerced_pk_arg
+                    )
+                ]
+            else:
+                # if heuristically determined PKs, reduce to the minimal set
+                # of columns by eliminating FK->PK pairs for a multi-table
+                # expression.   May over-reduce for some kinds of UNIONs
+                # / CTEs; use explicit PK argument for these special cases
+                primary_key = sql_util.reduce_columns(
+                    self._pks_by_table[self.persist_selectable],
+                    ignore_nonexistent_tables=True,
+                )
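+                # e.g. (sketch): for employee JOIN engineer ON
+                # employee.id == engineer.id, both id columns appear as
+                # PK of the joined selectable; reduce_columns() keeps
+                # only employee.id, dropping the FK pair engineer.id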
+
+            if len(primary_key) == 0:
+                raise sa_exc.ArgumentError(
+                    "Mapper %s could not assemble any primary "
+                    "key columns for mapped table '%s'"
+                    % (self, self.persist_selectable.description)
+                )
+
+            self.primary_key = tuple(primary_key)
+            self._log("Identified primary key columns: %s", primary_key)
+
+        # determine cols that aren't expressed within our tables; mark these
+        # as "read only" properties which are refreshed upon INSERT/UPDATE
+        self._readonly_props = {
+            self._columntoproperty[col]
+            for col in self._columntoproperty
+            if self._columntoproperty[col] not in self._identity_key_props
+            and (
+                not hasattr(col, "table")
+                or col.table not in self._cols_by_table
+            )
+        }
+
+    def _configure_properties(self) -> None:
+        self.columns = self.c = sql_base.ColumnCollection()  # type: ignore
+
+        # object attribute names mapped to MapperProperty objects
+        self._props = util.OrderedDict()
+
+        # table columns mapped to MapperProperty
+        self._columntoproperty = _ColumnMapping(self)
+
+        explicit_col_props_by_column: Dict[
+            KeyedColumnElement[Any], Tuple[str, ColumnProperty[Any]]
+        ] = {}
+        explicit_col_props_by_key: Dict[str, ColumnProperty[Any]] = {}
+
+        # step 1: go through properties that were explicitly passed
+        # in the properties dictionary.  For Columns that are local, put them
+        # aside in a separate collection we will reconcile with the Table
+        # that's given.  For other properties, set them up in _props now.
+        if self._init_properties:
+            for key, prop_arg in self._init_properties.items():
+                if not isinstance(prop_arg, MapperProperty):
+                    possible_col_prop = self._make_prop_from_column(
+                        key, prop_arg
+                    )
+                else:
+                    possible_col_prop = prop_arg
+
+                # issue #8705.  if the explicit property is actually a
+                # Column that is local to the local Table, don't set it up
+                # in ._props yet, integrate it into the order given within
+                # the Table.
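+                # e.g. (sketch, hypothetical names):
+                #   properties={"data": user_table.c.data}
+                # where user_table is also this mapper's local_table; the
+                # column is mapped in Table-definition order, not dict order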
+
+                _map_as_property_now = True
+                if isinstance(possible_col_prop, properties.ColumnProperty):
+                    for given_col in possible_col_prop.columns:
+                        if self.local_table.c.contains_column(given_col):
+                            _map_as_property_now = False
+                            explicit_col_props_by_key[key] = possible_col_prop
+                            explicit_col_props_by_column[given_col] = (
+                                key,
+                                possible_col_prop,
+                            )
+
+                if _map_as_property_now:
+                    self._configure_property(
+                        key,
+                        possible_col_prop,
+                        init=False,
+                    )
+
+        # step 2: pull properties from the inherited mapper.  reconcile
+        # columns with those which are explicit above.  for properties that
+        # are only in the inheriting mapper, set them up as local props
+        if self.inherits:
+            for key, inherited_prop in self.inherits._props.items():
+                if self._should_exclude(key, key, local=False, column=None):
+                    continue
+
+                incoming_prop = explicit_col_props_by_key.get(key)
+                if incoming_prop:
+                    new_prop = self._reconcile_prop_with_incoming_columns(
+                        key,
+                        inherited_prop,
+                        warn_only=False,
+                        incoming_prop=incoming_prop,
+                    )
+                    explicit_col_props_by_key[key] = new_prop
+
+                    for inc_col in incoming_prop.columns:
+                        explicit_col_props_by_column[inc_col] = (
+                            key,
+                            new_prop,
+                        )
+                elif key not in self._props:
+                    self._adapt_inherited_property(key, inherited_prop, False)
+
+        # step 3.  Iterate through all columns in the persist selectable.
+        # this includes not only columns in the local table / fromclause,
+        # but also those columns in the superclass table if we are joined
+        # inh or single inh mapper.  map these columns as well. additional
+        # reconciliation against inherited columns occurs here also.
+
+        for column in self.persist_selectable.columns:
+            if column in explicit_col_props_by_column:
+                # column was explicitly passed to properties; configure
+                # it now in the order in which it corresponds to the
+                # Table / selectable
+                key, prop = explicit_col_props_by_column[column]
+                self._configure_property(key, prop, init=False)
+                continue
+
+            elif column in self._columntoproperty:
+                continue
+
+            column_key = (self.column_prefix or "") + column.key
+            if self._should_exclude(
+                column.key,
+                column_key,
+                local=self.local_table.c.contains_column(column),
+                column=column,
+            ):
+                continue
+
+            # adjust the "key" used for this column to that
+            # of the inheriting mapper
+            for mapper in self.iterate_to_root():
+                if column in mapper._columntoproperty:
+                    column_key = mapper._columntoproperty[column].key
+
+            self._configure_property(
+                column_key,
+                column,
+                init=False,
+                setparent=True,
+            )
+
+    def _configure_polymorphic_setter(self, init=False):
+        """Configure an attribute on the mapper representing the
+        'polymorphic_on' column, if applicable, and not
+        already generated by _configure_properties (which is typical).
+
+        Also create a setter function which will assign this
+        attribute to the value of the 'polymorphic_identity'
+        upon instance construction, also if applicable.  This
+        routine will run when an instance is created.
+
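+        E.g., a sketch of the style of mapping this supports (names are
+        illustrative; assumes the usual Declarative imports)::
+
+            class Employee(Base):
+                __tablename__ = "employee"
+
+                id = mapped_column(Integer, primary_key=True)
+                type = mapped_column(String(50))
+
+                __mapper_args__ = {
+                    "polymorphic_identity": "employee",
+                    "polymorphic_on": type,
+                }
+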
+        """
+        setter = False
+        polymorphic_key: Optional[str] = None
+
+        if self.polymorphic_on is not None:
+            setter = True
+
+            if isinstance(self.polymorphic_on, str):
+                # polymorphic_on specified as a string - link
+                # it to mapped ColumnProperty
+                try:
+                    self.polymorphic_on = self._props[self.polymorphic_on]
+                except KeyError as err:
+                    raise sa_exc.ArgumentError(
+                        "Can't determine polymorphic_on "
+                        "value '%s' - no attribute is "
+                        "mapped to this name." % self.polymorphic_on
+                    ) from err
+
+            if self.polymorphic_on in self._columntoproperty:
+                # polymorphic_on is a column that is already mapped
+                # to a ColumnProperty
+                prop = self._columntoproperty[self.polymorphic_on]
+            elif isinstance(self.polymorphic_on, MapperProperty):
+                # polymorphic_on is directly a MapperProperty,
+                # ensure it's a ColumnProperty
+                if not isinstance(
+                    self.polymorphic_on, properties.ColumnProperty
+                ):
+                    raise sa_exc.ArgumentError(
+                        "Only direct column-mapped "
+                        "property or SQL expression "
+                        "can be passed for polymorphic_on"
+                    )
+                prop = self.polymorphic_on
+            else:
+                # polymorphic_on is a Column or SQL expression and
+                # doesn't appear to be mapped.  this means it can be
+                # 1. only present in the with_polymorphic selectable, or
+                # 2. a totally standalone SQL expression which we'd
+                # hope is compatible with this mapper's persist_selectable
+                col = self.persist_selectable.corresponding_column(
+                    self.polymorphic_on
+                )
+                if col is None:
+                    # polymorphic_on doesn't derive from any
+                    # column/expression isn't present in the mapped
+                    # table. we will make a "hidden" ColumnProperty
+                    # for it. Just check that if it's directly a
+                    # schema.Column and we have with_polymorphic, it's
+                    # likely a user error if the schema.Column isn't
+                    # represented somehow in either persist_selectable or
+                    # with_polymorphic.   Otherwise as of 0.7.4 we
+                    # just go with it and assume the user wants it
+                    # that way (i.e. a CASE statement)
+                    setter = False
+                    instrument = False
+                    col = self.polymorphic_on
+                    if isinstance(col, schema.Column) and (
+                        self.with_polymorphic is None
+                        or self.with_polymorphic[1] is None
+                        or self.with_polymorphic[1].corresponding_column(col)
+                        is None
+                    ):
+                        raise sa_exc.InvalidRequestError(
+                            "Could not map polymorphic_on column "
+                            "'%s' to the mapped table - polymorphic "
+                            "loads will not function properly"
+                            % col.description
+                        )
+                else:
+                    # column/expression that polymorphic_on derives from
+                    # is present in our mapped table
+                    # and is probably mapped, but polymorphic_on itself
+                    # is not.  This happens when
+                    # the polymorphic_on is only directly present in the
+                    # with_polymorphic selectable, as when using
+                    # polymorphic_union.
+                    # we'll make a separate ColumnProperty for it.
+                    instrument = True
+                key = getattr(col, "key", None)
+                if key:
+                    if self._should_exclude(key, key, False, col):
+                        raise sa_exc.InvalidRequestError(
+                            "Cannot exclude or override the "
+                            "discriminator column %r" % key
+                        )
+                else:
+                    self.polymorphic_on = col = col.label("_sa_polymorphic_on")
+                    key = col.key
+
+                prop = properties.ColumnProperty(col, _instrument=instrument)
+                self._configure_property(key, prop, init=init, setparent=True)
+
+            # the actual polymorphic_on should be the first public-facing
+            # column in the property
+            self.polymorphic_on = prop.columns[0]
+            polymorphic_key = prop.key
+        else:
+            # no polymorphic_on was set.
+            # check inheriting mappers for one.
+            for mapper in self.iterate_to_root():
+                # determine if polymorphic_on of the parent
+                # should be propagated here.   If the col
+                # is present in our mapped table, or if our mapped
+                # table is the same as the parent (i.e. single table
+                # inheritance), we can use it
+                if mapper.polymorphic_on is not None:
+                    if self.persist_selectable is mapper.persist_selectable:
+                        self.polymorphic_on = mapper.polymorphic_on
+                    else:
+                        self.polymorphic_on = (
+                            self.persist_selectable
+                        ).corresponding_column(mapper.polymorphic_on)
+                    # we can use the parent mapper's _set_polymorphic_identity
+                    # directly; it ensures the polymorphic_identity of the
+                    # instance's mapper is used, so it is portable to
+                    # subclasses.
+                    if self.polymorphic_on is not None:
+                        self._set_polymorphic_identity = (
+                            mapper._set_polymorphic_identity
+                        )
+                        self._polymorphic_attr_key = (
+                            mapper._polymorphic_attr_key
+                        )
+                        self._validate_polymorphic_identity = (
+                            mapper._validate_polymorphic_identity
+                        )
+                    else:
+                        self._set_polymorphic_identity = None
+                        self._polymorphic_attr_key = None
+                    return
+
+        if self.polymorphic_abstract and self.polymorphic_on is None:
+            raise sa_exc.InvalidRequestError(
+                "The Mapper.polymorphic_abstract parameter may only be used "
+                "on a mapper hierarchy which includes the "
+                "Mapper.polymorphic_on parameter at the base of the hierarchy."
+            )
+
+        if setter:
+
+            def _set_polymorphic_identity(state):
+                dict_ = state.dict
+                # TODO: what happens if polymorphic_on column attribute name
+                # does not match .key?
+
+                polymorphic_identity = (
+                    state.manager.mapper.polymorphic_identity
+                )
+                if (
+                    polymorphic_identity is None
+                    and state.manager.mapper.polymorphic_abstract
+                ):
+                    raise sa_exc.InvalidRequestError(
+                        f"Can't instantiate class for {state.manager.mapper}; "
+                        "mapper is marked polymorphic_abstract=True"
+                    )
+
+                state.get_impl(polymorphic_key).set(
+                    state,
+                    dict_,
+                    polymorphic_identity,
+                    None,
+                )
+
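+            # effect (sketch; Manager hypothetical): constructing Manager()
+            # with polymorphic_identity "manager" immediately populates
+            # instance.type with "manager", before any flush occurs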
+            self._polymorphic_attr_key = polymorphic_key
+
+            def _validate_polymorphic_identity(mapper, state, dict_):
+                if (
+                    polymorphic_key in dict_
+                    and dict_[polymorphic_key]
+                    not in mapper._acceptable_polymorphic_identities
+                ):
+                    util.warn_limited(
+                        "Flushing object %s with "
+                        "incompatible polymorphic identity %r; the "
+                        "object may not refresh and/or load correctly",
+                        (state_str(state), dict_[polymorphic_key]),
+                    )
+
+            self._set_polymorphic_identity = _set_polymorphic_identity
+            self._validate_polymorphic_identity = (
+                _validate_polymorphic_identity
+            )
+        else:
+            self._polymorphic_attr_key = None
+            self._set_polymorphic_identity = None
+
+    _validate_polymorphic_identity = None
+
+    @HasMemoized.memoized_attribute
+    def _version_id_prop(self):
+        if self.version_id_col is not None:
+            return self._columntoproperty[self.version_id_col]
+        else:
+            return None
+
+    @HasMemoized.memoized_attribute
+    def _acceptable_polymorphic_identities(self):
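+        # breadth-first walk (sketch): collect the identities of this
+        # mapper and those descendants sharing the same persist_selectable,
+        # i.e. the single-table portion of the hierarchy below us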
+        identities = set()
+
+        stack = deque([self])
+        while stack:
+            item = stack.popleft()
+            if item.persist_selectable is self.persist_selectable:
+                identities.add(item.polymorphic_identity)
+                stack.extend(item._inheriting_mappers)
+
+        return identities
+
+    @HasMemoized.memoized_attribute
+    def _prop_set(self):
+        return frozenset(self._props.values())
+
+    @util.preload_module("sqlalchemy.orm.descriptor_props")
+    def _adapt_inherited_property(self, key, prop, init):
+        descriptor_props = util.preloaded.orm_descriptor_props
+
+        if not self.concrete:
+            self._configure_property(key, prop, init=False, setparent=False)
+        elif key not in self._props:
+            # determine if the class implements this attribute; if not,
+            # or if it is implemented by the attribute that is handling the
+            # given superclass-mapped property, then we need to report that we
+            # can't use this at the instance level since we are a concrete
+            # mapper and we don't map this.  don't trip user-defined
+            # descriptors that might have side effects when invoked.
+            implementing_attribute = self.class_manager._get_class_attr_mro(
+                key, prop
+            )
+            if implementing_attribute is prop or (
+                isinstance(
+                    implementing_attribute, attributes.InstrumentedAttribute
+                )
+                and implementing_attribute._parententity is prop.parent
+            ):
+                self._configure_property(
+                    key,
+                    descriptor_props.ConcreteInheritedProperty(),
+                    init=init,
+                    setparent=True,
+                )
+
+    @util.preload_module("sqlalchemy.orm.descriptor_props")
+    def _configure_property(
+        self,
+        key: str,
+        prop_arg: Union[KeyedColumnElement[Any], MapperProperty[Any]],
+        *,
+        init: bool = True,
+        setparent: bool = True,
+        warn_for_existing: bool = False,
+    ) -> MapperProperty[Any]:
+        descriptor_props = util.preloaded.orm_descriptor_props
+        self._log(
+            "_configure_property(%s, %s)", key, prop_arg.__class__.__name__
+        )
+
+        if not isinstance(prop_arg, MapperProperty):
+            prop: MapperProperty[Any] = self._property_from_column(
+                key, prop_arg
+            )
+        else:
+            prop = prop_arg
+
+        if isinstance(prop, properties.ColumnProperty):
+            col = self.persist_selectable.corresponding_column(prop.columns[0])
+
+            # if the column is not present in the mapped table,
+            # test if a column has been added after the fact to the
+            # parent table (or their parent, etc.) [ticket:1570]
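+            # e.g. (sketch): base_table.append_column(Column("extra", String))
+            # performed after mappers were configured; the joined
+            # persist_selectable must be refreshed to see the new column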
+            if col is None and self.inherits:
+                path = [self]
+                for m in self.inherits.iterate_to_root():
+                    col = m.local_table.corresponding_column(prop.columns[0])
+                    if col is not None:
+                        for m2 in path:
+                            m2.persist_selectable._refresh_for_new_column(col)
+                        col = self.persist_selectable.corresponding_column(
+                            prop.columns[0]
+                        )
+                        break
+                    path.append(m)
+
+            # subquery expression, column not present in the mapped
+            # selectable.
+            if col is None:
+                col = prop.columns[0]
+
+                # column is coming in after _readonly_props was
+                # initialized; check for 'readonly'
+                if hasattr(self, "_readonly_props") and (
+                    not hasattr(col, "table")
+                    or col.table not in self._cols_by_table
+                ):
+                    self._readonly_props.add(prop)
+
+            else:
+                # if column is coming in after _cols_by_table was
+                # initialized, ensure the col is in the right set
+                if (
+                    hasattr(self, "_cols_by_table")
+                    and col.table in self._cols_by_table
+                    and col not in self._cols_by_table[col.table]
+                ):
+                    self._cols_by_table[col.table].add(col)
+
+            # if this properties.ColumnProperty represents the "polymorphic
+            # discriminator" column, mark it.  We'll need this when rendering
+            # columns in SELECT statements.
+            if not hasattr(prop, "_is_polymorphic_discriminator"):
+                prop._is_polymorphic_discriminator = (
+                    col is self.polymorphic_on
+                    or prop.columns[0] is self.polymorphic_on
+                )
+
+            if isinstance(col, expression.Label):
+                # new in 1.4, get column property against expressions
+                # to be addressable in subqueries
+                col.key = col._tq_key_label = key
+
+            self.columns.add(col, key)
+
+            for col in prop.columns:
+                for proxy_col in col.proxy_set:
+                    self._columntoproperty[proxy_col] = prop
+
+        if getattr(prop, "key", key) != key:
+            util.warn(
+                f"ORM mapped property {self.class_.__name__}.{prop.key} being "
+                "assigned to attribute "
+                f"{key!r} is already associated with "
+                f"attribute {prop.key!r}. The attribute will be de-associated "
+                f"from {prop.key!r}."
+            )
+
+        prop.key = key
+
+        if setparent:
+            prop.set_parent(self, init)
+
+        if key in self._props and getattr(
+            self._props[key], "_mapped_by_synonym", False
+        ):
+            syn = self._props[key]._mapped_by_synonym
+            raise sa_exc.ArgumentError(
+                "Can't call map_column=True for synonym %r=%r, "
+                "a ColumnProperty already exists keyed to the name "
+                "%r for column %r" % (syn, key, key, syn)
+            )
+
+        # replacement cases
+
+        # case one: prop is replacing a prop that we have mapped.  this is
+        # independent of whatever might be in the actual class dictionary
+        if (
+            key in self._props
+            and not isinstance(
+                self._props[key], descriptor_props.ConcreteInheritedProperty
+            )
+            and not isinstance(prop, descriptor_props.SynonymProperty)
+        ):
+            if warn_for_existing:
+                util.warn_deprecated(
+                    f"User-placed attribute {self.class_.__name__}.{key} on "
+                    f"{self} is replacing an existing ORM-mapped attribute.  "
+                    "Behavior is not fully defined in this case.  This "
+                    "use is deprecated and will raise an error in a future "
+                    "release",
+                    "2.0",
+                )
+            oldprop = self._props[key]
+            self._path_registry.pop(oldprop, None)
+
+        # case two: prop is replacing an attribute on the class of some kind.
+        # we have to be more careful here since it's normal when using
+        # Declarative that all the "declared attributes" on the class
+        # get replaced.
+        elif (
+            warn_for_existing
+            and self.class_.__dict__.get(key, None) is not None
+            and not isinstance(prop, descriptor_props.SynonymProperty)
+            and not isinstance(
+                self._props.get(key, None),
+                descriptor_props.ConcreteInheritedProperty,
+            )
+        ):
+            util.warn_deprecated(
+                f"User-placed attribute {self.class_.__name__}.{key} on "
+                f"{self} is replacing an existing class-bound "
+                "attribute of the same name.  "
+                "Behavior is not fully defined in this case.  This "
+                "use is deprecated and will raise an error in a future "
+                "release",
+                "2.0",
+            )
+
+        self._props[key] = prop
+
+        if not self.non_primary:
+            prop.instrument_class(self)
+
+        for mapper in self._inheriting_mappers:
+            mapper._adapt_inherited_property(key, prop, init)
+
+        if init:
+            prop.init()
+            prop.post_instrument_class(self)
+
+        if self.configured:
+            self._expire_memoizations()
+
+        return prop
+
+    def _make_prop_from_column(
+        self,
+        key: str,
+        column: Union[
+            Sequence[KeyedColumnElement[Any]], KeyedColumnElement[Any]
+        ],
+    ) -> ColumnProperty[Any]:
+        columns = util.to_list(column)
+        mapped_column = []
+        for c in columns:
+            mc = self.persist_selectable.corresponding_column(c)
+            if mc is None:
+                mc = self.local_table.corresponding_column(c)
+                if mc is not None:
+                    # if the column is in the local table but not the
+                    # mapped table, this corresponds to adding a
+                    # column after the fact to the local table.
+                    # [ticket:1523]
+                    self.persist_selectable._refresh_for_new_column(mc)
+                mc = self.persist_selectable.corresponding_column(c)
+                if mc is None:
+                    raise sa_exc.ArgumentError(
+                        "When configuring property '%s' on %s, "
+                        "column '%s' is not represented in the mapper's "
+                        "table. Use the `column_property()` function to "
+                        "force this column to be mapped as a read-only "
+                        "attribute." % (key, self, c)
+                    )
+            mapped_column.append(mc)
+        return properties.ColumnProperty(*mapped_column)
+
+    def _reconcile_prop_with_incoming_columns(
+        self,
+        key: str,
+        existing_prop: MapperProperty[Any],
+        warn_only: bool,
+        incoming_prop: Optional[ColumnProperty[Any]] = None,
+        single_column: Optional[KeyedColumnElement[Any]] = None,
+    ) -> ColumnProperty[Any]:
+        if incoming_prop and (
+            self.concrete
+            or not isinstance(existing_prop, properties.ColumnProperty)
+        ):
+            return incoming_prop
+
+        existing_column = existing_prop.columns[0]
+
+        if incoming_prop and existing_column in incoming_prop.columns:
+            return incoming_prop
+
+        if incoming_prop is None:
+            assert single_column is not None
+            incoming_column = single_column
+            equated_pair_key = (existing_prop.columns[0], incoming_column)
+        else:
+            assert single_column is None
+            incoming_column = incoming_prop.columns[0]
+            equated_pair_key = (incoming_column, existing_prop.columns[0])
+
+        if (
+            (
+                not self._inherits_equated_pairs
+                or (equated_pair_key not in self._inherits_equated_pairs)
+            )
+            and not existing_column.shares_lineage(incoming_column)
+            and existing_column is not self.version_id_col
+            and incoming_column is not self.version_id_col
+        ):
+            msg = (
+                "Implicitly combining column %s with column "
+                "%s under attribute '%s'.  Please configure one "
+                "or more attributes for these same-named columns "
+                "explicitly."
+                % (
+                    existing_prop.columns[-1],
+                    incoming_column,
+                    key,
+                )
+            )
+            if warn_only:
+                util.warn(msg)
+            else:
+                raise sa_exc.InvalidRequestError(msg)
+
+        # existing properties.ColumnProperty from an inheriting
+        # mapper. make a copy and append our column to it
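+        # e.g. (sketch): joined inheritance where base and subclass tables
+        # each define a "name" column; the subclass attribute ends up
+        # mapping both as [subclass.c.name, base.c.name] under one key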
+        new_prop = existing_prop.copy()
+
+        new_prop.columns.insert(0, incoming_column)
+        self._log(
+            "inserting column to existing list "
+            "in properties.ColumnProperty %s",
+            key,
+        )
+        return new_prop  # type: ignore
+
+    @util.preload_module("sqlalchemy.orm.descriptor_props")
+    def _property_from_column(
+        self,
+        key: str,
+        column: KeyedColumnElement[Any],
+    ) -> ColumnProperty[Any]:
+        """generate/update a :class:`.ColumnProperty` given a
+        :class:`_schema.Column` or other SQL expression object."""
+
+        descriptor_props = util.preloaded.orm_descriptor_props
+
+        prop = self._props.get(key)
+
+        if isinstance(prop, properties.ColumnProperty):
+            return self._reconcile_prop_with_incoming_columns(
+                key,
+                prop,
+                single_column=column,
+                warn_only=prop.parent is not self,
+            )
+        elif prop is None or isinstance(
+            prop, descriptor_props.ConcreteInheritedProperty
+        ):
+            return self._make_prop_from_column(key, column)
+        else:
+            raise sa_exc.ArgumentError(
+                "WARNING: when configuring property '%s' on %s, "
+                "column '%s' conflicts with property '%r'. "
+                "To resolve this, map the column to the class under a "
+                "different name in the 'properties' dictionary.  Or, "
+                "to remove all awareness of the column entirely "
+                "(including its availability as a foreign key), "
+                "use the 'include_properties' or 'exclude_properties' "
+                "mapper arguments to control specifically which table "
+                "columns get mapped." % (key, self, column.key, prop)
+            )
+
+    @util.langhelpers.tag_method_for_warnings(
+        "This warning originated from the `configure_mappers()` process, "
+        "which was invoked automatically in response to a user-initiated "
+        "operation.",
+        sa_exc.SAWarning,
+    )
+    def _check_configure(self) -> None:
+        if self.registry._new_mappers:
+            _configure_registries({self.registry}, cascade=True)
+
+    def _post_configure_properties(self) -> None:
+        """Call the ``init()`` method on all ``MapperProperties``
+        attached to this mapper.
+
+        This is a deferred configuration step which is intended
+        to execute once all mappers have been constructed.
+
+        """
+
+        self._log("_post_configure_properties() started")
+        props = list(self._props.items())
+        for key, prop in props:
+            self._log("initialize prop %s", key)
+
+            if prop.parent is self and not prop._configure_started:
+                prop.init()
+
+            if prop._configure_finished:
+                prop.post_instrument_class(self)
+
+        self._log("_post_configure_properties() complete")
+        self.configured = True
+
+    def add_properties(self, dict_of_properties):
+        """Add the given dictionary of properties to this mapper,
+        using `add_property`.
+
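+        E.g., a sketch (names hypothetical)::
+
+            some_mapper.add_properties(
+                {
+                    "name": some_table.c.name,
+                    "addresses": relationship(Address),
+                }
+            )
+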
+        """
+        for key, value in dict_of_properties.items():
+            self.add_property(key, value)
+
+    def add_property(
+        self, key: str, prop: Union[Column[Any], MapperProperty[Any]]
+    ) -> None:
+        """Add an individual MapperProperty to this mapper.
+
+        If the mapper has not been configured yet, just adds the
+        property to the initial properties dictionary sent to the
+        constructor.  If this Mapper has already been configured, then
+        the given MapperProperty is configured immediately.
+
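+        E.g., a sketch (``Address`` hypothetical)::
+
+            some_mapper.add_property("addresses", relationship(Address))
+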
+        """
+        prop = self._configure_property(
+            key, prop, init=self.configured, warn_for_existing=True
+        )
+        assert isinstance(prop, MapperProperty)
+        self._init_properties[key] = prop
+
+    def _expire_memoizations(self) -> None:
+        for mapper in self.iterate_to_root():
+            mapper._reset_memoizations()
+
+    @property
+    def _log_desc(self) -> str:
+        return (
+            "("
+            + self.class_.__name__
+            + "|"
+            + (
+                self.local_table is not None
+                and self.local_table.description
+                or str(self.local_table)
+            )
+            + (self.non_primary and "|non-primary" or "")
+            + ")"
+        )
+
+    def _log(self, msg: str, *args: Any) -> None:
+        self.logger.info("%s " + msg, *((self._log_desc,) + args))
+
+    def _log_debug(self, msg: str, *args: Any) -> None:
+        self.logger.debug("%s " + msg, *((self._log_desc,) + args))
+
+    def __repr__(self) -> str:
+        return "<Mapper at 0x%x; %s>" % (id(self), self.class_.__name__)
+
+    def __str__(self) -> str:
+        return "Mapper[%s%s(%s)]" % (
+            self.class_.__name__,
+            self.non_primary and " (non-primary)" or "",
+            (
+                self.local_table.description
+                if self.local_table is not None
+                else self.persist_selectable.description
+            ),
+        )
+
+    def _is_orphan(self, state: InstanceState[_O]) -> bool:
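+        # sketch: _delete_orphans is populated from relationship()s that
+        # configure delete-orphan cascade, e.g.
+        # cascade="all, delete-orphan"; an instance lacking any such
+        # parent is considered an orphan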
+        orphan_possible = False
+        for mapper in self.iterate_to_root():
+            for key, cls in mapper._delete_orphans:
+                orphan_possible = True
+
+                has_parent = attributes.manager_of_class(cls).has_parent(
+                    state, key, optimistic=state.has_identity
+                )
+
+                if self.legacy_is_orphan and has_parent:
+                    return False
+                elif not self.legacy_is_orphan and not has_parent:
+                    return True
+
+        if self.legacy_is_orphan:
+            return orphan_possible
+        else:
+            return False
+
+    def has_property(self, key: str) -> bool:
+        return key in self._props
+
+    def get_property(
+        self, key: str, _configure_mappers: bool = False
+    ) -> MapperProperty[Any]:
+        """return a MapperProperty associated with the given key."""
+
+        if _configure_mappers:
+            self._check_configure()
+
+        try:
+            return self._props[key]
+        except KeyError as err:
+            raise sa_exc.InvalidRequestError(
+                f"Mapper '{self}' has no property '{key}'.  If this property "
+                "was indicated from other mappers or configure events, ensure "
+                "registry.configure() has been called."
+            ) from err
+
+    def get_property_by_column(
+        self, column: ColumnElement[_T]
+    ) -> MapperProperty[_T]:
+        """Given a :class:`_schema.Column` object, return the
+        :class:`.MapperProperty` which maps this column."""
+
+        return self._columntoproperty[column]
+
+    @property
+    def iterate_properties(self):
+        """return an iterator of all MapperProperty objects."""
+
+        return iter(self._props.values())
+
+    def _mappers_from_spec(
+        self, spec: Any, selectable: Optional[FromClause]
+    ) -> Sequence[Mapper[Any]]:
+        """given a with_polymorphic() argument, return the set of mappers it
+        represents.
+
+        Trims the list of mappers to just those represented within the given
+        selectable, if present. This helps some more legacy-ish mappings.
+
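+        E.g. (sketch): for a hypothetical hierarchy Employee -> (Manager,
+        Engineer), the spec "*" yields all three mappers, while a spec of
+        [Manager] with no selectable yields Employee and Manager, since
+        iterate_to_root() pulls in the superclass mapper.
+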
+        """
+        if spec == "*":
+            mappers = list(self.self_and_descendants)
+        elif spec:
+            mapper_set = set()
+            for m in util.to_list(spec):
+                m = _class_to_mapper(m)
+                if not m.isa(self):
+                    raise sa_exc.InvalidRequestError(
+                        "%r does not inherit from %r" % (m, self)
+                    )
+
+                if selectable is None:
+                    mapper_set.update(m.iterate_to_root())
+                else:
+                    mapper_set.add(m)
+            mappers = [m for m in self.self_and_descendants if m in mapper_set]
+        else:
+            mappers = []
+
+        if selectable is not None:
+            tables = set(
+                sql_util.find_tables(selectable, include_aliases=True)
+            )
+            mappers = [m for m in mappers if m.local_table in tables]
+        return mappers
+
+    def _selectable_from_mappers(
+        self, mappers: Iterable[Mapper[Any]], innerjoin: bool
+    ) -> FromClause:
+        """given a list of mappers (assumed to be within this mapper's
+        inheritance hierarchy), construct a join (outer unless ``innerjoin``
+        is set) amongst those mappers' mapped tables.
+
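+        E.g. (sketch), for joined tables employee / manager / engineer::
+
+            employee
+            LEFT OUTER JOIN manager ON employee.id = manager.id
+            LEFT OUTER JOIN engineer ON employee.id = engineer.id
+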
+        """
+        from_obj = self.persist_selectable
+        for m in mappers:
+            if m is self:
+                continue
+            if m.concrete:
+                raise sa_exc.InvalidRequestError(
+                    "'with_polymorphic()' requires 'selectable' argument "
+                    "when concrete-inheriting mappers are used."
+                )
+            elif not m.single:
+                if innerjoin:
+                    from_obj = from_obj.join(
+                        m.local_table, m.inherit_condition
+                    )
+                else:
+                    from_obj = from_obj.outerjoin(
+                        m.local_table, m.inherit_condition
+                    )
+
+        return from_obj
+
+    @HasMemoized.memoized_attribute
+    def _version_id_has_server_side_value(self) -> bool:
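+        # sketch: True for e.g. Column(..., server_default=FetchedValue())
+        # or a SQL-expression default; False for Python-side scalar or
+        # callable version counters, which the ORM computes itself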
+        vid_col = self.version_id_col
+
+        if vid_col is None:
+            return False
+
+        elif not isinstance(vid_col, Column):
+            return True
+        else:
+            return vid_col.server_default is not None or (
+                vid_col.default is not None
+                and (
+                    not vid_col.default.is_scalar
+                    and not vid_col.default.is_callable
+                )
+            )
+
+    @HasMemoized.memoized_attribute
+    def _single_table_criterion(self):
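+        # sketch: when selecting a subclass in a single-table hierarchy,
+        # renders a criterion like  employee.type IN ('manager', ...)
+        # covering that mapper and its non-abstract descendants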
+        if self.single and self.inherits and self.polymorphic_on is not None:
+            return self.polymorphic_on._annotate(
+                {"parententity": self, "parentmapper": self}
+            ).in_(
+                [
+                    m.polymorphic_identity
+                    for m in self.self_and_descendants
+                    if not m.polymorphic_abstract
+                ]
+            )
+        else:
+            return None
+
+    @HasMemoized.memoized_attribute
+    def _has_aliased_polymorphic_fromclause(self):
+        """return True if with_polymorphic[1] is an aliased fromclause,
+        like a subquery.
+
+        As of #8168, polymorphic adaption with ORMAdapter is used only
+        if this is present.
+
+        """
+        return self.with_polymorphic and isinstance(
+            self.with_polymorphic[1],
+            expression.AliasedReturnsRows,
+        )
+
+    @HasMemoized.memoized_attribute
+    def _should_select_with_poly_adapter(self):
+        """determine if _MapperEntity or _ORMColumnEntity will need to use
+        polymorphic adaption when setting up a SELECT as well as fetching
+        rows for mapped classes and subclasses against this Mapper.
+
+        moved here from context.py for #8456 to generalize the ruleset
+        for this condition.
+
+        """
+
+        # this has been simplified as of #8456.
+        # rule is: if we have a with_polymorphic or a concrete-style
+        # polymorphic selectable, *or* if the base mapper has either of those,
+        # we turn on the adaption thing.  if not, we do *no* adaption.
+        #
+        # (UPDATE for #8168: the above comment was not accurate, as we were
+        # still saying "do polymorphic" if we were using an auto-generated
+        # flattened JOIN for with_polymorphic.)
+        #
+        # this splits the behavior among the "regular" joined inheritance
+        # and single inheritance mappers, vs. the "weird / difficult"
+        # concrete and joined inh mappings that use a with_polymorphic of
+        # some kind or polymorphic_union.
+        #
+        # note we have some tests in test_polymorphic_rel that query against
+        # a subclass, then refer to the superclass that has a with_polymorphic
+        # on it (such as test_join_from_polymorphic_explicit_aliased_three).
+        # these tests actually adapt the polymorphic selectable (like, the
+        # UNION or the SELECT subquery with JOIN in it) to be just the simple
+        # subclass table.   Hence even if we are a "plain" inheriting mapper
+        # but our base has a wpoly on it, we turn on adaption.  This is a
+        # legacy case we should probably disable.
+        #
+        #
+        # UPDATE: simplified way more as of #8168.   polymorphic adaption
+        # is turned off even if with_polymorphic is set, as long as there
+        # is no user-defined aliased selectable / subquery configured.
+        # this scales back the use of polymorphic adaption in practice
+        # to basically no cases except for concrete inheritance with a
+        # polymorphic base class.
+        #
+        return (
+            self._has_aliased_polymorphic_fromclause
+            or self._requires_row_aliasing
+            or (self.base_mapper._has_aliased_polymorphic_fromclause)
+            or self.base_mapper._requires_row_aliasing
+        )
+
+    @HasMemoized.memoized_attribute
+    def _with_polymorphic_mappers(self) -> Sequence[Mapper[Any]]:
+        self._check_configure()
+
+        if not self.with_polymorphic:
+            return []
+        return self._mappers_from_spec(*self.with_polymorphic)
+
+    @HasMemoized.memoized_attribute
+    def _post_inspect(self):
+        """This hook is invoked by attribute inspection.
+
+        E.g. when Query calls:
+
+            coercions.expect(roles.ColumnsClauseRole, ent, keep_inspect=True)
+
+        This allows the inspection process to run the configure-mappers hook.
+
+        """
+        self._check_configure()
+
+    @HasMemoized_ro_memoized_attribute
+    def _with_polymorphic_selectable(self) -> FromClause:
+        if not self.with_polymorphic:
+            return self.persist_selectable
+
+        spec, selectable = self.with_polymorphic
+        if selectable is not None:
+            return selectable
+        else:
+            return self._selectable_from_mappers(
+                self._mappers_from_spec(spec, selectable), False
+            )
+
+    with_polymorphic_mappers = _with_polymorphic_mappers
+    """The list of :class:`_orm.Mapper` objects included in the
+    default "polymorphic" query.
+
+    """
+
+    @HasMemoized_ro_memoized_attribute
+    def _insert_cols_evaluating_none(self):
+        return {
+            table: frozenset(
+                col for col in columns if col.type.should_evaluate_none
+            )
+            for table, columns in self._cols_by_table.items()
+        }
+
+    @HasMemoized.memoized_attribute
+    def _insert_cols_as_none(self):
+        return {
+            table: frozenset(
+                col.key
+                for col in columns
+                if not col.primary_key
+                and not col.server_default
+                and not col.default
+                and not col.type.should_evaluate_none
+            )
+            for table, columns in self._cols_by_table.items()
+        }
+
+    @HasMemoized.memoized_attribute
+    def _propkey_to_col(self):
+        return {
+            table: {self._columntoproperty[col].key: col for col in columns}
+            for table, columns in self._cols_by_table.items()
+        }
+
+    @HasMemoized.memoized_attribute
+    def _pk_keys_by_table(self):
+        return {
+            table: frozenset([col.key for col in pks])
+            for table, pks in self._pks_by_table.items()
+        }
+
+    @HasMemoized.memoized_attribute
+    def _pk_attr_keys_by_table(self):
+        return {
+            table: frozenset([self._columntoproperty[col].key for col in pks])
+            for table, pks in self._pks_by_table.items()
+        }
+
+    @HasMemoized.memoized_attribute
+    def _server_default_cols(
+        self,
+    ) -> Mapping[FromClause, FrozenSet[Column[Any]]]:
+        return {
+            table: frozenset(
+                [
+                    col
+                    for col in cast("Iterable[Column[Any]]", columns)
+                    if col.server_default is not None
+                    or (
+                        col.default is not None
+                        and col.default.is_clause_element
+                    )
+                ]
+            )
+            for table, columns in self._cols_by_table.items()
+        }
+
+    @HasMemoized.memoized_attribute
+    def _server_onupdate_default_cols(
+        self,
+    ) -> Mapping[FromClause, FrozenSet[Column[Any]]]:
+        return {
+            table: frozenset(
+                [
+                    col
+                    for col in cast("Iterable[Column[Any]]", columns)
+                    if col.server_onupdate is not None
+                    or (
+                        col.onupdate is not None
+                        and col.onupdate.is_clause_element
+                    )
+                ]
+            )
+            for table, columns in self._cols_by_table.items()
+        }
+
+    @HasMemoized.memoized_attribute
+    def _server_default_col_keys(self) -> Mapping[FromClause, FrozenSet[str]]:
+        return {
+            table: frozenset(col.key for col in cols if col.key is not None)
+            for table, cols in self._server_default_cols.items()
+        }
+
+    @HasMemoized.memoized_attribute
+    def _server_onupdate_default_col_keys(
+        self,
+    ) -> Mapping[FromClause, FrozenSet[str]]:
+        return {
+            table: frozenset(col.key for col in cols if col.key is not None)
+            for table, cols in self._server_onupdate_default_cols.items()
+        }
+
+    @HasMemoized.memoized_attribute
+    def _server_default_plus_onupdate_propkeys(self) -> Set[str]:
+        result: Set[str] = set()
+
+        col_to_property = self._columntoproperty
+        for table, columns in self._server_default_cols.items():
+            result.update(
+                col_to_property[col].key
+                for col in columns.intersection(col_to_property)
+            )
+        for table, columns in self._server_onupdate_default_cols.items():
+            result.update(
+                col_to_property[col].key
+                for col in columns.intersection(col_to_property)
+            )
+        return result
+
+    @HasMemoized.memoized_instancemethod
+    def __clause_element__(self):
+        annotations: Dict[str, Any] = {
+            "entity_namespace": self,
+            "parententity": self,
+            "parentmapper": self,
+        }
+        if self.persist_selectable is not self.local_table:
+            # joined table inheritance, with polymorphic selectable,
+            # etc.
+            annotations["dml_table"] = self.local_table._annotate(
+                {
+                    "entity_namespace": self,
+                    "parententity": self,
+                    "parentmapper": self,
+                }
+            )._set_propagate_attrs(
+                {"compile_state_plugin": "orm", "plugin_subject": self}
+            )
+
+        return self.selectable._annotate(annotations)._set_propagate_attrs(
+            {"compile_state_plugin": "orm", "plugin_subject": self}
+        )
+
+    @util.memoized_property
+    def select_identity_token(self):
+        return (
+            expression.null()
+            ._annotate(
+                {
+                    "entity_namespace": self,
+                    "parententity": self,
+                    "parentmapper": self,
+                    "identity_token": True,
+                }
+            )
+            ._set_propagate_attrs(
+                {"compile_state_plugin": "orm", "plugin_subject": self}
+            )
+        )
+
+    @property
+    def selectable(self) -> FromClause:
+        """The :class:`_schema.FromClause` construct this
+        :class:`_orm.Mapper` selects from by default.
+
+        Normally, this is equivalent to :attr:`.persist_selectable`, unless
+        the ``with_polymorphic`` feature is in use, in which case the
+        full "polymorphic" selectable is returned.
+
+        """
+        return self._with_polymorphic_selectable
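+
+    # Illustrative sketch (assumes a hypothetical mapped ``User`` class;
+    # not part of this module):
+    #
+    #     from sqlalchemy import inspect
+    #
+    #     mapper = inspect(User)
+    #     mapper.selectable  # the user table, or the polymorphic
+    #                        # selectable when with_polymorphic is set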
+
+    def _with_polymorphic_args(
+        self,
+        spec: Any = None,
+        selectable: Union[Literal[False, None], FromClause] = False,
+        innerjoin: bool = False,
+    ) -> Tuple[Sequence[Mapper[Any]], FromClause]:
+        if selectable not in (None, False):
+            selectable = coercions.expect(
+                roles.StrictFromClauseRole, selectable, allow_select=True
+            )
+
+        if self.with_polymorphic:
+            if not spec:
+                spec = self.with_polymorphic[0]
+            if selectable is False:
+                selectable = self.with_polymorphic[1]
+        elif selectable is False:
+            selectable = None
+        mappers = self._mappers_from_spec(spec, selectable)
+        if selectable is not None:
+            return mappers, selectable
+        else:
+            return mappers, self._selectable_from_mappers(mappers, innerjoin)
+
+    @HasMemoized.memoized_attribute
+    def _polymorphic_properties(self):
+        return list(
+            self._iterate_polymorphic_properties(
+                self._with_polymorphic_mappers
+            )
+        )
+
+    @property
+    def _all_column_expressions(self):
+        poly_properties = self._polymorphic_properties
+        adapter = self._polymorphic_adapter
+
+        return [
+            adapter.columns[c] if adapter else c
+            for prop in poly_properties
+            if isinstance(prop, properties.ColumnProperty)
+            and prop._renders_in_subqueries
+            for c in prop.columns
+        ]
+
+    def _columns_plus_keys(self, polymorphic_mappers=()):
+        if polymorphic_mappers:
+            poly_properties = self._iterate_polymorphic_properties(
+                polymorphic_mappers
+            )
+        else:
+            poly_properties = self._polymorphic_properties
+
+        return [
+            (prop.key, prop.columns[0])
+            for prop in poly_properties
+            if isinstance(prop, properties.ColumnProperty)
+        ]
+
+    @HasMemoized.memoized_attribute
+    def _polymorphic_adapter(self) -> Optional[orm_util.ORMAdapter]:
+        if self._has_aliased_polymorphic_fromclause:
+            return orm_util.ORMAdapter(
+                orm_util._TraceAdaptRole.MAPPER_POLYMORPHIC_ADAPTER,
+                self,
+                selectable=self.selectable,
+                equivalents=self._equivalent_columns,
+                limit_on_entity=False,
+            )
+        else:
+            return None
+
+    def _iterate_polymorphic_properties(self, mappers=None):
+        """Return an iterator of MapperProperty objects which will render into
+        a SELECT."""
+        if mappers is None:
+            mappers = self._with_polymorphic_mappers
+
+        if not mappers:
+            for c in self.iterate_properties:
+                yield c
+        else:
+            # in the polymorphic case, filter out discriminator columns
+            # from other mappers, as these are sometimes dependent on that
+            # mapper's polymorphic selectable (which we don't want rendered)
+            for c in util.unique_list(
+                chain(
+                    *[
+                        list(mapper.iterate_properties)
+                        for mapper in [self] + mappers
+                    ]
+                )
+            ):
+                if getattr(c, "_is_polymorphic_discriminator", False) and (
+                    self.polymorphic_on is None
+                    or c.columns[0] is not self.polymorphic_on
+                ):
+                    continue
+                yield c
+
+    @HasMemoized.memoized_attribute
+    def attrs(self) -> util.ReadOnlyProperties[MapperProperty[Any]]:
+        """A namespace of all :class:`.MapperProperty` objects
+        associated this mapper.
+
+        This is an object that provides each property based on
+        its key name.  For instance, the mapper for a
+        ``User`` class which has ``User.name`` attribute would
+        provide ``mapper.attrs.name``, which would be the
+        :class:`.ColumnProperty` representing the ``name``
+        column.   The namespace object can also be iterated,
+        which would yield each :class:`.MapperProperty`.
+
+        :class:`_orm.Mapper` has several pre-filtered views
+        of this attribute which limit the types of properties
+        returned, including :attr:`.synonyms`, :attr:`.column_attrs`,
+        :attr:`.relationships`, and :attr:`.composites`.
+
+        .. warning::
+
+            The :attr:`_orm.Mapper.attrs` accessor namespace is an
+            instance of :class:`.OrderedProperties`.  This is
+            a dictionary-like object which includes a small number of
+            named methods such as :meth:`.OrderedProperties.items`
+            and :meth:`.OrderedProperties.values`.  When
+            accessing attributes dynamically, favor using the dict-access
+            scheme, e.g. ``mapper.attrs[somename]`` over
+            ``getattr(mapper.attrs, somename)`` to avoid name collisions.
+
+        .. seealso::
+
+            :attr:`_orm.Mapper.all_orm_descriptors`
+
+        """
+
+        self._check_configure()
+        return util.ReadOnlyProperties(self._props)
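+
+    # Illustrative sketch (assumes a hypothetical mapped ``User`` class
+    # with a ``name`` attribute; not part of this module):
+    #
+    #     from sqlalchemy import inspect
+    #
+    #     mapper = inspect(User)
+    #     mapper.attrs["name"]  # ColumnProperty for the "name" column
+    #     [prop.key for prop in mapper.attrs]  # all property key names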
+
+    @HasMemoized.memoized_attribute
+    def all_orm_descriptors(self) -> util.ReadOnlyProperties[InspectionAttr]:
+        """A namespace of all :class:`.InspectionAttr` attributes associated
+        with the mapped class.
+
+        These attributes are in all cases Python :term:`descriptors`
+        associated with the mapped class or its superclasses.
+
+        This namespace includes attributes that are mapped to the class
+        as well as attributes declared by extension modules.
+        It includes any Python descriptor type that inherits from
+        :class:`.InspectionAttr`.  This includes
+        :class:`.QueryableAttribute`, as well as extension types such as
+        :class:`.hybrid_property`, :class:`.hybrid_method` and
+        :class:`.AssociationProxy`.
+
+        To distinguish between mapped attributes and extension attributes,
+        the attribute :attr:`.InspectionAttr.extension_type` will refer
+        to a constant that distinguishes between different extension types.
+
+        The sorting of the attributes is based on the following rules:
+
+        1. Iterate through the class and its superclasses in order from
+           subclass to superclass (i.e. iterate through ``cls.__mro__``)
+
+        2. For each class, yield the attributes in the order in which they
+           appear in ``__dict__``, with the exception of those in step
+           3 below.  In Python 3.6 and above this ordering will be the
+           same as that of the class' construction, with the exception
+           of attributes that were added after the fact by the application
+           or the mapper.
+
+        3. If a certain attribute key is also in the superclass ``__dict__``,
+           then it's included in the iteration for that class, and not the
+           class in which it first appeared.
+
+        The above process produces an ordering that is deterministic in terms
+        of the order in which attributes were assigned to the class.
+
+        .. versionchanged:: 1.3.19 ensured deterministic ordering for
+           :meth:`_orm.Mapper.all_orm_descriptors`.
+
+        When dealing with a :class:`.QueryableAttribute`, the
+        :attr:`.QueryableAttribute.property` attribute refers to the
+        :class:`.MapperProperty` property, which is what you get when
+        referring to the collection of mapped properties via
+        :attr:`_orm.Mapper.attrs`.
+
+        .. warning::
+
+            The :attr:`_orm.Mapper.all_orm_descriptors`
+            accessor namespace is an
+            instance of :class:`.OrderedProperties`.  This is
+            a dictionary-like object which includes a small number of
+            named methods such as :meth:`.OrderedProperties.items`
+            and :meth:`.OrderedProperties.values`.  When
+            accessing attributes dynamically, favor using the dict-access
+            scheme, e.g. ``mapper.all_orm_descriptors[somename]`` over
+            ``getattr(mapper.all_orm_descriptors, somename)`` to avoid name
+            collisions.
+
+        .. seealso::
+
+            :attr:`_orm.Mapper.attrs`
+
+        """
+        return util.ReadOnlyProperties(
+            dict(self.class_manager._all_sqla_attributes())
+        )
+
+    @HasMemoized.memoized_attribute
+    @util.preload_module("sqlalchemy.orm.descriptor_props")
+    def _pk_synonyms(self) -> Dict[str, str]:
+        """return a dictionary of {syn_attribute_name: pk_attr_name} for
+        all synonyms that refer to primary key columns
+
+        """
+        descriptor_props = util.preloaded.orm_descriptor_props
+
+        pk_keys = {prop.key for prop in self._identity_key_props}
+
+        return {
+            syn.key: syn.name
+            for k, syn in self._props.items()
+            if isinstance(syn, descriptor_props.SynonymProperty)
+            and syn.name in pk_keys
+        }
+
+    @HasMemoized.memoized_attribute
+    @util.preload_module("sqlalchemy.orm.descriptor_props")
+    def synonyms(self) -> util.ReadOnlyProperties[SynonymProperty[Any]]:
+        """Return a namespace of all :class:`.Synonym`
+        properties maintained by this :class:`_orm.Mapper`.
+
+        .. seealso::
+
+            :attr:`_orm.Mapper.attrs` - namespace of all
+            :class:`.MapperProperty`
+            objects.
+
+        """
+        descriptor_props = util.preloaded.orm_descriptor_props
+
+        return self._filter_properties(descriptor_props.SynonymProperty)
+
+    @property
+    def entity_namespace(self):
+        return self.class_
+
+    @HasMemoized.memoized_attribute
+    def column_attrs(self) -> util.ReadOnlyProperties[ColumnProperty[Any]]:
+        """Return a namespace of all :class:`.ColumnProperty`
+        properties maintained by this :class:`_orm.Mapper`.
+
+        .. seealso::
+
+            :attr:`_orm.Mapper.attrs` - namespace of all
+            :class:`.MapperProperty`
+            objects.
+
+        """
+        return self._filter_properties(properties.ColumnProperty)
+
+    @HasMemoized.memoized_attribute
+    @util.preload_module("sqlalchemy.orm.relationships")
+    def relationships(
+        self,
+    ) -> util.ReadOnlyProperties[RelationshipProperty[Any]]:
+        """A namespace of all :class:`.Relationship` properties
+        maintained by this :class:`_orm.Mapper`.
+
+        .. warning::
+
+            The :attr:`_orm.Mapper.relationships` accessor namespace is an
+            instance of :class:`.OrderedProperties`.  This is
+            a dictionary-like object which includes a small number of
+            named methods such as :meth:`.OrderedProperties.items`
+            and :meth:`.OrderedProperties.values`.  When
+            accessing attributes dynamically, favor using the dict-access
+            scheme, e.g. ``mapper.relationships[somename]`` over
+            ``getattr(mapper.relationships, somename)`` to avoid name
+            collisions.
+
+        .. seealso::
+
+            :attr:`_orm.Mapper.attrs` - namespace of all
+            :class:`.MapperProperty`
+            objects.
+
+        """
+        return self._filter_properties(
+            util.preloaded.orm_relationships.RelationshipProperty
+        )
+
+    @HasMemoized.memoized_attribute
+    @util.preload_module("sqlalchemy.orm.descriptor_props")
+    def composites(self) -> util.ReadOnlyProperties[CompositeProperty[Any]]:
+        """Return a namespace of all :class:`.Composite`
+        properties maintained by this :class:`_orm.Mapper`.
+
+        .. seealso::
+
+            :attr:`_orm.Mapper.attrs` - namespace of all
+            :class:`.MapperProperty`
+            objects.
+
+        """
+        return self._filter_properties(
+            util.preloaded.orm_descriptor_props.CompositeProperty
+        )
+
+    def _filter_properties(
+        self, type_: Type[_MP]
+    ) -> util.ReadOnlyProperties[_MP]:
+        self._check_configure()
+        return util.ReadOnlyProperties(
+            util.OrderedDict(
+                (k, v) for k, v in self._props.items() if isinstance(v, type_)
+            )
+        )
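+
+    # Illustrative sketch of the pre-filtered namespaces above (assumes a
+    # hypothetical mapped ``User`` class; not part of this module):
+    #
+    #     from sqlalchemy import inspect
+    #
+    #     mapper = inspect(User)
+    #     list(mapper.column_attrs)   # ColumnProperty objects only
+    #     list(mapper.relationships)  # RelationshipProperty objects only
+    #     list(mapper.synonyms)       # SynonymProperty objects only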
+
+    @HasMemoized.memoized_attribute
+    def _get_clause(self):
+        """create a "get clause" based on the primary key.  this is used
+        by query.get() and many-to-one lazyloads to load this item
+        by primary key.
+
+        """
+        params = [
+            (
+                primary_key,
+                sql.bindparam("pk_%d" % idx, type_=primary_key.type),
+            )
+            for idx, primary_key in enumerate(self.primary_key, 1)
+        ]
+        return (
+            sql.and_(*[k == v for (k, v) in params]),
+            util.column_dict(params),
+        )
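+
+    # Illustrative sketch (hypothetical ``users`` table with a single
+    # integer primary key): the criterion built above renders as
+    #
+    #     users.id = :pk_1
+    #
+    # with one "pk_<n>" bind parameter per primary key column, numbered
+    # from 1.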
+
+    @HasMemoized.memoized_attribute
+    def _equivalent_columns(self) -> _EquivalentColumnMap:
+        """Create a map of all equivalent columns, based on
+        the determination of column pairs that are equated to
+        one another based on inherit condition.  This is designed
+        to work with the queries that util.polymorphic_union
+        comes up with, which often don't include the columns from
+        the base table directly (including the subclass table columns
+        only).
+
+        The resulting structure is a dictionary of columns mapped
+        to lists of equivalent columns, e.g.::
+
+            {tablea.col1: {tableb.col1, tablec.col1}, tablea.col2: {tabled.col2}}
+
+        """  # noqa: E501
+        result: _EquivalentColumnMap = {}
+
+        def visit_binary(binary):
+            if binary.operator == operators.eq:
+                if binary.left in result:
+                    result[binary.left].add(binary.right)
+                else:
+                    result[binary.left] = {binary.right}
+                if binary.right in result:
+                    result[binary.right].add(binary.left)
+                else:
+                    result[binary.right] = {binary.left}
+
+        for mapper in self.base_mapper.self_and_descendants:
+            if mapper.inherit_condition is not None:
+                visitors.traverse(
+                    mapper.inherit_condition, {}, {"binary": visit_binary}
+                )
+
+        return result
+
+    def _is_userland_descriptor(self, assigned_name: str, obj: Any) -> bool:
+        if isinstance(
+            obj,
+            (
+                _MappedAttribute,
+                instrumentation.ClassManager,
+                expression.ColumnElement,
+            ),
+        ):
+            return False
+        else:
+            return assigned_name not in self._dataclass_fields
+
+    @HasMemoized.memoized_attribute
+    def _dataclass_fields(self):
+        return [f.name for f in util.dataclass_fields(self.class_)]
+
+    def _should_exclude(self, name, assigned_name, local, column):
+        """determine whether a particular property should be implicitly
+        present on the class.
+
+        This occurs when properties are propagated from an inherited class, or
+        are applied from the columns present in the mapped table.
+
+        """
+
+        if column is not None and sql_base._never_select_column(column):
+            return True
+
+        # check for class-bound attributes and/or descriptors,
+        # either local or from an inherited class
+        # ignore dataclass field default values
+        if local:
+            if self.class_.__dict__.get(
+                assigned_name, None
+            ) is not None and self._is_userland_descriptor(
+                assigned_name, self.class_.__dict__[assigned_name]
+            ):
+                return True
+        else:
+            attr = self.class_manager._get_class_attr_mro(assigned_name, None)
+            if attr is not None and self._is_userland_descriptor(
+                assigned_name, attr
+            ):
+                return True
+
+        if (
+            self.include_properties is not None
+            and name not in self.include_properties
+            and (column is None or column not in self.include_properties)
+        ):
+            self._log("not including property %s" % (name))
+            return True
+
+        if self.exclude_properties is not None and (
+            name in self.exclude_properties
+            or (column is not None and column in self.exclude_properties)
+        ):
+            self._log("excluding property %s" % (name))
+            return True
+
+        return False
+
+    def common_parent(self, other: Mapper[Any]) -> bool:
+        """Return true if the given mapper shares a
+        common inherited parent as this mapper."""
+
+        return self.base_mapper is other.base_mapper
+
+    def is_sibling(self, other: Mapper[Any]) -> bool:
+        """return true if the other mapper is an inheriting sibling to this
+        one.  common parent but different branch
+
+        """
+        return (
+            self.base_mapper is other.base_mapper
+            and not self.isa(other)
+            and not other.isa(self)
+        )
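+
+    # Illustrative sketch (hypothetical Engineer and Manager classes both
+    # inheriting from Employee; not part of this module):
+    #
+    #     from sqlalchemy import inspect
+    #
+    #     inspect(Engineer).common_parent(inspect(Manager))  # True
+    #     inspect(Engineer).is_sibling(inspect(Manager))     # True
+    #     inspect(Engineer).is_sibling(inspect(Employee))    # False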
+
+    def _canload(
+        self, state: InstanceState[Any], allow_subtypes: bool
+    ) -> bool:
+        s = self.primary_mapper()
+        if self.polymorphic_on is not None or allow_subtypes:
+            return _state_mapper(state).isa(s)
+        else:
+            return _state_mapper(state) is s
+
+    def isa(self, other: Mapper[Any]) -> bool:
+        """Return True if the this mapper inherits from the given mapper."""
+
+        m: Optional[Mapper[Any]] = self
+        while m and m is not other:
+            m = m.inherits
+        return bool(m)
+
+    def iterate_to_root(self) -> Iterator[Mapper[Any]]:
+        m: Optional[Mapper[Any]] = self
+        while m:
+            yield m
+            m = m.inherits
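+
+    # Illustrative sketch (hypothetical Engineer subclass of Employee;
+    # not part of this module):
+    #
+    #     from sqlalchemy import inspect
+    #
+    #     inspect(Engineer).isa(inspect(Employee))  # True
+    #     inspect(Employee).isa(inspect(Engineer))  # False
+    #     list(inspect(Engineer).iterate_to_root())
+    #     # -> [Mapper for Engineer, Mapper for Employee]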
+
+    @HasMemoized.memoized_attribute
+    def self_and_descendants(self) -> Sequence[Mapper[Any]]:
+        """The collection including this mapper and all descendant mappers.
+
+        This includes not just the immediately inheriting mappers but
+        all their inheriting mappers as well.
+
+        """
+        descendants = []
+        stack = deque([self])
+        while stack:
+            item = stack.popleft()
+            descendants.append(item)
+            stack.extend(item._inheriting_mappers)
+        return util.WeakSequence(descendants)
+
+    def polymorphic_iterator(self) -> Iterator[Mapper[Any]]:
+        """Iterate through the collection including this mapper and
+        all descendant mappers.
+
+        This includes not just the immediately inheriting mappers but
+        all their inheriting mappers as well.
+
+        To iterate through an entire hierarchy, use
+        ``mapper.base_mapper.polymorphic_iterator()``.
+
+        """
+        return iter(self.self_and_descendants)
+
+    def primary_mapper(self) -> Mapper[Any]:
+        """Return the primary mapper corresponding to this mapper's class key
+        (class)."""
+
+        return self.class_manager.mapper
+
+    @property
+    def primary_base_mapper(self) -> Mapper[Any]:
+        return self.class_manager.mapper.base_mapper
+
+    def _result_has_identity_key(self, result, adapter=None):
+        pk_cols: Sequence[ColumnClause[Any]] = self.primary_key
+        if adapter:
+            pk_cols = [adapter.columns[c] for c in pk_cols]
+        rk = result.keys()
+        for col in pk_cols:
+            if col not in rk:
+                return False
+        else:
+            return True
+
+    def identity_key_from_row(
+        self,
+        row: Union[Row[Any], RowMapping],
+        identity_token: Optional[Any] = None,
+        adapter: Optional[ORMAdapter] = None,
+    ) -> _IdentityKeyType[_O]:
+        """Return an identity-map key for use in storing/retrieving an
+        item from the identity map.
+
+        :param row: A :class:`.Row` or :class:`.RowMapping` produced from a
+         result set that selected from the ORM mapped primary key columns.
+
+         .. versionchanged:: 2.0
+            :class:`.Row` or :class:`.RowMapping` are accepted
+            for the "row" argument
+
+        """
+        pk_cols: Sequence[ColumnClause[Any]] = self.primary_key
+        if adapter:
+            pk_cols = [adapter.columns[c] for c in pk_cols]
+
+        mapping: RowMapping
+        if hasattr(row, "_mapping"):
+            mapping = row._mapping
+        else:
+            mapping = row  # type: ignore[assignment]
+
+        return (
+            self._identity_class,
+            tuple(mapping[column] for column in pk_cols),
+            identity_token,
+        )
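+
+    # Illustrative sketch (assumes a hypothetical mapped ``User`` class
+    # and an existing ``session``; not part of this module):
+    #
+    #     from sqlalchemy import inspect, select
+    #
+    #     mapper = inspect(User)
+    #     row = session.execute(select(User.id)).first()
+    #     mapper.identity_key_from_row(row)  # e.g. (User, (1,), None)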
+
+    def identity_key_from_primary_key(
+        self,
+        primary_key: Tuple[Any, ...],
+        identity_token: Optional[Any] = None,
+    ) -> _IdentityKeyType[_O]:
+        """Return an identity-map key for use in storing/retrieving an
+        item from an identity map.
+
+        :param primary_key: A list of values indicating the identifier.
+
+        """
+        return (
+            self._identity_class,
+            tuple(primary_key),
+            identity_token,
+        )
+
+    def identity_key_from_instance(self, instance: _O) -> _IdentityKeyType[_O]:
+        """Return the identity key for the given instance, based on
+        its primary key attributes.
+
+        If the instance's state is expired, calling this method
+        will result in a database check to see if the object has been deleted.
+        If the row no longer exists,
+        :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.
+
+        This value is typically also found on the instance state under the
+        attribute name ``key``.
+
+        """
+        state = attributes.instance_state(instance)
+        return self._identity_key_from_state(state, PassiveFlag.PASSIVE_OFF)
+
+    def _identity_key_from_state(
+        self,
+        state: InstanceState[_O],
+        passive: PassiveFlag = PassiveFlag.PASSIVE_RETURN_NO_VALUE,
+    ) -> _IdentityKeyType[_O]:
+        dict_ = state.dict
+        manager = state.manager
+        return (
+            self._identity_class,
+            tuple(
+                [
+                    manager[prop.key].impl.get(state, dict_, passive)
+                    for prop in self._identity_key_props
+                ]
+            ),
+            state.identity_token,
+        )
+
+    def primary_key_from_instance(self, instance: _O) -> Tuple[Any, ...]:
+        """Return the list of primary key values for the given
+        instance.
+
+        If the instance's state is expired, calling this method
+        will result in a database check to see if the object has been deleted.
+        If the row no longer exists,
+        :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.
+
+        """
+        state = attributes.instance_state(instance)
+        identity_key = self._identity_key_from_state(
+            state, PassiveFlag.PASSIVE_OFF
+        )
+        return identity_key[1]
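+
+    # Illustrative sketch (assumes a hypothetical persistent ``User``
+    # instance ``some_user`` whose primary key value is 5; not part of
+    # this module):
+    #
+    #     mapper = inspect(User)
+    #     mapper.identity_key_from_instance(some_user)  # (User, (5,), None)
+    #     mapper.primary_key_from_instance(some_user)   # (5,)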
+
+    @HasMemoized.memoized_attribute
+    def _persistent_sortkey_fn(self):
+        key_fns = [col.type.sort_key_function for col in self.primary_key]
+
+        if set(key_fns).difference([None]):
+
+            def key(state):
+                return tuple(
+                    key_fn(val) if key_fn is not None else val
+                    for key_fn, val in zip(key_fns, state.key[1])
+                )
+
+        else:
+
+            def key(state):
+                return state.key[1]
+
+        return key
+
+    @HasMemoized.memoized_attribute
+    def _identity_key_props(self):
+        return [self._columntoproperty[col] for col in self.primary_key]
+
+    @HasMemoized.memoized_attribute
+    def _all_pk_cols(self):
+        collection: Set[ColumnClause[Any]] = set()
+        for table in self.tables:
+            collection.update(self._pks_by_table[table])
+        return collection
+
+    @HasMemoized.memoized_attribute
+    def _should_undefer_in_wildcard(self):
+        cols: Set[ColumnElement[Any]] = set(self.primary_key)
+        if self.polymorphic_on is not None:
+            cols.add(self.polymorphic_on)
+        return cols
+
+    @HasMemoized.memoized_attribute
+    def _primary_key_propkeys(self):
+        return {self._columntoproperty[col].key for col in self._all_pk_cols}
+
+    def _get_state_attr_by_column(
+        self,
+        state: InstanceState[_O],
+        dict_: _InstanceDict,
+        column: ColumnElement[Any],
+        passive: PassiveFlag = PassiveFlag.PASSIVE_RETURN_NO_VALUE,
+    ) -> Any:
+        prop = self._columntoproperty[column]
+        return state.manager[prop.key].impl.get(state, dict_, passive=passive)
+
+    def _set_committed_state_attr_by_column(self, state, dict_, column, value):
+        prop = self._columntoproperty[column]
+        state.manager[prop.key].impl.set_committed_value(state, dict_, value)
+
+    def _set_state_attr_by_column(self, state, dict_, column, value):
+        prop = self._columntoproperty[column]
+        state.manager[prop.key].impl.set(state, dict_, value, None)
+
+    def _get_committed_attr_by_column(self, obj, column):
+        state = attributes.instance_state(obj)
+        dict_ = attributes.instance_dict(obj)
+        return self._get_committed_state_attr_by_column(
+            state, dict_, column, passive=PassiveFlag.PASSIVE_OFF
+        )
+
+    def _get_committed_state_attr_by_column(
+        self, state, dict_, column, passive=PassiveFlag.PASSIVE_RETURN_NO_VALUE
+    ):
+        prop = self._columntoproperty[column]
+        return state.manager[prop.key].impl.get_committed_value(
+            state, dict_, passive=passive
+        )
+
+    def _optimized_get_statement(self, state, attribute_names):
+        """assemble a WHERE clause which retrieves a given state by primary
+        key, using a minimized set of tables.
+
+        Applies to a joined-table inheritance mapper where the
+        requested attribute names are only present on joined tables,
+        not the base table.  The WHERE clause attempts to include
+        only those tables to minimize joins.
+
+        """
+        props = self._props
+
+        col_attribute_names = set(attribute_names).intersection(
+            state.mapper.column_attrs.keys()
+        )
+        tables: Set[FromClause] = set(
+            chain(
+                *[
+                    sql_util.find_tables(c, check_columns=True)
+                    for key in col_attribute_names
+                    for c in props[key].columns
+                ]
+            )
+        )
+
+        if self.base_mapper.local_table in tables:
+            return None
+
+        def visit_binary(binary):
+            leftcol = binary.left
+            rightcol = binary.right
+            if leftcol is None or rightcol is None:
+                return
+
+            if leftcol.table not in tables:
+                leftval = self._get_committed_state_attr_by_column(
+                    state,
+                    state.dict,
+                    leftcol,
+                    passive=PassiveFlag.PASSIVE_NO_INITIALIZE,
+                )
+                if leftval in orm_util._none_set:
+                    raise _OptGetColumnsNotAvailable()
+                binary.left = sql.bindparam(
+                    None, leftval, type_=binary.right.type
+                )
+            elif rightcol.table not in tables:
+                rightval = self._get_committed_state_attr_by_column(
+                    state,
+                    state.dict,
+                    rightcol,
+                    passive=PassiveFlag.PASSIVE_NO_INITIALIZE,
+                )
+                if rightval in orm_util._none_set:
+                    raise _OptGetColumnsNotAvailable()
+                binary.right = sql.bindparam(
+                    None, rightval, type_=binary.right.type
+                )
+
+        allconds: List[ColumnElement[bool]] = []
+
+        start = False
+
+        # as of #7507, from the lowest base table on upwards,
+        # we include all intermediary tables.
+
+        for mapper in reversed(list(self.iterate_to_root())):
+            if mapper.local_table in tables:
+                start = True
+            elif not isinstance(mapper.local_table, expression.TableClause):
+                return None
+            if start and not mapper.single:
+                assert mapper.inherits
+                assert not mapper.concrete
+                assert mapper.inherit_condition is not None
+                allconds.append(mapper.inherit_condition)
+                tables.add(mapper.local_table)
+
+        # only the bottom table needs its criteria to be altered to fit
+        # the primary key ident - the rest of the tables upwards to the
+        # descendant-most class should all be present and joined to each
+        # other.
+        try:
+            _traversed = visitors.cloned_traverse(
+                allconds[0], {}, {"binary": visit_binary}
+            )
+        except _OptGetColumnsNotAvailable:
+            return None
+        else:
+            allconds[0] = _traversed
+
+        cond = sql.and_(*allconds)
+
+        cols = []
+        for key in col_attribute_names:
+            cols.extend(props[key].columns)
+        return (
+            sql.select(*cols)
+            .where(cond)
+            .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
+        )
+
+    def _iterate_to_target_viawpoly(self, mapper):
+        if self.isa(mapper):
+            prev = self
+            for m in self.iterate_to_root():
+                yield m
+
+                if m is not prev and prev not in m._with_polymorphic_mappers:
+                    break
+
+                prev = m
+                if m is mapper:
+                    break
+
+    @HasMemoized.memoized_attribute
+    def _would_selectinload_combinations_cache(self):
+        return {}
+
+    def _would_selectin_load_only_from_given_mapper(self, super_mapper):
+        """return True if this mapper would "selectin" polymorphic load based
+        on the given super mapper, and not from a setting from a subclass.
+
+        given::
+
+            class A: ...
+
+
+            class B(A):
+                __mapper_args__ = {"polymorphic_load": "selectin"}
+
+
+            class C(B): ...
+
+
+            class D(B):
+                __mapper_args__ = {"polymorphic_load": "selectin"}
+
+        ``inspect(C)._would_selectin_load_only_from_given_mapper(inspect(B))``
+        returns True, because C does selectin loading because of B's setting.
+
+        OTOH,
+        ``inspect(D)._would_selectin_load_only_from_given_mapper(inspect(B))``
+        returns False, because D does selectin loading because of its own
+        setting; when we are doing a selectin poly load from B, we want to
+        filter out D because it would already have its own selectin poly load
+        set up separately.
+
+        Added as part of #9373.
+
+        """
+        cache = self._would_selectinload_combinations_cache
+
+        try:
+            return cache[super_mapper]
+        except KeyError:
+            pass
+
+        # assert that given object is a supermapper, meaning we already
+        # strong reference it directly or indirectly.  this allows us
+        # to not worry that we are creating new strongrefs to unrelated
+        # mappers or other objects.
+        assert self.isa(super_mapper)
+
+        mapper = super_mapper
+        for m in self._iterate_to_target_viawpoly(mapper):
+            if m.polymorphic_load == "selectin":
+                retval = m is super_mapper
+                break
+        else:
+            retval = False
+
+        cache[super_mapper] = retval
+        return retval
+
+    def _should_selectin_load(self, enabled_via_opt, polymorphic_from):
+        if not enabled_via_opt:
+            # common case, takes place for all polymorphic loads
+            mapper = polymorphic_from
+            for m in self._iterate_to_target_viawpoly(mapper):
+                if m.polymorphic_load == "selectin":
+                    return m
+        else:
+            # uncommon case, selectin load options were used
+            enabled_via_opt = set(enabled_via_opt)
+            enabled_via_opt_mappers = {e.mapper: e for e in enabled_via_opt}
+            for entity in enabled_via_opt.union([polymorphic_from]):
+                mapper = entity.mapper
+                for m in self._iterate_to_target_viawpoly(mapper):
+                    if (
+                        m.polymorphic_load == "selectin"
+                        or m in enabled_via_opt_mappers
+                    ):
+                        return enabled_via_opt_mappers.get(m, m)
+
+        return None
+
+    @util.preload_module("sqlalchemy.orm.strategy_options")
+    def _subclass_load_via_in(self, entity, polymorphic_from):
+        """Assemble a that can load the columns local to
+        this subclass as a SELECT with IN.
+
+        """
+
+        strategy_options = util.preloaded.orm_strategy_options
+
+        assert self.inherits
+
+        if self.polymorphic_on is not None:
+            polymorphic_prop = self._columntoproperty[self.polymorphic_on]
+            keep_props = set([polymorphic_prop] + self._identity_key_props)
+        else:
+            keep_props = set(self._identity_key_props)
+
+        disable_opt = strategy_options.Load(entity)
+        enable_opt = strategy_options.Load(entity)
+
+        classes_to_include = {self}
+        m: Optional[Mapper[Any]] = self.inherits
+        while (
+            m is not None
+            and m is not polymorphic_from
+            and m.polymorphic_load == "selectin"
+        ):
+            classes_to_include.add(m)
+            m = m.inherits
+
+        for prop in self.column_attrs + self.relationships:
+            # skip prop keys that are not instrumented on the mapped class.
+            # this is primarily the "_sa_polymorphic_on" property that gets
+            # created for an ad-hoc polymorphic_on SQL expression, issue #8704
+            if prop.key not in self.class_manager:
+                continue
+
+            if prop.parent in classes_to_include or prop in keep_props:
+                # "enable" options, to turn on the properties that we want to
+                # load by default (subject to options from the query)
+                if not isinstance(prop, StrategizedProperty):
+                    continue
+
+                enable_opt = enable_opt._set_generic_strategy(
+                    # convert string name to an attribute before passing
+                    # to loader strategy.   note this must be in terms
+                    # of given entity, such as AliasedClass, etc.
+                    (getattr(entity.entity_namespace, prop.key),),
+                    dict(prop.strategy_key),
+                    _reconcile_to_other=True,
+                )
+            else:
+                # "disable" options, to turn off the properties from the
+                # superclass that we *don't* want to load, applied after
+                # the options from the query to override them
+                disable_opt = disable_opt._set_generic_strategy(
+                    # convert string name to an attribute before passing
+                    # to loader strategy.   note this must be in terms
+                    # of given entity, such as AliasedClass, etc.
+                    (getattr(entity.entity_namespace, prop.key),),
+                    {"do_nothing": True},
+                    _reconcile_to_other=False,
+                )
+
+        primary_key = [
+            sql_util._deep_annotate(pk, {"_orm_adapt": True})
+            for pk in self.primary_key
+        ]
+
+        in_expr: ColumnElement[Any]
+
+        if len(primary_key) > 1:
+            in_expr = sql.tuple_(*primary_key)
+        else:
+            in_expr = primary_key[0]
+
+        if entity.is_aliased_class:
+            assert entity.mapper is self
+
+            q = sql.select(entity).set_label_style(
+                LABEL_STYLE_TABLENAME_PLUS_COL
+            )
+
+            in_expr = entity._adapter.traverse(in_expr)
+            primary_key = [entity._adapter.traverse(k) for k in primary_key]
+            q = q.where(
+                in_expr.in_(sql.bindparam("primary_keys", expanding=True))
+            ).order_by(*primary_key)
+        else:
+            q = sql.select(self).set_label_style(
+                LABEL_STYLE_TABLENAME_PLUS_COL
+            )
+            q = q.where(
+                in_expr.in_(sql.bindparam("primary_keys", expanding=True))
+            ).order_by(*primary_key)
+
+        return q, enable_opt, disable_opt
+
+    @HasMemoized.memoized_attribute
+    def _subclass_load_via_in_mapper(self):
+        # the default is loading this mapper against the basemost mapper
+        return self._subclass_load_via_in(self, self.base_mapper)
+
+    def cascade_iterator(
+        self,
+        type_: str,
+        state: InstanceState[_O],
+        halt_on: Optional[Callable[[InstanceState[Any]], bool]] = None,
+    ) -> Iterator[
+        Tuple[object, Mapper[Any], InstanceState[Any], _InstanceDict]
+    ]:
+        r"""Iterate each element and its mapper in an object graph,
+        for all relationships that meet the given cascade rule.
+
+        :param type\_:
+          The name of the cascade rule (i.e. ``"save-update"``, ``"delete"``,
+          etc.).
+
+          .. note::  the ``"all"`` cascade is not accepted here.  For a generic
+             object traversal function, see :ref:`faq_walk_objects`.
+
+        :param state:
+          The lead InstanceState.  Child items will be processed per
+          the relationships defined for this object's mapper.
+
+        :return: the method yields individual object instances.
+
+        .. seealso::
+
+            :ref:`unitofwork_cascades`
+
+            :ref:`faq_walk_objects` - illustrates a generic function to
+            traverse all objects without relying on cascades.
+
+        """
+        visited_states: Set[InstanceState[Any]] = set()
+        prp, mpp = object(), object()
+
+        assert state.mapper.isa(self)
+
+        # this is actually a recursive structure, fully typing it seems
+        # a little too difficult for what it's worth here
+        visitables: Deque[
+            Tuple[
+                Deque[Any],
+                object,
+                Optional[InstanceState[Any]],
+                Optional[_InstanceDict],
+            ]
+        ]
+
+        visitables = deque(
+            [(deque(state.mapper._props.values()), prp, state, state.dict)]
+        )
+
+        while visitables:
+            iterator, item_type, parent_state, parent_dict = visitables[-1]
+            if not iterator:
+                visitables.pop()
+                continue
+
+            if item_type is prp:
+                prop = iterator.popleft()
+                if not prop.cascade or type_ not in prop.cascade:
+                    continue
+                assert parent_state is not None
+                assert parent_dict is not None
+                queue = deque(
+                    prop.cascade_iterator(
+                        type_,
+                        parent_state,
+                        parent_dict,
+                        visited_states,
+                        halt_on,
+                    )
+                )
+                if queue:
+                    visitables.append((queue, mpp, None, None))
+            elif item_type is mpp:
+                (
+                    instance,
+                    instance_mapper,
+                    corresponding_state,
+                    corresponding_dict,
+                ) = iterator.popleft()
+                yield (
+                    instance,
+                    instance_mapper,
+                    corresponding_state,
+                    corresponding_dict,
+                )
+                visitables.append(
+                    (
+                        deque(instance_mapper._props.values()),
+                        prp,
+                        corresponding_state,
+                        corresponding_dict,
+                    )
+                )
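+
+    # Illustrative sketch (assumes a hypothetical ``order`` object whose
+    # relationships carry the "save-update" cascade; not part of this
+    # module):
+    #
+    #     from sqlalchemy import inspect
+    #
+    #     state = inspect(order)
+    #     for obj, obj_mapper, obj_state, obj_dict in (
+    #         state.mapper.cascade_iterator("save-update", state)
+    #     ):
+    #         print(obj)  # each object reachable via the cascade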
+
+    @HasMemoized.memoized_attribute
+    def _compiled_cache(self):
+        return util.LRUCache(self._compiled_cache_size)
+
+    @HasMemoized.memoized_attribute
+    def _multiple_persistence_tables(self):
+        return len(self.tables) > 1
+
+    @HasMemoized.memoized_attribute
+    def _sorted_tables(self):
+        table_to_mapper: Dict[TableClause, Mapper[Any]] = {}
+
+        for mapper in self.base_mapper.self_and_descendants:
+            for t in mapper.tables:
+                table_to_mapper.setdefault(t, mapper)
+
+        extra_dependencies = []
+        for table, mapper in table_to_mapper.items():
+            super_ = mapper.inherits
+            if super_:
+                extra_dependencies.extend(
+                    [(super_table, table) for super_table in super_.tables]
+                )
+
+        def skip(fk):
+            # attempt to skip dependencies that are not
+            # significant to the inheritance chain
+            # for two tables that are related by inheritance.
+            # while that dependency may be important, it's technically
+            # not what we mean to sort on here.
+            parent = table_to_mapper.get(fk.parent.table)
+            dep = table_to_mapper.get(fk.column.table)
+            if (
+                parent is not None
+                and dep is not None
+                and dep is not parent
+                and dep.inherit_condition is not None
+            ):
+                cols = set(sql_util._find_columns(dep.inherit_condition))
+                if parent.inherit_condition is not None:
+                    cols = cols.union(
+                        sql_util._find_columns(parent.inherit_condition)
+                    )
+                    return fk.parent not in cols and fk.column not in cols
+                else:
+                    return fk.parent not in cols
+            return False
+
+        sorted_ = sql_util.sort_tables(
+            table_to_mapper,
+            skip_fn=skip,
+            extra_dependencies=extra_dependencies,
+        )
+
+        ret = util.OrderedDict()
+        for t in sorted_:
+            ret[t] = table_to_mapper[t]
+        return ret
+
+    def _memo(self, key: Any, callable_: Callable[[], _T]) -> _T:
+        if key in self._memoized_values:
+            return cast(_T, self._memoized_values[key])
+        else:
+            self._memoized_values[key] = value = callable_()
+            return value
+
+    @util.memoized_property
+    def _table_to_equated(self):
+        """memoized map of tables to collections of columns to be
+        synchronized upwards to the base mapper."""
+
+        result: util.defaultdict[
+            Table,
+            List[
+                Tuple[
+                    Mapper[Any],
+                    List[Tuple[ColumnElement[Any], ColumnElement[Any]]],
+                ]
+            ],
+        ] = util.defaultdict(list)
+
+        def set_union(x, y):
+            return x.union(y)
+
+        for table in self._sorted_tables:
+            cols = set(table.c)
+
+            for m in self.iterate_to_root():
+                if m._inherits_equated_pairs and cols.intersection(
+                    reduce(
+                        set_union,
+                        [l.proxy_set for l, r in m._inherits_equated_pairs],
+                    )
+                ):
+                    result[table].append((m, m._inherits_equated_pairs))
+
+        return result
+
+
+class _OptGetColumnsNotAvailable(Exception):
+    pass
+
+
+def configure_mappers() -> None:
+    """Initialize the inter-mapper relationships of all mappers that
+    have been constructed thus far across all :class:`_orm.registry`
+    collections.
+
+    The configure step is used to reconcile and initialize the
+    :func:`_orm.relationship` linkages between mapped classes, as well as to
+    invoke configuration events such as the
+    :meth:`_orm.MapperEvents.before_configured` and
+    :meth:`_orm.MapperEvents.after_configured`, which may be used by ORM
+    extensions or user-defined extension hooks.
+
+    Mapper configuration is normally invoked automatically, the first time
+    mappings from a particular :class:`_orm.registry` are used, as well as
+    whenever mappings are used and additional not-yet-configured mappers have
+    been constructed.  The automatic configuration process, however, is local
+    only to the :class:`_orm.registry` involving the target mapper and any
+    related :class:`_orm.registry` objects on which it may depend; this is
+    equivalent to invoking the :meth:`_orm.registry.configure` method
+    on a particular :class:`_orm.registry`.
+
+    By contrast, the :func:`_orm.configure_mappers` function will invoke the
+    configuration process on all :class:`_orm.registry` objects that
+    exist in memory, and may be useful for scenarios where many individual
+    :class:`_orm.registry` objects that are nonetheless interrelated are
+    in use.
+
+    .. versionchanged:: 1.4
+
+        As of SQLAlchemy 1.4.0b2, this function works on a
+        per-:class:`_orm.registry` basis, locating all :class:`_orm.registry`
+        objects present and invoking the :meth:`_orm.registry.configure` method
+        on each. The :meth:`_orm.registry.configure` method may be preferred to
+        limit the configuration of mappers to those local to a particular
+        :class:`_orm.registry` and/or declarative base class.
+
+    Points at which automatic configuration is invoked include when a mapped
+    class is instantiated into an instance, as well as when ORM queries
+    are emitted using :meth:`.Session.query` or :meth:`_orm.Session.execute`
+    with an ORM-enabled statement.
+
+    The mapper configure process, whether invoked by
+    :func:`_orm.configure_mappers` or from :meth:`_orm.registry.configure`,
+    provides several event hooks that can be used to augment the mapper
+    configuration step. These hooks include:
+
+    * :meth:`.MapperEvents.before_configured` - called once before
+      :func:`.configure_mappers` or :meth:`_orm.registry.configure` does any
+      work; this can be used to establish additional options, properties, or
+      related mappings before the operation proceeds.
+
+    * :meth:`.MapperEvents.mapper_configured` - called as each individual
+      :class:`_orm.Mapper` is configured within the process; will include all
+      mapper state except for backrefs set up by other mappers that are still
+      to be configured.
+
+    * :meth:`.MapperEvents.after_configured` - called once after
+      :func:`.configure_mappers` or :meth:`_orm.registry.configure` is
+      complete; at this stage, all :class:`_orm.Mapper` objects that fall
+      within the scope of the configuration operation will be fully configured.
+      Note that the calling application may still have other mappings that
+      haven't been produced yet, such as if they are in modules as yet
+      unimported, and may also have mappings that are still to be configured,
+      if they are in other :class:`_orm.registry` collections not part of the
+      current scope of configuration.
+
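+    A minimal usage sketch, assuming an application package
+    ``myapp.models`` (a hypothetical name) that defines all mapped
+    classes::
+
+        from sqlalchemy.orm import configure_mappers
+
+        # import all modules that define mapped classes, so that their
+        # registries exist in memory
+        import myapp.models
+
+        # resolve relationship() linkages across all registries
+        configure_mappers()
+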
+    """
+
+    _configure_registries(_all_registries(), cascade=True)
+
+
+def _configure_registries(
+    registries: Set[_RegistryType], cascade: bool
+) -> None:
+    for reg in registries:
+        if reg._new_mappers:
+            break
+    else:
+        return
+
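+    # double-checked locking: the unlocked scan above is an optimization;
+    # the same check is repeated under _CONFIGURE_MUTEX before any
+    # configuration work proceeds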
+    with _CONFIGURE_MUTEX:
+        global _already_compiling
+        if _already_compiling:
+            return
+        _already_compiling = True
+        try:
+            # double-check inside mutex
+            for reg in registries:
+                if reg._new_mappers:
+                    break
+            else:
+                return
+
+            Mapper.dispatch._for_class(Mapper).before_configured()  # type: ignore # noqa: E501
+            # initialize properties on all mappers
+            # note that _mapper_registry is unordered, which
+            # may randomly conceal/reveal issues related to
+            # the order of mapper compilation
+
+            _do_configure_registries(registries, cascade)
+        finally:
+            _already_compiling = False
+    Mapper.dispatch._for_class(Mapper).after_configured()  # type: ignore
+
+
+@util.preload_module("sqlalchemy.orm.decl_api")
+def _do_configure_registries(
+    registries: Set[_RegistryType], cascade: bool
+) -> None:
+    registry = util.preloaded.orm_decl_api.registry
+
+    orig = set(registries)
+
+    for reg in registry._recurse_with_dependencies(registries):
+        has_skip = False
+
+        for mapper in reg._mappers_to_configure():
+            run_configure = None
+
+            for fn in mapper.dispatch.before_mapper_configured:
+                run_configure = fn(mapper, mapper.class_)
+                if run_configure is EXT_SKIP:
+                    has_skip = True
+                    break
+            if run_configure is EXT_SKIP:
+                continue
+
+            if getattr(mapper, "_configure_failed", False):
+                e = sa_exc.InvalidRequestError(
+                    "One or more mappers failed to initialize - "
+                    "can't proceed with initialization of other "
+                    "mappers. Triggering mapper: '%s'. "
+                    "Original exception was: %s"
+                    % (mapper, mapper._configure_failed)
+                )
+                e._configure_failed = mapper._configure_failed  # type: ignore
+                raise e
+
+            if not mapper.configured:
+                try:
+                    mapper._post_configure_properties()
+                    mapper._expire_memoizations()
+                    mapper.dispatch.mapper_configured(mapper, mapper.class_)
+                except Exception:
+                    exc = sys.exc_info()[1]
+                    if not hasattr(exc, "_configure_failed"):
+                        mapper._configure_failed = exc
+                    raise
+        if not has_skip:
+            reg._new_mappers = False
+
+        if not cascade and reg._dependencies.difference(orig):
+            raise sa_exc.InvalidRequestError(
+                "configure was called with cascade=False but "
+                "additional registries remain"
+            )
+
+
+@util.preload_module("sqlalchemy.orm.decl_api")
+def _dispose_registries(registries: Set[_RegistryType], cascade: bool) -> None:
+    registry = util.preloaded.orm_decl_api.registry
+
+    orig = set(registries)
+
+    for reg in registry._recurse_with_dependents(registries):
+        if not cascade and reg._dependents.difference(orig):
+            raise sa_exc.InvalidRequestError(
+                "Registry has dependent registries that are not disposed; "
+                "pass cascade=True to clear these also"
+            )
+
+        while reg._managers:
+            try:
+                manager, _ = reg._managers.popitem()
+            except KeyError:
+                # guard against a race between the while-loop check and
+                # popitem
+                pass
+            else:
+                reg._dispose_manager_and_mapper(manager)
+
+        reg._non_primary_mappers.clear()
+        reg._dependents.clear()
+        for dep in reg._dependencies:
+            dep._dependents.discard(reg)
+        reg._dependencies.clear()
+        # this wasn't done in the 1.3 clear_mappers() and in fact it
+        # was a bug, as it could cause configure_mappers() to invoke
+        # the "before_configured" event even though mappers had all been
+        # disposed.
+        reg._new_mappers = False
+
+
+def reconstructor(fn):
+    """Decorate a method as the 'reconstructor' hook.
+
+    Designates a single method as the "reconstructor", an ``__init__``-like
+    method that will be called by the ORM after the instance has been
+    loaded from the database or otherwise reconstituted.
+
+    .. tip::
+
+        The :func:`_orm.reconstructor` decorator makes use of the
+        :meth:`_orm.InstanceEvents.load` event hook, which can be
+        used directly.
+
+    The reconstructor will be invoked with no arguments.  Scalar
+    (non-collection) database-mapped attributes of the instance will
+    be available for use within the function.  Eagerly-loaded
+    collections are generally not yet available and will usually only
+    contain the first element.  ORM state changes made to objects at
+    this stage will not be recorded for the next flush() operation, so
+    the activity within a reconstructor should be conservative.
+
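+    A minimal sketch, assuming ``Base`` is a declarative base defined
+    elsewhere::
+
+        from sqlalchemy import Integer
+        from sqlalchemy.orm import mapped_column, reconstructor
+
+        class User(Base):
+            __tablename__ = "user"
+
+            id = mapped_column(Integer, primary_key=True)
+
+            @reconstructor
+            def init_on_load(self):
+                # re-establish non-mapped, transient state after a load
+                self._cache = {}
+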
+    .. seealso::
+
+        :meth:`.InstanceEvents.load`
+
+    """
+    fn.__sa_reconstructor__ = True
+    return fn
+
+
+def validates(
+    *names: str, include_removes: bool = False, include_backrefs: bool = True
+) -> Callable[[_Fn], _Fn]:
+    r"""Decorate a method as a 'validator' for one or more named properties.
+
+    Designates a method as a validator, a method which receives the
+    name of the attribute as well as a value to be assigned, or in the
+    case of a collection, the value to be added to the collection.
+    The function can then raise validation exceptions to halt the
+    process from continuing (where Python's built-in ``ValueError``
+    and ``AssertionError`` exceptions are reasonable choices), or can
+    modify or replace the value before proceeding. The function should
+    otherwise return the given value.
+
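+    A minimal sketch, assuming ``Base`` is a declarative base defined
+    elsewhere::
+
+        from sqlalchemy import Integer, String
+        from sqlalchemy.orm import mapped_column, validates
+
+        class User(Base):
+            __tablename__ = "user"
+
+            id = mapped_column(Integer, primary_key=True)
+            email = mapped_column(String)
+
+            @validates("email")
+            def validate_email(self, key, value):
+                # raise to halt the assignment; return the (possibly
+                # modified) value to let it proceed
+                if "@" not in value:
+                    raise ValueError(f"invalid email address: {value!r}")
+                return value
+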
+    Note that a validator for a collection **cannot** issue a load of that
+    collection within the validation routine - this usage raises
+    an assertion to avoid recursion overflows.  This is a reentrant
+    condition which is not supported.
+
+    :param \*names: list of attribute names to be validated.
+    :param include_removes: if True, "remove" events will be
+     sent as well - the validation function must accept an additional
+     argument "is_remove" which will be a boolean.
+
+    :param include_backrefs: defaults to ``True``; if ``False``, the
+     validation function will not emit if the originator is an attribute
+     event related via a backref.  This can be used for bi-directional
+     :func:`.validates` usage where only one validator should emit per
+     attribute operation.
+
+     .. versionchanged:: 2.0.16 This parameter inadvertently defaulted to
+        ``False`` for releases 2.0.0 through 2.0.15.  Its correct default
+        of ``True`` is restored in 2.0.16.
+
+    .. seealso::
+
+      :ref:`simple_validators` - usage examples for :func:`.validates`
+
+    """
+
+    def wrap(fn: _Fn) -> _Fn:
+        fn.__sa_validators__ = names  # type: ignore[attr-defined]
+        fn.__sa_validation_opts__ = {  # type: ignore[attr-defined]
+            "include_removes": include_removes,
+            "include_backrefs": include_backrefs,
+        }
+        return fn
+
+    return wrap
+
+
+def _event_on_load(state, ctx):
+    instrumenting_mapper = state.manager.mapper
+
+    if instrumenting_mapper._reconstructor:
+        instrumenting_mapper._reconstructor(state.obj())
+
+
+def _event_on_init(state, args, kwargs):
+    """Run init_instance hooks.
+
+    This also includes mapper compilation, normally not needed
+    here but helps with some piecemeal configuration
+    scenarios (such as in the ORM tutorial).
+
+    """
+
+    instrumenting_mapper = state.manager.mapper
+    if instrumenting_mapper:
+        instrumenting_mapper._check_configure()
+        if instrumenting_mapper._set_polymorphic_identity:
+            instrumenting_mapper._set_polymorphic_identity(state)
+
+
+class _ColumnMapping(Dict["ColumnElement[Any]", "MapperProperty[Any]"]):
+    """Error reporting helper for mapper._columntoproperty."""
+
+    __slots__ = ("mapper",)
+
+    def __init__(self, mapper):
+        # TODO: weakref would be a good idea here
+        self.mapper = mapper
+
+    def __missing__(self, column):
+        prop = self.mapper._props.get(column)
+        if prop:
+            raise orm_exc.UnmappedColumnError(
+                "Column '%s.%s' is not available, due to "
+                "conflicting property '%s':%r"
+                % (column.table.name, column.name, column.key, prop)
+            )
+        raise orm_exc.UnmappedColumnError(
+            "No column %s is configured on mapper %s..."
+            % (column, self.mapper)
+        )
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/path_registry.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/path_registry.py
new file mode 100644
index 00000000..388e4609
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/path_registry.py
@@ -0,0 +1,811 @@
+# orm/path_registry.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+"""Path tracking utilities, representing mapper graph traversals.
+
+"""
+
+from __future__ import annotations
+
+from functools import reduce
+from itertools import chain
+import logging
+import operator
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import Iterator
+from typing import List
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+from . import base as orm_base
+from ._typing import insp_is_mapper_property
+from .. import exc
+from .. import util
+from ..sql import visitors
+from ..sql.cache_key import HasCacheKey
+
+if TYPE_CHECKING:
+    from ._typing import _InternalEntityType
+    from .interfaces import StrategizedProperty
+    from .mapper import Mapper
+    from .relationships import RelationshipProperty
+    from .util import AliasedInsp
+    from ..sql.cache_key import _CacheKeyTraversalType
+    from ..sql.elements import BindParameter
+    from ..sql.visitors import anon_map
+    from ..util.typing import _LiteralStar
+    from ..util.typing import TypeGuard
+
+    def is_root(path: PathRegistry) -> TypeGuard[RootRegistry]: ...
+
+    def is_entity(path: PathRegistry) -> TypeGuard[AbstractEntityRegistry]: ...
+
+else:
+    is_root = operator.attrgetter("is_root")
+    is_entity = operator.attrgetter("is_entity")
+
+
+_SerializedPath = List[Any]
+_StrPathToken = str
+_PathElementType = Union[
+    _StrPathToken, "_InternalEntityType[Any]", "StrategizedProperty[Any]"
+]
+
+# the representation is in fact
+# a tuple with alternating:
+# [_InternalEntityType[Any], Union[str, StrategizedProperty[Any]],
+# _InternalEntityType[Any], Union[str, StrategizedProperty[Any]], ...]
+# this might someday be a tuple of 2-tuples instead, but paths can be
+# chopped at odd intervals as well so this is less flexible
+_PathRepresentation = Tuple[_PathElementType, ...]
+
+# NOTE: these names are counterintuitive since the sequence is 0-indexed:
+# the "_Odd" entries are at indexes 0, 2, 4, etc.
+_OddPathRepresentation = Sequence["_InternalEntityType[Any]"]
+_EvenPathRepresentation = Sequence[Union["StrategizedProperty[Any]", str]]
+
+
+log = logging.getLogger(__name__)
+
+
+def _unreduce_path(path: _SerializedPath) -> PathRegistry:
+    return PathRegistry.deserialize(path)
+
+
+_WILDCARD_TOKEN: _LiteralStar = "*"
+_DEFAULT_TOKEN = "_sa_default"
+
+
+class PathRegistry(HasCacheKey):
+    """Represent query load paths and registry functions.
+
+    Basically represents structures like:
+
+    (<User mapper>, "orders", <Order mapper>, "items", <Item mapper>)
+
+    These structures are generated by things like
+    query options (joinedload(), subqueryload(), etc.) and are
+    used to compose keys stored in the query._attributes dictionary
+    for various options.
+
+    They are then re-composed at query compile/result row time as
+    the query is formed and as rows are fetched, where they again
+    serve to compose keys to look up options in the context.attributes
+    dictionary, which is copied from query._attributes.
+
+    The path structure has a limited amount of caching, where each
+    "root" ultimately pulls from a fixed registry associated with
+    the first mapper, which also contains elements for each of its
+    property keys.  However, paths longer than two elements, which
+    are the exception rather than the rule, are generated on an
+    as-needed basis.
+
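+    As an illustrative sketch only (``User`` here is an assumed mapped
+    class with an ``addresses`` relationship), a path may be composed by
+    indexing::
+
+        from sqlalchemy import inspect
+
+        user_mapper = inspect(User)
+        path = PathRegistry.root[user_mapper][user_mapper.attrs.addresses]
+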
+    """
+
+    __slots__ = ()
+
+    is_token = False
+    is_root = False
+    has_entity = False
+    is_property = False
+    is_entity = False
+
+    is_unnatural: bool
+
+    path: _PathRepresentation
+    natural_path: _PathRepresentation
+    parent: Optional[PathRegistry]
+    root: RootRegistry
+
+    _cache_key_traversal: _CacheKeyTraversalType = [
+        ("path", visitors.ExtendedInternalTraversal.dp_has_cache_key_list)
+    ]
+
+    def __eq__(self, other: Any) -> bool:
+        try:
+            return other is not None and self.path == other._path_for_compare
+        except AttributeError:
+            util.warn(
+                "Comparison of PathRegistry to %r is not supported"
+                % (type(other))
+            )
+            return False
+
+    def __ne__(self, other: Any) -> bool:
+        try:
+            return other is None or self.path != other._path_for_compare
+        except AttributeError:
+            util.warn(
+                "Comparison of PathRegistry to %r is not supported"
+                % (type(other))
+            )
+            return True
+
+    @property
+    def _path_for_compare(self) -> Optional[_PathRepresentation]:
+        return self.path
+
+    def odd_element(self, index: int) -> _InternalEntityType[Any]:
+        return self.path[index]  # type: ignore
+
+    def set(self, attributes: Dict[Any, Any], key: Any, value: Any) -> None:
+        log.debug("set '%s' on path '%s' to '%s'", key, self, value)
+        attributes[(key, self.natural_path)] = value
+
+    def setdefault(
+        self, attributes: Dict[Any, Any], key: Any, value: Any
+    ) -> None:
+        log.debug("setdefault '%s' on path '%s' to '%s'", key, self, value)
+        attributes.setdefault((key, self.natural_path), value)
+
+    def get(
+        self, attributes: Dict[Any, Any], key: Any, value: Optional[Any] = None
+    ) -> Any:
+        key = (key, self.natural_path)
+        if key in attributes:
+            return attributes[key]
+        else:
+            return value
+
+    def __len__(self) -> int:
+        return len(self.path)
+
+    def __hash__(self) -> int:
+        return id(self)
+
+    @overload
+    def __getitem__(self, entity: _StrPathToken) -> TokenRegistry: ...
+
+    @overload
+    def __getitem__(self, entity: int) -> _PathElementType: ...
+
+    @overload
+    def __getitem__(self, entity: slice) -> _PathRepresentation: ...
+
+    @overload
+    def __getitem__(
+        self, entity: _InternalEntityType[Any]
+    ) -> AbstractEntityRegistry: ...
+
+    @overload
+    def __getitem__(
+        self, entity: StrategizedProperty[Any]
+    ) -> PropRegistry: ...
+
+    def __getitem__(
+        self,
+        entity: Union[
+            _StrPathToken,
+            int,
+            slice,
+            _InternalEntityType[Any],
+            StrategizedProperty[Any],
+        ],
+    ) -> Union[
+        TokenRegistry,
+        _PathElementType,
+        _PathRepresentation,
+        PropRegistry,
+        AbstractEntityRegistry,
+    ]:
+        raise NotImplementedError()
+
+    # TODO: what are we using this for?
+    @property
+    def length(self) -> int:
+        return len(self.path)
+
+    def pairs(
+        self,
+    ) -> Iterator[
+        Tuple[_InternalEntityType[Any], Union[str, StrategizedProperty[Any]]]
+    ]:
+        odd_path = cast(_OddPathRepresentation, self.path)
+        even_path = cast(_EvenPathRepresentation, odd_path)
+        for i in range(0, len(odd_path), 2):
+            yield odd_path[i], even_path[i + 1]
+
+    def contains_mapper(self, mapper: Mapper[Any]) -> bool:
+        _m_path = cast(_OddPathRepresentation, self.path)
+        for path_mapper in [_m_path[i] for i in range(0, len(_m_path), 2)]:
+            if path_mapper.mapper.isa(mapper):
+                return True
+        return False
+
+    def contains(self, attributes: Dict[Any, Any], key: Any) -> bool:
+        return (key, self.path) in attributes
+
+    def __reduce__(self) -> Any:
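+        # serialize() produces a list of (class, attribute-key) pairs;
+        # _unreduce_path() feeds this back to deserialize(), which is
+        # what makes PathRegistry instances picklable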
+        return _unreduce_path, (self.serialize(),)
+
+    @classmethod
+    def _serialize_path(cls, path: _PathRepresentation) -> _SerializedPath:
+        _m_path = cast(_OddPathRepresentation, path)
+        _p_path = cast(_EvenPathRepresentation, path)
+
+        return list(
+            zip(
+                tuple(
+                    m.class_ if (m.is_mapper or m.is_aliased_class) else str(m)
+                    for m in [_m_path[i] for i in range(0, len(_m_path), 2)]
+                ),
+                tuple(
+                    p.key if insp_is_mapper_property(p) else str(p)
+                    for p in [_p_path[i] for i in range(1, len(_p_path), 2)]
+                )
+                + (None,),
+            )
+        )
+
+    @classmethod
+    def _deserialize_path(cls, path: _SerializedPath) -> _PathRepresentation:
+        def _deserialize_mapper_token(mcls: Any) -> Any:
+            return (
+                # note: we likely don't want configure=True here; however,
+                # this is maintained at the moment for backwards compatibility
+                orm_base._inspect_mapped_class(mcls, configure=True)
+                if mcls not in PathToken._intern
+                else PathToken._intern[mcls]
+            )
+
+        def _deserialize_key_token(mcls: Any, key: Any) -> Any:
+            if key is None:
+                return None
+            elif key in PathToken._intern:
+                return PathToken._intern[key]
+            else:
+                mp = orm_base._inspect_mapped_class(mcls, configure=True)
+                assert mp is not None
+                return mp.attrs[key]
+
+        p = tuple(
+            chain(
+                *[
+                    (
+                        _deserialize_mapper_token(mcls),
+                        _deserialize_key_token(mcls, key),
+                    )
+                    for mcls, key in path
+                ]
+            )
+        )
+        if p and p[-1] is None:
+            p = p[0:-1]
+        return p
+
+    def serialize(self) -> _SerializedPath:
+        path = self.path
+        return self._serialize_path(path)
+
+    @classmethod
+    def deserialize(cls, path: _SerializedPath) -> PathRegistry:
+        assert path is not None
+        p = cls._deserialize_path(path)
+        return cls.coerce(p)
+
+    @overload
+    @classmethod
+    def per_mapper(cls, mapper: Mapper[Any]) -> CachingEntityRegistry: ...
+
+    @overload
+    @classmethod
+    def per_mapper(cls, mapper: AliasedInsp[Any]) -> SlotsEntityRegistry: ...
+
+    @classmethod
+    def per_mapper(
+        cls, mapper: _InternalEntityType[Any]
+    ) -> AbstractEntityRegistry:
+        if mapper.is_mapper:
+            return CachingEntityRegistry(cls.root, mapper)
+        else:
+            return SlotsEntityRegistry(cls.root, mapper)
+
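+    # usage sketch, assuming ``User`` is a mapped class: coerce() rebuilds a
+    # PathRegistry from a raw alternating (entity, property, ...) tuple, e.g.
+    #
+    #   PathRegistry.coerce(
+    #       (inspect(User), inspect(User).attrs.addresses)
+    #   )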
+    @classmethod
+    def coerce(cls, raw: _PathRepresentation) -> PathRegistry:
+        def _red(prev: PathRegistry, next_: _PathElementType) -> PathRegistry:
+            return prev[next_]
+
+        # can't quite get mypy to appreciate this one :)
+        return reduce(_red, raw, cls.root)  # type: ignore
+
+    def __add__(self, other: PathRegistry) -> PathRegistry:
+        def _red(prev: PathRegistry, next_: _PathElementType) -> PathRegistry:
+            return prev[next_]
+
+        return reduce(_red, other.path, self)
+
+    def __str__(self) -> str:
+        return f"ORM Path[{' -> '.join(str(elem) for elem in self.path)}]"
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({self.path!r})"
+
+
+class CreatesToken(PathRegistry):
+    __slots__ = ()
+
+    is_aliased_class: bool
+    is_root: bool
+
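+    # tokens take the two-part form "<prefix>:<token>", where the second
+    # part is either the wildcard token "*" or the default token
+    # "_sa_default", e.g. "relationship:*"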
+    def token(self, token: _StrPathToken) -> TokenRegistry:
+        if token.endswith(f":{_WILDCARD_TOKEN}"):
+            return TokenRegistry(self, token)
+        elif token.endswith(f":{_DEFAULT_TOKEN}"):
+            return TokenRegistry(self.root, token)
+        else:
+            raise exc.ArgumentError(f"invalid token: {token}")
+
+
+class RootRegistry(CreatesToken):
+    """Root registry, defers to mappers so that
+    paths are maintained per-root-mapper.
+
+    """
+
+    __slots__ = ()
+
+    inherit_cache = True
+
+    path = natural_path = ()
+    has_entity = False
+    is_aliased_class = False
+    is_root = True
+    is_unnatural = False
+
+    def _getitem(
+        self, entity: Any
+    ) -> Union[TokenRegistry, AbstractEntityRegistry]:
+        if entity in PathToken._intern:
+            if TYPE_CHECKING:
+                assert isinstance(entity, _StrPathToken)
+            return TokenRegistry(self, PathToken._intern[entity])
+        else:
+            try:
+                return entity._path_registry  # type: ignore
+            except AttributeError:
+                raise IndexError(
+                    f"invalid argument for RootRegistry.__getitem__: {entity}"
+                )
+
+    def _truncate_recursive(self) -> RootRegistry:
+        return self
+
+    if not TYPE_CHECKING:
+        __getitem__ = _getitem
+
+
+PathRegistry.root = RootRegistry()
+
+
+class PathToken(orm_base.InspectionAttr, HasCacheKey, str):
+    """cacheable string token"""
+
+    _intern: Dict[str, PathToken] = {}
+
+    def _gen_cache_key(
+        self, anon_map: anon_map, bindparams: List[BindParameter[Any]]
+    ) -> Tuple[Any, ...]:
+        return (str(self),)
+
+    @property
+    def _path_for_compare(self) -> Optional[_PathRepresentation]:
+        return None
+
+    @classmethod
+    def intern(cls, strvalue: str) -> PathToken:
+        if strvalue in cls._intern:
+            return cls._intern[strvalue]
+        else:
+            cls._intern[strvalue] = result = PathToken(strvalue)
+            return result
+
+
+class TokenRegistry(PathRegistry):
+    __slots__ = ("token", "parent", "path", "natural_path")
+
+    inherit_cache = True
+
+    token: _StrPathToken
+    parent: CreatesToken
+
+    def __init__(self, parent: CreatesToken, token: _StrPathToken):
+        token = PathToken.intern(token)
+
+        self.token = token
+        self.parent = parent
+        self.path = parent.path + (token,)
+        self.natural_path = parent.natural_path + (token,)
+
+    has_entity = False
+
+    is_token = True
+
+    def generate_for_superclasses(self) -> Iterator[PathRegistry]:
+        # NOTE: this method is no longer used.  consider removal
+        parent = self.parent
+        if is_root(parent):
+            yield self
+            return
+
+        if TYPE_CHECKING:
+            assert isinstance(parent, AbstractEntityRegistry)
+        if not parent.is_aliased_class:
+            for mp_ent in parent.mapper.iterate_to_root():
+                yield TokenRegistry(parent.parent[mp_ent], self.token)
+        elif (
+            parent.is_aliased_class
+            and cast(
+                "AliasedInsp[Any]",
+                parent.entity,
+            )._is_with_polymorphic
+        ):
+            yield self
+            for ent in cast(
+                "AliasedInsp[Any]", parent.entity
+            )._with_polymorphic_entities:
+                yield TokenRegistry(parent.parent[ent], self.token)
+        else:
+            yield self
+
+    def _generate_natural_for_superclasses(
+        self,
+    ) -> Iterator[_PathRepresentation]:
+        parent = self.parent
+        if is_root(parent):
+            yield self.natural_path
+            return
+
+        if TYPE_CHECKING:
+            assert isinstance(parent, AbstractEntityRegistry)
+        for mp_ent in parent.mapper.iterate_to_root():
+            yield TokenRegistry(parent.parent[mp_ent], self.token).natural_path
+        if (
+            parent.is_aliased_class
+            and cast(
+                "AliasedInsp[Any]",
+                parent.entity,
+            )._is_with_polymorphic
+        ):
+            yield self.natural_path
+            for ent in cast(
+                "AliasedInsp[Any]", parent.entity
+            )._with_polymorphic_entities:
+                yield (
+                    TokenRegistry(parent.parent[ent], self.token).natural_path
+                )
+        else:
+            yield self.natural_path
+
+    def _getitem(self, entity: Any) -> Any:
+        try:
+            return self.path[entity]
+        except TypeError as err:
+            raise IndexError(f"{entity}") from err
+
+    if not TYPE_CHECKING:
+        __getitem__ = _getitem
+
+
+class PropRegistry(PathRegistry):
+    __slots__ = (
+        "prop",
+        "parent",
+        "path",
+        "natural_path",
+        "has_entity",
+        "entity",
+        "mapper",
+        "_wildcard_path_loader_key",
+        "_default_path_loader_key",
+        "_loader_key",
+        "is_unnatural",
+    )
+    inherit_cache = True
+    is_property = True
+
+    prop: StrategizedProperty[Any]
+    mapper: Optional[Mapper[Any]]
+    entity: Optional[_InternalEntityType[Any]]
+
+    def __init__(
+        self, parent: AbstractEntityRegistry, prop: StrategizedProperty[Any]
+    ):
+
+        # restate this path in terms of the
+        # given StrategizedProperty's parent.
+        insp = cast("_InternalEntityType[Any]", parent[-1])
+        natural_parent: AbstractEntityRegistry = parent
+
+        # inherit "is_unnatural" from the parent
+        self.is_unnatural = parent.parent.is_unnatural or bool(
+            parent.mapper.inherits
+        )
+
+        if not insp.is_aliased_class or insp._use_mapper_path:  # type: ignore
+            parent = natural_parent = parent.parent[prop.parent]
+        elif (
+            insp.is_aliased_class
+            and insp.with_polymorphic_mappers
+            and prop.parent in insp.with_polymorphic_mappers
+        ):
+            subclass_entity: _InternalEntityType[Any] = parent[-1]._entity_for_mapper(prop.parent)  # type: ignore  # noqa: E501
+            parent = parent.parent[subclass_entity]
+
+            # when building a path where with_polymorphic() is in use,
+            # special logic to determine the "natural path" when subclass
+            # entities are used.
+            #
+            # here we are trying to distinguish between a path that starts
+            # on a with_polymorphic entity vs. one that starts on a
+            # normal entity that introduces a with_polymorphic() in the
+            # middle using of_type():
+            #
+            #  # as in test_polymorphic_rel->
+            #  #    test_subqueryload_on_subclass_uses_path_correctly
+            #  wp = with_polymorphic(RegularEntity, "*")
+            #  sess.query(wp).options(someload(wp.SomeSubEntity.foos))
+            #
+            # vs
+            #
+            #  # as in test_relationship->JoinedloadWPolyOfTypeContinued
+            #  wp = with_polymorphic(SomeFoo, "*")
+            #  sess.query(RegularEntity).options(
+            #       someload(RegularEntity.foos.of_type(wp))
+            #       .someload(wp.SubFoo.bar)
+            #   )
+            #
+            # in the former case, the path that Query generates, and that
+            # we want to match, will be in terms of the with_polymorphic at
+            # the beginning.  in the latter case, Query will generate simple
+            # paths that don't know about this with_polymorphic, so we must
+            # use a separate natural path.
+            #
+            #
+            if parent.parent:
+                natural_parent = parent.parent[subclass_entity.mapper]
+                self.is_unnatural = True
+            else:
+                natural_parent = parent
+        elif (
+            natural_parent.parent
+            and insp.is_aliased_class
+            and prop.parent  # this should always be the case here
+            is not insp.mapper
+            and insp.mapper.isa(prop.parent)
+        ):
+            natural_parent = parent.parent[prop.parent]
+
+        self.prop = prop
+        self.parent = parent
+        self.path = parent.path + (prop,)
+        self.natural_path = natural_parent.natural_path + (prop,)
+
+        self.has_entity = prop._links_to_entity
+        if prop._is_relationship:
+            if TYPE_CHECKING:
+                assert isinstance(prop, RelationshipProperty)
+            self.entity = prop.entity
+            self.mapper = prop.mapper
+        else:
+            self.entity = None
+            self.mapper = None
+
+        self._wildcard_path_loader_key = (
+            "loader",
+            parent.natural_path + self.prop._wildcard_token,
+        )
+        self._default_path_loader_key = self.prop._default_path_loader_key
+        self._loader_key = ("loader", self.natural_path)
+
+    def _truncate_recursive(self) -> PropRegistry:
+        earliest = None
+        for i, token in enumerate(reversed(self.path[:-1])):
+            if token is self.prop:
+                earliest = i
+
+        if earliest is None:
+            return self
+        else:
+            return self.coerce(self.path[0 : -(earliest + 1)])  # type: ignore
+
+    @property
+    def entity_path(self) -> AbstractEntityRegistry:
+        assert self.entity is not None
+        return self[self.entity]
+
+    def _getitem(
+        self, entity: Union[int, slice, _InternalEntityType[Any]]
+    ) -> Union[AbstractEntityRegistry, _PathElementType, _PathRepresentation]:
+        if isinstance(entity, (int, slice)):
+            return self.path[entity]
+        else:
+            return SlotsEntityRegistry(self, entity)
+
+    if not TYPE_CHECKING:
+        __getitem__ = _getitem
+
+
+class AbstractEntityRegistry(CreatesToken):
+    __slots__ = (
+        "key",
+        "parent",
+        "is_aliased_class",
+        "path",
+        "entity",
+        "natural_path",
+    )
+
+    has_entity = True
+    is_entity = True
+
+    parent: Union[RootRegistry, PropRegistry]
+    key: _InternalEntityType[Any]
+    entity: _InternalEntityType[Any]
+    is_aliased_class: bool
+
+    def __init__(
+        self,
+        parent: Union[RootRegistry, PropRegistry],
+        entity: _InternalEntityType[Any],
+    ):
+        self.key = entity
+        self.parent = parent
+        self.is_aliased_class = entity.is_aliased_class
+        self.entity = entity
+        self.path = parent.path + (entity,)
+
+        # the "natural path" is the path that we get when Query is traversing
+        # from the lead entities into the various relationships; it corresponds
+        # to the structure of mappers and relationships. when we are given a
+        # path that comes from loader options, as of 1.3 it can have ad-hoc
+        # with_polymorphic() and other AliasedInsp objects inside of it, which
+        # are usually not present in mappings.  So here we track both the
+        # "enhanced" path in self.path and the "natural" path that doesn't
+        # include those objects so these two traversals can be matched up.
+
+        # the test here for "(self.is_aliased_class or parent.is_unnatural)"
+        # is to avoid the more expensive conditional logic that follows if we
+        # know we don't have to do it.   This conditional could just as well
+        # be "if parent.path:", it just involves more function calls.
+        #
+        # This is basically the only place that the "is_unnatural" flag
+        # actually changes behavior.
+        if parent.path and (self.is_aliased_class or parent.is_unnatural):
+            # this is an infrequent code path used only for loader strategies
+            # that also make use of of_type().
+            if entity.mapper.isa(parent.natural_path[-1].mapper):  # type: ignore # noqa: E501
+                self.natural_path = parent.natural_path + (entity.mapper,)
+            else:
+                self.natural_path = parent.natural_path + (
+                    parent.natural_path[-1].entity,  # type: ignore
+                )
+        # it seems to make sense that since these paths get mixed up
+        # with statements that are cached or not, we should make
+        # sure the natural path is cacheable across different occurrences
+        # of equivalent AliasedClass objects.  however, so far this
+        # does not seem to be needed for whatever reason.
+        # elif not parent.path and self.is_aliased_class:
+        #     self.natural_path = (self.entity._generate_cache_key()[0], )
+        else:
+            self.natural_path = self.path
+
+    def _truncate_recursive(self) -> AbstractEntityRegistry:
+        return self.parent._truncate_recursive()[self.entity]
+
+    @property
+    def root_entity(self) -> _InternalEntityType[Any]:
+        return self.odd_element(0)
+
+    @property
+    def entity_path(self) -> PathRegistry:
+        return self
+
+    @property
+    def mapper(self) -> Mapper[Any]:
+        return self.entity.mapper
+
+    def __bool__(self) -> bool:
+        return True
+
+    def _getitem(
+        self, entity: Any
+    ) -> Union[_PathElementType, _PathRepresentation, PathRegistry]:
+        if isinstance(entity, (int, slice)):
+            return self.path[entity]
+        elif entity in PathToken._intern:
+            return TokenRegistry(self, PathToken._intern[entity])
+        else:
+            return PropRegistry(self, entity)
+
+    if not TYPE_CHECKING:
+        __getitem__ = _getitem
+
+
+class SlotsEntityRegistry(AbstractEntityRegistry):
+    # for aliased classes, return a lightweight version that creates
+    # no reference cycles
+    inherit_cache = True
+
+
+class _ERDict(Dict[Any, Any]):
+    def __init__(self, registry: CachingEntityRegistry):
+        self.registry = registry
+
+    def __missing__(self, key: Any) -> PropRegistry:
+        self[key] = item = PropRegistry(self.registry, key)
+
+        return item
+
+
+class CachingEntityRegistry(AbstractEntityRegistry):
+    # for long-lived mappers, return a dict-based caching
+    # version that creates reference cycles
+
+    __slots__ = ("_cache",)
+
+    inherit_cache = True
+
+    def __init__(
+        self,
+        parent: Union[RootRegistry, PropRegistry],
+        entity: _InternalEntityType[Any],
+    ):
+        super().__init__(parent, entity)
+        self._cache = _ERDict(self)
+
+    def pop(self, key: Any, default: Any) -> Any:
+        return self._cache.pop(key, default)
+
+    def _getitem(self, entity: Any) -> Any:
+        if isinstance(entity, (int, slice)):
+            return self.path[entity]
+        elif isinstance(entity, PathToken):
+            return TokenRegistry(self, entity)
+        else:
+            return self._cache[entity]
+
+    if not TYPE_CHECKING:
+        __getitem__ = _getitem
+
+
+if TYPE_CHECKING:
+
+    def path_is_entity(
+        path: PathRegistry,
+    ) -> TypeGuard[AbstractEntityRegistry]: ...
+
+    def path_is_property(path: PathRegistry) -> TypeGuard[PropRegistry]: ...
+
+else:
+    path_is_entity = operator.attrgetter("is_entity")
+    path_is_property = operator.attrgetter("is_property")
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/persistence.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/persistence.py
new file mode 100644
index 00000000..cbe8557a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/persistence.py
@@ -0,0 +1,1782 @@
+# orm/persistence.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+"""private module containing functions used to emit INSERT, UPDATE
+and DELETE statements on behalf of a :class:`_orm.Mapper` and its descending
+mappers.
+
+The functions here are called only by the unit of work functions
+in unitofwork.py.
+
+"""
+from __future__ import annotations
+
+from itertools import chain
+from itertools import groupby
+from itertools import zip_longest
+import operator
+
+from . import attributes
+from . import exc as orm_exc
+from . import loading
+from . import sync
+from .base import state_str
+from .. import exc as sa_exc
+from .. import future
+from .. import sql
+from .. import util
+from ..engine import cursor as _cursor
+from ..sql import operators
+from ..sql.elements import BooleanClauseList
+from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
+
+
+def save_obj(base_mapper, states, uowtransaction, single=False):
+    """Issue ``INSERT`` and/or ``UPDATE`` statements for a list
+    of objects.
+
+    This is called within the context of a UOWTransaction during a
+    flush operation, given a list of states to be flushed.  The
+    base mapper in an inheritance hierarchy handles the inserts/
+    updates for all descendant mappers.
+
+    """
+
+    # if batch=False, call save_obj separately for each object
+    if not single and not base_mapper.batch:
+        for state in _sort_states(base_mapper, states):
+            save_obj(base_mapper, [state], uowtransaction, single=True)
+        return
+
+    states_to_update = []
+    states_to_insert = []
+
+    for (
+        state,
+        dict_,
+        mapper,
+        connection,
+        has_identity,
+        row_switch,
+        update_version_id,
+    ) in _organize_states_for_save(base_mapper, states, uowtransaction):
+        if has_identity or row_switch:
+            states_to_update.append(
+                (state, dict_, mapper, connection, update_version_id)
+            )
+        else:
+            states_to_insert.append((state, dict_, mapper, connection))
+
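+    # proceed table-by-table through the inheritance hierarchy in
+    # dependency order, collecting parameter sets and emitting the
+    # UPDATE and INSERT statements for each table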
+    for table, mapper in base_mapper._sorted_tables.items():
+        if table not in mapper._pks_by_table:
+            continue
+        insert = _collect_insert_commands(table, states_to_insert)
+
+        update = _collect_update_commands(
+            uowtransaction, table, states_to_update
+        )
+
+        _emit_update_statements(
+            base_mapper,
+            uowtransaction,
+            mapper,
+            table,
+            update,
+        )
+
+        _emit_insert_statements(
+            base_mapper,
+            uowtransaction,
+            mapper,
+            table,
+            insert,
+        )
+
+    _finalize_insert_update_commands(
+        base_mapper,
+        uowtransaction,
+        chain(
+            (
+                (state, state_dict, mapper, connection, False)
+                for (state, state_dict, mapper, connection) in states_to_insert
+            ),
+            (
+                (state, state_dict, mapper, connection, True)
+                for (
+                    state,
+                    state_dict,
+                    mapper,
+                    connection,
+                    update_version_id,
+                ) in states_to_update
+            ),
+        ),
+    )
+
+
+def post_update(base_mapper, states, uowtransaction, post_update_cols):
+    """Issue UPDATE statements on behalf of a relationship() which
+    specifies post_update.
+
+    """
+
+    states_to_update = list(
+        _organize_states_for_post_update(base_mapper, states, uowtransaction)
+    )
+
+    for table, mapper in base_mapper._sorted_tables.items():
+        if table not in mapper._pks_by_table:
+            continue
+
+        update = (
+            (
+                state,
+                state_dict,
+                sub_mapper,
+                connection,
+                (
+                    mapper._get_committed_state_attr_by_column(
+                        state, state_dict, mapper.version_id_col
+                    )
+                    if mapper.version_id_col is not None
+                    else None
+                ),
+            )
+            for state, state_dict, sub_mapper, connection in states_to_update
+            if table in sub_mapper._pks_by_table
+        )
+
+        update = _collect_post_update_commands(
+            base_mapper, uowtransaction, table, update, post_update_cols
+        )
+
+        _emit_post_update_statements(
+            base_mapper,
+            uowtransaction,
+            mapper,
+            table,
+            update,
+        )
+
+
+def delete_obj(base_mapper, states, uowtransaction):
+    """Issue ``DELETE`` statements for a list of objects.
+
+    This is called within the context of a UOWTransaction during a
+    flush operation.
+
+    """
+
+    states_to_delete = list(
+        _organize_states_for_delete(base_mapper, states, uowtransaction)
+    )
+
+    table_to_mapper = base_mapper._sorted_tables
+
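+    # emit DELETEs in reverse table order, so that rows in dependent
+    # (child) tables are deleted before the rows they depend on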
+    for table in reversed(list(table_to_mapper.keys())):
+        mapper = table_to_mapper[table]
+        if table not in mapper._pks_by_table:
+            continue
+        elif mapper.inherits and mapper.passive_deletes:
+            continue
+
+        delete = _collect_delete_commands(
+            base_mapper, uowtransaction, table, states_to_delete
+        )
+
+        _emit_delete_statements(
+            base_mapper,
+            uowtransaction,
+            mapper,
+            table,
+            delete,
+        )
+
+    for (
+        state,
+        state_dict,
+        mapper,
+        connection,
+        update_version_id,
+    ) in states_to_delete:
+        mapper.dispatch.after_delete(mapper, connection, state)
+
+
+def _organize_states_for_save(base_mapper, states, uowtransaction):
+    """Make an initial pass across a set of states for INSERT or
+    UPDATE.
+
+    This includes splitting out into distinct lists for
+    each, calling before_insert/before_update, obtaining
+    key information for each state including its dictionary,
+    mapper, the connection to use for the execution per state,
+    and the identity flag.
+
+    """
+
+    for state, dict_, mapper, connection in _connections_for_states(
+        base_mapper, uowtransaction, states
+    ):
+        has_identity = bool(state.key)
+
+        instance_key = state.key or mapper._identity_key_from_state(state)
+
+        row_switch = update_version_id = None
+
+        # call before_XXX extensions
+        if not has_identity:
+            mapper.dispatch.before_insert(mapper, connection, state)
+        else:
+            mapper.dispatch.before_update(mapper, connection, state)
+
+        if mapper._validate_polymorphic_identity:
+            mapper._validate_polymorphic_identity(mapper, state, dict_)
+
+        # detect if we have a "pending" instance (i.e. has
+        # no instance_key attached to it), and another instance
+        # with the same identity key already exists as persistent.
+        # convert to an UPDATE if so.
+        if (
+            not has_identity
+            and instance_key in uowtransaction.session.identity_map
+        ):
+            instance = uowtransaction.session.identity_map[instance_key]
+            existing = attributes.instance_state(instance)
+
+            if not uowtransaction.was_already_deleted(existing):
+                if not uowtransaction.is_deleted(existing):
+                    util.warn(
+                        "New instance %s with identity key %s conflicts "
+                        "with persistent instance %s"
+                        % (state_str(state), instance_key, state_str(existing))
+                    )
+                else:
+                    base_mapper._log_debug(
+                        "detected row switch for identity %s.  "
+                        "will update %s, remove %s from "
+                        "transaction",
+                        instance_key,
+                        state_str(state),
+                        state_str(existing),
+                    )
+
+                    # remove the "delete" flag from the existing element
+                    uowtransaction.remove_state_actions(existing)
+                    row_switch = existing
+
+        if (has_identity or row_switch) and mapper.version_id_col is not None:
+            update_version_id = mapper._get_committed_state_attr_by_column(
+                row_switch if row_switch else state,
+                row_switch.dict if row_switch else dict_,
+                mapper.version_id_col,
+            )
+
+        yield (
+            state,
+            dict_,
+            mapper,
+            connection,
+            has_identity,
+            row_switch,
+            update_version_id,
+        )
+
+
+def _organize_states_for_post_update(base_mapper, states, uowtransaction):
+    """Make an initial pass across a set of states for UPDATE
+    corresponding to post_update.
+
+    This includes obtaining key information for each state
+    including its dictionary, mapper, the connection to use for
+    the execution per state.
+
+    """
+    return _connections_for_states(base_mapper, uowtransaction, states)
+
+
+def _organize_states_for_delete(base_mapper, states, uowtransaction):
+    """Make an initial pass across a set of states for DELETE.
+
+    This includes calling out before_delete and obtaining
+    key information for each state including its dictionary,
+    mapper, the connection to use for the execution per state.
+
+    """
+    for state, dict_, mapper, connection in _connections_for_states(
+        base_mapper, uowtransaction, states
+    ):
+        mapper.dispatch.before_delete(mapper, connection, state)
+
+        if mapper.version_id_col is not None:
+            update_version_id = mapper._get_committed_state_attr_by_column(
+                state, dict_, mapper.version_id_col
+            )
+        else:
+            update_version_id = None
+
+        yield (state, dict_, mapper, connection, update_version_id)
+
+
+def _collect_insert_commands(
+    table,
+    states_to_insert,
+    *,
+    bulk=False,
+    return_defaults=False,
+    render_nulls=False,
+    include_bulk_keys=(),
+):
+    """Identify sets of values to use in INSERT statements for a
+    list of states.
+
+    """
+    for state, state_dict, mapper, connection in states_to_insert:
+        if table not in mapper._pks_by_table:
+            continue
+
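+        # "params" receives plain values keyed by string column key;
+        # "value_params" receives SQL expressions keyed by Column, to be
+        # rendered into the INSERT statement directly rather than passed
+        # as bound parameters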
+        params = {}
+        value_params = {}
+
+        propkey_to_col = mapper._propkey_to_col[table]
+
+        eval_none = mapper._insert_cols_evaluating_none[table]
+
+        for propkey in set(propkey_to_col).intersection(state_dict):
+            value = state_dict[propkey]
+            col = propkey_to_col[propkey]
+            if value is None and col not in eval_none and not render_nulls:
+                continue
+            elif not bulk and (
+                hasattr(value, "__clause_element__")
+                or isinstance(value, sql.ClauseElement)
+            ):
+                value_params[col] = (
+                    value.__clause_element__()
+                    if hasattr(value, "__clause_element__")
+                    else value
+                )
+            else:
+                params[col.key] = value
+
+        if not bulk:
+            # for all the columns that have no default, for which we don't
+            # have a value, and where "None" is not a special value, add an
+            # explicit None to the INSERT.   This is a legacy behavior
+            # which might be worth removing, as it should not be necessary
+            # and also produces confusion, given that "missing" and None
+            # now have distinct meanings
+            for colkey in (
+                mapper._insert_cols_as_none[table]
+                .difference(params)
+                .difference([c.key for c in value_params])
+            ):
+                params[colkey] = None
+
+        if not bulk or return_defaults:
+            # params are in terms of Column key objects, so
+            # compare to pk_keys_by_table
+            has_all_pks = mapper._pk_keys_by_table[table].issubset(params)
+
+            if mapper.base_mapper._prefer_eager_defaults(
+                connection.dialect, table
+            ):
+                has_all_defaults = mapper._server_default_col_keys[
+                    table
+                ].issubset(params)
+            else:
+                has_all_defaults = True
+        else:
+            has_all_defaults = has_all_pks = True
+
+        if (
+            mapper.version_id_generator is not False
+            and mapper.version_id_col is not None
+            and mapper.version_id_col in mapper._cols_by_table[table]
+        ):
+            params[mapper.version_id_col.key] = mapper.version_id_generator(
+                None
+            )
+
+        if bulk:
+            if mapper._set_polymorphic_identity:
+                params.setdefault(
+                    mapper._polymorphic_attr_key, mapper.polymorphic_identity
+                )
+
+            if include_bulk_keys:
+                params.update((k, state_dict[k]) for k in include_bulk_keys)
+
+        yield (
+            state,
+            state_dict,
+            params,
+            mapper,
+            connection,
+            value_params,
+            has_all_pks,
+            has_all_defaults,
+        )
+
+
+def _collect_update_commands(
+    uowtransaction,
+    table,
+    states_to_update,
+    *,
+    bulk=False,
+    use_orm_update_stmt=None,
+    include_bulk_keys=(),
+):
+    """Identify sets of values to use in UPDATE statements for a
+    list of states.
+
+    This function works intricately with the history system
+    to determine exactly what values should be updated
+    as well as how the row should be matched within an UPDATE
+    statement.  Includes some tricky scenarios where the primary
+    key of an object might have been changed.
+
+    """
+
+    for (
+        state,
+        state_dict,
+        mapper,
+        connection,
+        update_version_id,
+    ) in states_to_update:
+        if table not in mapper._pks_by_table:
+            continue
+
+        pks = mapper._pks_by_table[table]
+
+        if use_orm_update_stmt is not None:
+            # TODO: ordered values, etc
+            value_params = use_orm_update_stmt._values
+        else:
+            value_params = {}
+
+        propkey_to_col = mapper._propkey_to_col[table]
+
+        if bulk:
+            # keys here are mapped attribute keys, so
+            # look at mapper attribute keys for pk
+            params = {
+                propkey_to_col[propkey].key: state_dict[propkey]
+                for propkey in set(propkey_to_col)
+                .intersection(state_dict)
+                .difference(mapper._pk_attr_keys_by_table[table])
+            }
+            has_all_defaults = True
+        else:
+            params = {}
+            for propkey in set(propkey_to_col).intersection(
+                state.committed_state
+            ):
+                value = state_dict[propkey]
+                col = propkey_to_col[propkey]
+
+                if hasattr(value, "__clause_element__") or isinstance(
+                    value, sql.ClauseElement
+                ):
+                    value_params[col] = (
+                        value.__clause_element__()
+                        if hasattr(value, "__clause_element__")
+                        else value
+                    )
+                # guard against values whose __eq__() returns non-boolean
+                # objects (hence the "is not True" test below)
+                elif (
+                    state.manager[propkey].impl.is_equal(
+                        value, state.committed_state[propkey]
+                    )
+                    is not True
+                ):
+                    params[col.key] = value
+
+            if mapper.base_mapper.eager_defaults is True:
+                has_all_defaults = (
+                    mapper._server_onupdate_default_col_keys[table]
+                ).issubset(params)
+            else:
+                has_all_defaults = True
+
+        if (
+            update_version_id is not None
+            and mapper.version_id_col in mapper._cols_by_table[table]
+        ):
+            if not bulk and not (params or value_params):
+                # HACK: check for history in other tables, in case the
+                # history is only in a different table than the one
+                # where the version_id_col is.  This logic was lost
+                # from 0.9 -> 1.0.0 and restored in 1.0.6.
+                for prop in mapper._columntoproperty.values():
+                    history = state.manager[prop.key].impl.get_history(
+                        state, state_dict, attributes.PASSIVE_NO_INITIALIZE
+                    )
+                    if history.added:
+                        break
+                else:
+                    # no net change, break
+                    continue
+
+            col = mapper.version_id_col
+            no_params = not params and not value_params
+            params[col._label] = update_version_id
+
+            if (
+                bulk or col.key not in params
+            ) and mapper.version_id_generator is not False:
+                val = mapper.version_id_generator(update_version_id)
+                params[col.key] = val
+            elif mapper.version_id_generator is False and no_params:
+                # no version id generator, no values set on the table,
+                # and version id wasn't manually incremented.
+                # set version id to itself so we get an UPDATE
+                # statement
+                params[col.key] = update_version_id
+
+        elif not (params or value_params):
+            continue
+
+        has_all_pks = True
+        expect_pk_cascaded = False
+        if bulk:
+            # keys here are mapped attribute keys, so
+            # look at mapper attribute keys for pk
+            pk_params = {
+                propkey_to_col[propkey]._label: state_dict.get(propkey)
+                for propkey in set(propkey_to_col).intersection(
+                    mapper._pk_attr_keys_by_table[table]
+                )
+            }
+            if util.NONE_SET.intersection(pk_params.values()):
+                raise sa_exc.InvalidRequestError(
+                    f"No primary key value supplied for column(s) "
+                    f"""{
+                        ', '.join(
+                            str(c) for c in pks if pk_params[c._label] is None
+                        )
+                    }; """
+                    "per-row ORM Bulk UPDATE by Primary Key requires that "
+                    "records contain primary key values",
+                    code="bupq",
+                )
+
+        else:
+            pk_params = {}
+            for col in pks:
+                propkey = mapper._columntoproperty[col].key
+
+                history = state.manager[propkey].impl.get_history(
+                    state, state_dict, attributes.PASSIVE_OFF
+                )
+
+                if history.added:
+                    if (
+                        not history.deleted
+                        or ("pk_cascaded", state, col)
+                        in uowtransaction.attributes
+                    ):
+                        expect_pk_cascaded = True
+                        pk_params[col._label] = history.added[0]
+                        params.pop(col.key, None)
+                    else:
+                        # else, use the old value to locate the row
+                        pk_params[col._label] = history.deleted[0]
+                        if col in value_params:
+                            has_all_pks = False
+                else:
+                    pk_params[col._label] = history.unchanged[0]
+                if pk_params[col._label] is None:
+                    raise orm_exc.FlushError(
+                        "Can't update table %s using NULL for primary "
+                        "key value on column %s" % (table, col)
+                    )
+
+        if include_bulk_keys:
+            params.update((k, state_dict[k]) for k in include_bulk_keys)
+
+        if params or value_params:
+            params.update(pk_params)
+            yield (
+                state,
+                state_dict,
+                params,
+                mapper,
+                connection,
+                value_params,
+                has_all_defaults,
+                has_all_pks,
+            )
+        elif expect_pk_cascaded:
+            # no UPDATE occurs on this table, but we expect that CASCADE rules
+            # have changed the primary key of the row; propagate this event to
+            # other columns that expect to have been modified. this normally
+            # occurs after the UPDATE is emitted; however, as no UPDATE
+            # will be emitted here, we invoke it explicitly
+            for m, equated_pairs in mapper._table_to_equated[table]:
+                sync.populate(
+                    state,
+                    m,
+                    state,
+                    m,
+                    equated_pairs,
+                    uowtransaction,
+                    mapper.passive_updates,
+                )
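+
+# Illustrative sketch: assuming a hypothetical mapping configured with a
+# version counter, e.g.::
+#
+#     class Item(Base):
+#         __tablename__ = "item"
+#         id = mapped_column(Integer, primary_key=True)
+#         version_id = mapped_column(Integer, nullable=False)
+#         name = mapped_column(String(50))
+#         __mapper_args__ = {"version_id_col": version_id}
+#
+# the params collected above for a modified Item carry both the newly
+# generated version value (keyed on col.key, rendered in SET) and the
+# expected current value (keyed on col._label, rendered in WHERE), so the
+# eventual statement looks roughly like::
+#
+#     UPDATE item SET name=?, version_id=? WHERE id=? AND version_id=?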
+
+
+def _collect_post_update_commands(
+    base_mapper, uowtransaction, table, states_to_update, post_update_cols
+):
+    """Identify sets of values to use in UPDATE statements for a
+    list of states within a post_update operation.
+
+    """
+
+    for (
+        state,
+        state_dict,
+        mapper,
+        connection,
+        update_version_id,
+    ) in states_to_update:
+        # assert table in mapper._pks_by_table
+
+        pks = mapper._pks_by_table[table]
+        params = {}
+        hasdata = False
+
+        for col in mapper._cols_by_table[table]:
+            if col in pks:
+                params[col._label] = mapper._get_state_attr_by_column(
+                    state, state_dict, col, passive=attributes.PASSIVE_OFF
+                )
+
+            elif col in post_update_cols or col.onupdate is not None:
+                prop = mapper._columntoproperty[col]
+                history = state.manager[prop.key].impl.get_history(
+                    state, state_dict, attributes.PASSIVE_NO_INITIALIZE
+                )
+                if history.added:
+                    value = history.added[0]
+                    params[col.key] = value
+                    hasdata = True
+        if hasdata:
+            if (
+                update_version_id is not None
+                and mapper.version_id_col in mapper._cols_by_table[table]
+            ):
+                col = mapper.version_id_col
+                params[col._label] = update_version_id
+
+                if (
+                    bool(state.key)
+                    and col.key not in params
+                    and mapper.version_id_generator is not False
+                ):
+                    val = mapper.version_id_generator(update_version_id)
+                    params[col.key] = val
+            yield state, state_dict, mapper, connection, params
+
+
+def _collect_delete_commands(
+    base_mapper, uowtransaction, table, states_to_delete
+):
+    """Identify values to use in DELETE statements for a list of
+    states to be deleted."""
+
+    for (
+        state,
+        state_dict,
+        mapper,
+        connection,
+        update_version_id,
+    ) in states_to_delete:
+        if table not in mapper._pks_by_table:
+            continue
+
+        params = {}
+        for col in mapper._pks_by_table[table]:
+            params[col.key] = value = (
+                mapper._get_committed_state_attr_by_column(
+                    state, state_dict, col
+                )
+            )
+            if value is None:
+                raise orm_exc.FlushError(
+                    "Can't delete from table %s "
+                    "using NULL for primary "
+                    "key value on column %s" % (table, col)
+                )
+
+        if (
+            update_version_id is not None
+            and mapper.version_id_col in mapper._cols_by_table[table]
+        ):
+            params[mapper.version_id_col.key] = update_version_id
+        yield params, connection
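+
+# Illustrative sketch: for the hypothetical versioned Item mapping above,
+# an Item with id=5 and a committed version_id of 2 that is marked for
+# deletion would yield params of approximately::
+#
+#     {"id": 5, "version_id": 2}
+#
+# i.e. committed primary key values keyed on col.key, plus the expected
+# version value when a version_id_col is present in this table.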
+
+
+def _emit_update_statements(
+    base_mapper,
+    uowtransaction,
+    mapper,
+    table,
+    update,
+    *,
+    bookkeeping=True,
+    use_orm_update_stmt=None,
+    enable_check_rowcount=True,
+):
+    """Emit UPDATE statements corresponding to value lists collected
+    by _collect_update_commands()."""
+
+    needs_version_id = (
+        mapper.version_id_col is not None
+        and mapper.version_id_col in mapper._cols_by_table[table]
+    )
+
+    execution_options = {"compiled_cache": base_mapper._compiled_cache}
+
+    def update_stmt(existing_stmt=None):
+        clauses = BooleanClauseList._construct_raw(operators.and_)
+
+        for col in mapper._pks_by_table[table]:
+            clauses._append_inplace(
+                col == sql.bindparam(col._label, type_=col.type)
+            )
+
+        if needs_version_id:
+            clauses._append_inplace(
+                mapper.version_id_col
+                == sql.bindparam(
+                    mapper.version_id_col._label,
+                    type_=mapper.version_id_col.type,
+                )
+            )
+
+        if existing_stmt is not None:
+            stmt = existing_stmt.where(clauses)
+        else:
+            stmt = table.update().where(clauses)
+        return stmt
+
+    if use_orm_update_stmt is not None:
+        cached_stmt = update_stmt(use_orm_update_stmt)
+
+    else:
+        cached_stmt = base_mapper._memo(("update", table), update_stmt)
+
+    for (
+        (connection, paramkeys, hasvalue, has_all_defaults, has_all_pks),
+        records,
+    ) in groupby(
+        update,
+        lambda rec: (
+            rec[4],  # connection
+            set(rec[2]),  # set of parameter keys
+            bool(rec[5]),  # whether or not we have "value" parameters
+            rec[6],  # has_all_defaults
+            rec[7],  # has all pks
+        ),
+    ):
+        rows = 0
+        records = list(records)
+
+        statement = cached_stmt
+
+        if use_orm_update_stmt is not None:
+            statement = statement._annotate(
+                {
+                    "_emit_update_table": table,
+                    "_emit_update_mapper": mapper,
+                }
+            )
+
+        return_defaults = False
+
+        if not has_all_pks:
+            statement = statement.return_defaults(*mapper._pks_by_table[table])
+            return_defaults = True
+
+        if (
+            bookkeeping
+            and not has_all_defaults
+            and mapper.base_mapper.eager_defaults is True
+            # change as of #8889 - if RETURNING is not going to be used
+            # anyway (applies to MySQL and MariaDB, which lack UPDATE
+            # RETURNING), ensure we can do an executemany UPDATE, which is
+            # more efficient
+            and table.implicit_returning
+            and connection.dialect.update_returning
+        ):
+            statement = statement.return_defaults(
+                *mapper._server_onupdate_default_cols[table]
+            )
+            return_defaults = True
+
+        if mapper._version_id_has_server_side_value:
+            statement = statement.return_defaults(mapper.version_id_col)
+            return_defaults = True
+
+        assert_singlerow = connection.dialect.supports_sane_rowcount
+
+        assert_multirow = (
+            assert_singlerow
+            and connection.dialect.supports_sane_multi_rowcount
+        )
+
+        # change as of #8889 - if RETURNING is not going to be used anyway
+        # (applies to MySQL and MariaDB, which lack UPDATE RETURNING),
+        # ensure we can do an executemany UPDATE, which is more efficient
+        allow_executemany = not return_defaults and not needs_version_id
+
+        if hasvalue:
+            for (
+                state,
+                state_dict,
+                params,
+                mapper,
+                connection,
+                value_params,
+                has_all_defaults,
+                has_all_pks,
+            ) in records:
+                c = connection.execute(
+                    statement.values(value_params),
+                    params,
+                    execution_options=execution_options,
+                )
+                if bookkeeping:
+                    _postfetch(
+                        mapper,
+                        uowtransaction,
+                        table,
+                        state,
+                        state_dict,
+                        c,
+                        c.context.compiled_parameters[0],
+                        value_params,
+                        True,
+                        c.returned_defaults,
+                    )
+                rows += c.rowcount
+                check_rowcount = enable_check_rowcount and assert_singlerow
+        else:
+            if not allow_executemany:
+                check_rowcount = enable_check_rowcount and assert_singlerow
+                for (
+                    state,
+                    state_dict,
+                    params,
+                    mapper,
+                    connection,
+                    value_params,
+                    has_all_defaults,
+                    has_all_pks,
+                ) in records:
+                    c = connection.execute(
+                        statement, params, execution_options=execution_options
+                    )
+
+                    # TODO: why with bookkeeping=False?
+                    if bookkeeping:
+                        _postfetch(
+                            mapper,
+                            uowtransaction,
+                            table,
+                            state,
+                            state_dict,
+                            c,
+                            c.context.compiled_parameters[0],
+                            value_params,
+                            True,
+                            c.returned_defaults,
+                        )
+                    rows += c.rowcount
+            else:
+                multiparams = [rec[2] for rec in records]
+
+                check_rowcount = enable_check_rowcount and (
+                    assert_multirow
+                    or (assert_singlerow and len(multiparams) == 1)
+                )
+
+                c = connection.execute(
+                    statement, multiparams, execution_options=execution_options
+                )
+
+                rows += c.rowcount
+
+                for (
+                    state,
+                    state_dict,
+                    params,
+                    mapper,
+                    connection,
+                    value_params,
+                    has_all_defaults,
+                    has_all_pks,
+                ) in records:
+                    if bookkeeping:
+                        _postfetch(
+                            mapper,
+                            uowtransaction,
+                            table,
+                            state,
+                            state_dict,
+                            c,
+                            c.context.compiled_parameters[0],
+                            value_params,
+                            True,
+                            (
+                                c.returned_defaults
+                                if not c.context.executemany
+                                else None
+                            ),
+                        )
+
+        if check_rowcount:
+            if rows != len(records):
+                raise orm_exc.StaleDataError(
+                    "UPDATE statement on table '%s' expected to "
+                    "update %d row(s); %d were matched."
+                    % (table.description, len(records), rows)
+                )
+
+        elif needs_version_id:
+            util.warn(
+                "Dialect %s does not support updated rowcount "
+                "- versioning cannot be verified."
+                % c.dialect.dialect_description
+            )
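+
+# Illustrative sketch of the rowcount check above, again assuming the
+# hypothetical versioned Item mapping: two sessions load the same row at
+# version_id=1; the first one flushes an UPDATE that bumps the row to
+# version_id=2.  the second session's flush then emits roughly::
+#
+#     UPDATE item SET name=?, version_id=2 WHERE id=? AND version_id=1
+#
+# which matches zero rows on a dialect with supports_sane_rowcount, so
+# StaleDataError is raised rather than silently losing that update.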
+
+
+def _emit_insert_statements(
+    base_mapper,
+    uowtransaction,
+    mapper,
+    table,
+    insert,
+    *,
+    bookkeeping=True,
+    use_orm_insert_stmt=None,
+    execution_options=None,
+):
+    """Emit INSERT statements corresponding to value lists collected
+    by _collect_insert_commands()."""
+
+    if use_orm_insert_stmt is not None:
+        cached_stmt = use_orm_insert_stmt
+        exec_opt = util.EMPTY_DICT
+
+        # if a user query with RETURNING was passed, we definitely need
+        # to use RETURNING.
+        returning_is_required_anyway = bool(use_orm_insert_stmt._returning)
+        deterministic_results_reqd = (
+            returning_is_required_anyway
+            and use_orm_insert_stmt._sort_by_parameter_order
+        ) or bookkeeping
+    else:
+        returning_is_required_anyway = False
+        deterministic_results_reqd = bookkeeping
+        cached_stmt = base_mapper._memo(("insert", table), table.insert)
+        exec_opt = {"compiled_cache": base_mapper._compiled_cache}
+
+    if execution_options:
+        execution_options = util.EMPTY_DICT.merge_with(
+            exec_opt, execution_options
+        )
+    else:
+        execution_options = exec_opt
+
+    return_result = None
+
+    for (
+        (connection, _, hasvalue, has_all_pks, has_all_defaults),
+        records,
+    ) in groupby(
+        insert,
+        lambda rec: (
+            rec[4],  # connection
+            set(rec[2]),  # parameter keys
+            bool(rec[5]),  # whether we have "value" parameters
+            rec[6],
+            rec[7],
+        ),
+    ):
+        statement = cached_stmt
+
+        if use_orm_insert_stmt is not None:
+            statement = statement._annotate(
+                {
+                    "_emit_insert_table": table,
+                    "_emit_insert_mapper": mapper,
+                }
+            )
+
+        if (
+            (
+                not bookkeeping
+                or (
+                    has_all_defaults
+                    or not base_mapper._prefer_eager_defaults(
+                        connection.dialect, table
+                    )
+                    or not table.implicit_returning
+                    or not connection.dialect.insert_returning
+                )
+            )
+            and not returning_is_required_anyway
+            and has_all_pks
+            and not hasvalue
+        ):
+            # the "we don't need newly generated values back" section.
+            # here we have all the PKs, all the defaults or we don't want
+            # to fetch them, or the dialect doesn't support RETURNING at all
+            # so we have to post-fetch / use lastrowid anyway.
+            records = list(records)
+            multiparams = [rec[2] for rec in records]
+
+            result = connection.execute(
+                statement, multiparams, execution_options=execution_options
+            )
+            if bookkeeping:
+                for (
+                    (
+                        state,
+                        state_dict,
+                        params,
+                        mapper_rec,
+                        conn,
+                        value_params,
+                        has_all_pks,
+                        has_all_defaults,
+                    ),
+                    last_inserted_params,
+                ) in zip(records, result.context.compiled_parameters):
+                    if state:
+                        _postfetch(
+                            mapper_rec,
+                            uowtransaction,
+                            table,
+                            state,
+                            state_dict,
+                            result,
+                            last_inserted_params,
+                            value_params,
+                            False,
+                            (
+                                result.returned_defaults
+                                if not result.context.executemany
+                                else None
+                            ),
+                        )
+                    else:
+                        _postfetch_bulk_save(mapper_rec, state_dict, table)
+
+        else:
+            # here, we need defaults and/or pk values back or we otherwise
+            # know that we are using RETURNING in any case
+
+            records = list(records)
+
+            if returning_is_required_anyway or (
+                table.implicit_returning and not hasvalue and len(records) > 1
+            ):
+                if (
+                    deterministic_results_reqd
+                    and connection.dialect.insert_executemany_returning_sort_by_parameter_order  # noqa: E501
+                ) or (
+                    not deterministic_results_reqd
+                    and connection.dialect.insert_executemany_returning
+                ):
+                    do_executemany = True
+                elif returning_is_required_anyway:
+                    if deterministic_results_reqd:
+                        dt = " with RETURNING and sort by parameter order"
+                    else:
+                        dt = " with RETURNING"
+                    raise sa_exc.InvalidRequestError(
+                        f"Can't use explicit RETURNING for bulk INSERT "
+                        f"operation with "
+                        f"{connection.dialect.dialect_description} backend; "
+                        f"executemany{dt} is not enabled for this dialect."
+                    )
+                else:
+                    do_executemany = False
+            else:
+                do_executemany = False
+
+            if use_orm_insert_stmt is None:
+                if (
+                    not has_all_defaults
+                    and base_mapper._prefer_eager_defaults(
+                        connection.dialect, table
+                    )
+                ):
+                    statement = statement.return_defaults(
+                        *mapper._server_default_cols[table],
+                        sort_by_parameter_order=bookkeeping,
+                    )
+
+            if mapper.version_id_col is not None:
+                statement = statement.return_defaults(
+                    mapper.version_id_col,
+                    sort_by_parameter_order=bookkeeping,
+                )
+            elif do_executemany:
+                statement = statement.return_defaults(
+                    *table.primary_key, sort_by_parameter_order=bookkeeping
+                )
+
+            if do_executemany:
+                multiparams = [rec[2] for rec in records]
+
+                result = connection.execute(
+                    statement, multiparams, execution_options=execution_options
+                )
+
+                if use_orm_insert_stmt is not None:
+                    if return_result is None:
+                        return_result = result
+                    else:
+                        return_result = return_result.splice_vertically(result)
+
+                if bookkeeping:
+                    for (
+                        (
+                            state,
+                            state_dict,
+                            params,
+                            mapper_rec,
+                            conn,
+                            value_params,
+                            has_all_pks,
+                            has_all_defaults,
+                        ),
+                        last_inserted_params,
+                        inserted_primary_key,
+                        returned_defaults,
+                    ) in zip_longest(
+                        records,
+                        result.context.compiled_parameters,
+                        result.inserted_primary_key_rows,
+                        result.returned_defaults_rows or (),
+                    ):
+                        if inserted_primary_key is None:
+                            # this is a real problem and means that we didn't
+                            # get back as many PK rows.  we can't continue
+                            # since this indicates PK rows were missing, which
+                            # means we likely mis-populated records starting
+                            # at that point with incorrectly matched PK
+                            # values.
+                            raise orm_exc.FlushError(
+                                "Multi-row INSERT statement for %s did not "
+                                "produce "
+                                "the correct number of INSERTed rows for "
+                                "RETURNING.  Ensure there are no triggers or "
+                                "special driver issues preventing INSERT from "
+                                "functioning properly." % mapper_rec
+                            )
+
+                        for pk, col in zip(
+                            inserted_primary_key,
+                            mapper._pks_by_table[table],
+                        ):
+                            prop = mapper_rec._columntoproperty[col]
+                            if state_dict.get(prop.key) is None:
+                                state_dict[prop.key] = pk
+
+                        if state:
+                            _postfetch(
+                                mapper_rec,
+                                uowtransaction,
+                                table,
+                                state,
+                                state_dict,
+                                result,
+                                last_inserted_params,
+                                value_params,
+                                False,
+                                returned_defaults,
+                            )
+                        else:
+                            _postfetch_bulk_save(mapper_rec, state_dict, table)
+            else:
+                assert not returning_is_required_anyway
+
+                for (
+                    state,
+                    state_dict,
+                    params,
+                    mapper_rec,
+                    connection,
+                    value_params,
+                    has_all_pks,
+                    has_all_defaults,
+                ) in records:
+                    if value_params:
+                        result = connection.execute(
+                            statement.values(value_params),
+                            params,
+                            execution_options=execution_options,
+                        )
+                    else:
+                        result = connection.execute(
+                            statement,
+                            params,
+                            execution_options=execution_options,
+                        )
+
+                    primary_key = result.inserted_primary_key
+                    if primary_key is None:
+                        raise orm_exc.FlushError(
+                            "Single-row INSERT statement for %s "
+                            "did not produce a "
+                            "new primary key result "
+                            "being invoked.  Ensure there are no triggers or "
+                            "special driver issues preventing INSERT from "
+                            "functioning properly." % (mapper_rec,)
+                        )
+                    for pk, col in zip(
+                        primary_key, mapper._pks_by_table[table]
+                    ):
+                        prop = mapper_rec._columntoproperty[col]
+                        if (
+                            col in value_params
+                            or state_dict.get(prop.key) is None
+                        ):
+                            state_dict[prop.key] = pk
+                    if bookkeeping:
+                        if state:
+                            _postfetch(
+                                mapper_rec,
+                                uowtransaction,
+                                table,
+                                state,
+                                state_dict,
+                                result,
+                                result.context.compiled_parameters[0],
+                                value_params,
+                                False,
+                                (
+                                    result.returned_defaults
+                                    if not result.context.executemany
+                                    else None
+                                ),
+                            )
+                        else:
+                            _postfetch_bulk_save(mapper_rec, state_dict, table)
+
+    if use_orm_insert_stmt is not None:
+        if return_result is None:
+            return _cursor.null_dml_result()
+        else:
+            return return_result
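+
+# Illustrative sketch: for a plain bulk of new objects on a hypothetical
+# mapping Node, e.g.::
+#
+#     session.add_all([Node(data=str(i)) for i in range(1000)])
+#     session.flush()
+#
+# a dialect reporting insert_executemany_returning (such as modern
+# PostgreSQL drivers) takes the do_executemany path above: one batched
+# INSERT .. RETURNING whose primary key rows are matched back to states
+# in parameter order.  otherwise, each record falls through to the
+# single-row execute() path further below.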
+
+
+def _emit_post_update_statements(
+    base_mapper, uowtransaction, mapper, table, update
+):
+    """Emit UPDATE statements corresponding to value lists collected
+    by _collect_post_update_commands()."""
+
+    execution_options = {"compiled_cache": base_mapper._compiled_cache}
+
+    needs_version_id = (
+        mapper.version_id_col is not None
+        and mapper.version_id_col in mapper._cols_by_table[table]
+    )
+
+    def update_stmt():
+        clauses = BooleanClauseList._construct_raw(operators.and_)
+
+        for col in mapper._pks_by_table[table]:
+            clauses._append_inplace(
+                col == sql.bindparam(col._label, type_=col.type)
+            )
+
+        if needs_version_id:
+            clauses._append_inplace(
+                mapper.version_id_col
+                == sql.bindparam(
+                    mapper.version_id_col._label,
+                    type_=mapper.version_id_col.type,
+                )
+            )
+
+        stmt = table.update().where(clauses)
+
+        return stmt
+
+    statement = base_mapper._memo(("post_update", table), update_stmt)
+
+    if mapper._version_id_has_server_side_value:
+        statement = statement.return_defaults(mapper.version_id_col)
+
+    # execute each UPDATE in the order of the original
+    # list of states to guarantee row access order, but
+    # also group them into common (connection, cols) sets
+    # to support executemany().
+    for key, records in groupby(
+        update,
+        lambda rec: (rec[3], set(rec[4])),  # connection  # parameter keys
+    ):
+        rows = 0
+
+        records = list(records)
+        connection = key[0]
+
+        assert_singlerow = connection.dialect.supports_sane_rowcount
+        assert_multirow = (
+            assert_singlerow
+            and connection.dialect.supports_sane_multi_rowcount
+        )
+        allow_executemany = not needs_version_id or assert_multirow
+
+        if not allow_executemany:
+            check_rowcount = assert_singlerow
+            for state, state_dict, mapper_rec, connection, params in records:
+                c = connection.execute(
+                    statement, params, execution_options=execution_options
+                )
+
+                _postfetch_post_update(
+                    mapper_rec,
+                    uowtransaction,
+                    table,
+                    state,
+                    state_dict,
+                    c,
+                    c.context.compiled_parameters[0],
+                )
+                rows += c.rowcount
+        else:
+            multiparams = [
+                params
+                for state, state_dict, mapper_rec, conn, params in records
+            ]
+
+            check_rowcount = assert_multirow or (
+                assert_singlerow and len(multiparams) == 1
+            )
+
+            c = connection.execute(
+                statement, multiparams, execution_options=execution_options
+            )
+
+            rows += c.rowcount
+            for state, state_dict, mapper_rec, connection, params in records:
+                _postfetch_post_update(
+                    mapper_rec,
+                    uowtransaction,
+                    table,
+                    state,
+                    state_dict,
+                    c,
+                    c.context.compiled_parameters[0],
+                )
+
+        if check_rowcount:
+            if rows != len(records):
+                raise orm_exc.StaleDataError(
+                    "UPDATE statement on table '%s' expected to "
+                    "update %d row(s); %d were matched."
+                    % (table.description, len(records), rows)
+                )
+
+        elif needs_version_id:
+            util.warn(
+                "Dialect %s does not support updated rowcount "
+                "- versioning cannot be verified."
+                % c.dialect.dialect_description
+            )
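+
+# Illustrative sketch: post_update serves mutually-dependent rows that no
+# single INSERT/DELETE ordering can satisfy, e.g. a hypothetical::
+#
+#     class Widget(Base):
+#         __tablename__ = "widget"
+#         id = mapped_column(Integer, primary_key=True)
+#         favorite_entry_id = mapped_column(ForeignKey("entry.id"))
+#         favorite_entry = relationship("Entry", post_update=True)
+#
+# where Entry in turn references Widget.  the flush INSERTs both rows with
+# the cyclic FK left NULL, then the statements emitted here issue::
+#
+#     UPDATE widget SET favorite_entry_id=? WHERE widget.id = ?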
+
+
+def _emit_delete_statements(
+    base_mapper, uowtransaction, mapper, table, delete
+):
+    """Emit DELETE statements corresponding to value lists collected
+    by _collect_delete_commands()."""
+
+    need_version_id = (
+        mapper.version_id_col is not None
+        and mapper.version_id_col in mapper._cols_by_table[table]
+    )
+
+    def delete_stmt():
+        clauses = BooleanClauseList._construct_raw(operators.and_)
+
+        for col in mapper._pks_by_table[table]:
+            clauses._append_inplace(
+                col == sql.bindparam(col.key, type_=col.type)
+            )
+
+        if need_version_id:
+            clauses._append_inplace(
+                mapper.version_id_col
+                == sql.bindparam(
+                    mapper.version_id_col.key, type_=mapper.version_id_col.type
+                )
+            )
+
+        return table.delete().where(clauses)
+
+    statement = base_mapper._memo(("delete", table), delete_stmt)
+    for connection, recs in groupby(delete, lambda rec: rec[1]):  # connection
+        del_objects = [params for params, connection in recs]
+
+        execution_options = {"compiled_cache": base_mapper._compiled_cache}
+        expected = len(del_objects)
+        rows_matched = -1
+        only_warn = False
+
+        if (
+            need_version_id
+            and not connection.dialect.supports_sane_multi_rowcount
+        ):
+            if connection.dialect.supports_sane_rowcount:
+                rows_matched = 0
+                # execute deletes individually so that versioned
+                # rows can be verified
+                for params in del_objects:
+                    c = connection.execute(
+                        statement, params, execution_options=execution_options
+                    )
+                    rows_matched += c.rowcount
+            else:
+                util.warn(
+                    "Dialect %s does not support deleted rowcount "
+                    "- versioning cannot be verified."
+                    % connection.dialect.dialect_description
+                )
+                connection.execute(
+                    statement, del_objects, execution_options=execution_options
+                )
+        else:
+            c = connection.execute(
+                statement, del_objects, execution_options=execution_options
+            )
+
+            if not need_version_id:
+                only_warn = True
+
+            rows_matched = c.rowcount
+
+        if (
+            base_mapper.confirm_deleted_rows
+            and rows_matched > -1
+            and expected != rows_matched
+            and (
+                connection.dialect.supports_sane_multi_rowcount
+                or len(del_objects) == 1
+            )
+        ):
+            # TODO: why does this "only warn" if versioning is turned off,
+            # whereas the UPDATE raises?
+            if only_warn:
+                util.warn(
+                    "DELETE statement on table '%s' expected to "
+                    "delete %d row(s); %d were matched.  Please set "
+                    "confirm_deleted_rows=False within the mapper "
+                    "configuration to prevent this warning."
+                    % (table.description, expected, rows_matched)
+                )
+            else:
+                raise orm_exc.StaleDataError(
+                    "DELETE statement on table '%s' expected to "
+                    "delete %d row(s); %d were matched.  Please set "
+                    "confirm_deleted_rows=False within the mapper "
+                    "configuration to prevent this warning."
+                    % (table.description, expected, rows_matched)
+                )
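+
+# Illustrative sketch: when rows are expected to vanish outside the ORM's
+# view (e.g. ON DELETE CASCADE already removed them), the rowcount check
+# above can be disabled on a hypothetical mapping via::
+#
+#     class Parent(Base):
+#         __tablename__ = "parent"
+#         id = mapped_column(Integer, primary_key=True)
+#         __mapper_args__ = {"confirm_deleted_rows": False}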
+
+
+def _finalize_insert_update_commands(base_mapper, uowtransaction, states):
+    """finalize state on states that have been inserted or updated,
+    including calling after_insert/after_update events.
+
+    """
+    for state, state_dict, mapper, connection, has_identity in states:
+        if mapper._readonly_props:
+            readonly = state.unmodified_intersection(
+                [
+                    p.key
+                    for p in mapper._readonly_props
+                    if (
+                        p.expire_on_flush
+                        and (not p.deferred or p.key in state.dict)
+                    )
+                    or (
+                        not p.expire_on_flush
+                        and not p.deferred
+                        and p.key not in state.dict
+                    )
+                ]
+            )
+            if readonly:
+                state._expire_attributes(state.dict, readonly)
+
+        # if eager_defaults option is enabled, load
+        # all expired cols.  Else if we have a version_id_col, make sure
+        # it isn't expired.
+        toload_now = []
+
+        # this is specifically to emit a second SELECT for eager_defaults,
+        # so only if it's set to True, not "auto"
+        if base_mapper.eager_defaults is True:
+            toload_now.extend(
+                state._unloaded_non_object.intersection(
+                    mapper._server_default_plus_onupdate_propkeys
+                )
+            )
+
+        if (
+            mapper.version_id_col is not None
+            and mapper.version_id_generator is False
+        ):
+            if mapper._version_id_prop.key in state.unloaded:
+                toload_now.extend([mapper._version_id_prop.key])
+
+        if toload_now:
+            state.key = base_mapper._identity_key_from_state(state)
+            stmt = future.select(mapper).set_label_style(
+                LABEL_STYLE_TABLENAME_PLUS_COL
+            )
+            loading.load_on_ident(
+                uowtransaction.session,
+                stmt,
+                state.key,
+                refresh_state=state,
+                only_load_props=toload_now,
+            )
+
+        # call after_XXX extensions
+        if not has_identity:
+            mapper.dispatch.after_insert(mapper, connection, state)
+        else:
+            mapper.dispatch.after_update(mapper, connection, state)
+
+        if (
+            mapper.version_id_generator is False
+            and mapper.version_id_col is not None
+        ):
+            if state_dict[mapper._version_id_prop.key] is None:
+                raise orm_exc.FlushError(
+                    "Instance does not contain a non-NULL version value"
+                )
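+
+# Illustrative sketch: the toload_now logic above serves mappings that opt
+# into eager defaults, e.g. a hypothetical::
+#
+#     class Account(Base):
+#         __tablename__ = "account"
+#         id = mapped_column(Integer, primary_key=True)
+#         created = mapped_column(DateTime, server_default=func.now())
+#         __mapper_args__ = {"eager_defaults": True}
+#
+# if the INSERT/UPDATE could not use RETURNING, the flush finishes with an
+# extra SELECT so that Account.created is populated immediately rather
+# than being expired until next access.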
+
+
+def _postfetch_post_update(
+    mapper, uowtransaction, table, state, dict_, result, params
+):
+    needs_version_id = (
+        mapper.version_id_col is not None
+        and mapper.version_id_col in mapper._cols_by_table[table]
+    )
+
+    if not uowtransaction.is_deleted(state):
+        # post updating after a regular INSERT or UPDATE, do a full postfetch
+        prefetch_cols = result.context.compiled.prefetch
+        postfetch_cols = result.context.compiled.postfetch
+    elif needs_version_id:
+        # post updating before a DELETE with a version_id_col, need to
+        # postfetch just version_id_col
+        prefetch_cols = postfetch_cols = ()
+    else:
+        # post updating before a DELETE without a version_id_col,
+        # don't need to postfetch
+        return
+
+    if needs_version_id:
+        prefetch_cols = list(prefetch_cols) + [mapper.version_id_col]
+
+    refresh_flush = bool(mapper.class_manager.dispatch.refresh_flush)
+    if refresh_flush:
+        load_evt_attrs = []
+
+    for c in prefetch_cols:
+        if c.key in params and c in mapper._columntoproperty:
+            dict_[mapper._columntoproperty[c].key] = params[c.key]
+            if refresh_flush:
+                load_evt_attrs.append(mapper._columntoproperty[c].key)
+
+    if refresh_flush and load_evt_attrs:
+        mapper.class_manager.dispatch.refresh_flush(
+            state, uowtransaction, load_evt_attrs
+        )
+
+    if postfetch_cols:
+        state._expire_attributes(
+            state.dict,
+            [
+                mapper._columntoproperty[c].key
+                for c in postfetch_cols
+                if c in mapper._columntoproperty
+            ],
+        )
+
+
+def _postfetch(
+    mapper,
+    uowtransaction,
+    table,
+    state,
+    dict_,
+    result,
+    params,
+    value_params,
+    isupdate,
+    returned_defaults,
+):
+    """Expire attributes in need of newly persisted database state,
+    after an INSERT or UPDATE statement has been emitted for that
+    state."""
+
+    prefetch_cols = result.context.compiled.prefetch
+    postfetch_cols = result.context.compiled.postfetch
+    returning_cols = result.context.compiled.effective_returning
+
+    if (
+        mapper.version_id_col is not None
+        and mapper.version_id_col in mapper._cols_by_table[table]
+    ):
+        prefetch_cols = list(prefetch_cols) + [mapper.version_id_col]
+
+    refresh_flush = bool(mapper.class_manager.dispatch.refresh_flush)
+    if refresh_flush:
+        load_evt_attrs = []
+
+    if returning_cols:
+        row = returned_defaults
+        if row is not None:
+            for row_value, col in zip(row, returning_cols):
+                # pk cols returned from insert are handled
+                # distinctly, don't step on the values here
+                if col.primary_key and result.context.isinsert:
+                    continue
+
+                # note that columns can be in the "return defaults" that are
+                # not mapped to this mapper, typically because they are
+                # "excluded", which can be specified directly or also occurs
+                # when using declarative w/ single table inheritance
+                prop = mapper._columntoproperty.get(col)
+                if prop:
+                    dict_[prop.key] = row_value
+                    if refresh_flush:
+                        load_evt_attrs.append(prop.key)
+
+    for c in prefetch_cols:
+        if c.key in params and c in mapper._columntoproperty:
+            pkey = mapper._columntoproperty[c].key
+
+            # set prefetched value in dict and also pop from committed_state,
+            # since this is new database state that replaces whatever might
+            # have previously been fetched (see #10800).  this is essentially a
+            # shorthand version of set_committed_value(), which could also be
+            # used here directly (with more overhead)
+            dict_[pkey] = params[c.key]
+            state.committed_state.pop(pkey, None)
+
+            if refresh_flush:
+                load_evt_attrs.append(pkey)
+
+    if refresh_flush and load_evt_attrs:
+        mapper.class_manager.dispatch.refresh_flush(
+            state, uowtransaction, load_evt_attrs
+        )
+
+    if isupdate and value_params:
+        # explicitly suit the use case specified by
+        # [ticket:3801]: PK SQL expressions for UPDATE on non-RETURNING
+        # databases, set to themselves in order to do a version bump.
+        postfetch_cols.extend(
+            [
+                col
+                for col in value_params
+                if col.primary_key and col not in returning_cols
+            ]
+        )
+
+    if postfetch_cols:
+        state._expire_attributes(
+            state.dict,
+            [
+                mapper._columntoproperty[c].key
+                for c in postfetch_cols
+                if c in mapper._columntoproperty
+            ],
+        )
+
+    # synchronize newly inserted ids from one table to the next
+    # TODO: this still goes a little too often.  would be nice to
+    # have definitive list of "columns that changed" here
+    for m, equated_pairs in mapper._table_to_equated[table]:
+        sync.populate(
+            state,
+            m,
+            state,
+            m,
+            equated_pairs,
+            uowtransaction,
+            mapper.passive_updates,
+        )
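+
+# Illustrative sketch: for a hypothetical column configured as::
+#
+#     updated = mapped_column(DateTime, onupdate=func.now())
+#
+# the SQL-expression default lands in the compiled statement's postfetch
+# collection, so the "updated" attribute is expired here after the flush
+# and the new database value loads lazily on next access (unless RETURNING
+# or an eager_defaults refresh already supplied it).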
+
+
+def _postfetch_bulk_save(mapper, dict_, table):
+    for m, equated_pairs in mapper._table_to_equated[table]:
+        sync.bulk_populate_inherit_keys(dict_, m, equated_pairs)
+
+
+def _connections_for_states(base_mapper, uowtransaction, states):
+    """Return an iterator of (state, state.dict, mapper, connection).
+
+    The states are sorted according to _sort_states, then paired
+    with the connection they should be using for the given
+    unit of work transaction.
+
+    """
+    # if session has a connection callable,
+    # organize individual states with the connection
+    # to use for update
+    if uowtransaction.session.connection_callable:
+        connection_callable = uowtransaction.session.connection_callable
+    else:
+        connection = uowtransaction.transaction.connection(base_mapper)
+        connection_callable = None
+
+    for state in _sort_states(base_mapper, states):
+        if connection_callable:
+            connection = connection_callable(base_mapper, state.obj())
+
+        mapper = state.manager.mapper
+
+        yield state, state.dict, mapper, connection
+
+
+def _sort_states(mapper, states):
+    pending = set(states)
+    persistent = {s for s in pending if s.key is not None}
+    pending.difference_update(persistent)
+
+    try:
+        persistent_sorted = sorted(
+            persistent, key=mapper._persistent_sortkey_fn
+        )
+    except TypeError as err:
+        raise sa_exc.InvalidRequestError(
+            "Could not sort objects by primary key; primary key "
+            "values must be sortable in Python (was: %s)" % err
+        ) from err
+    return (
+        sorted(pending, key=operator.attrgetter("insert_order"))
+        + persistent_sorted
+    )
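+
+# Illustrative sketch: given hypothetical states for two new objects (a, b
+# in construction order) plus two persistent objects with primary keys 10
+# and 3, this returns [a, b, <pk 3>, <pk 10>]: pending states in
+# insert_order, persistent states sorted by primary key so that UPDATE /
+# DELETE batches touch rows in a deterministic order (which helps avoid
+# deadlocks between concurrent flushes).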
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/properties.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/properties.py
new file mode 100644
index 00000000..a41c520c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/properties.py
@@ -0,0 +1,877 @@
+# orm/properties.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""MapperProperty implementations.
+
+This is a private module which defines the behavior of individual ORM-
+mapped attributes.
+
+"""
+
+from __future__ import annotations
+
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import List
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import attributes
+from . import strategy_options
+from .base import _DeclarativeMapped
+from .base import class_mapper
+from .descriptor_props import CompositeProperty
+from .descriptor_props import ConcreteInheritedProperty
+from .descriptor_props import SynonymProperty
+from .interfaces import _AttributeOptions
+from .interfaces import _DEFAULT_ATTRIBUTE_OPTIONS
+from .interfaces import _IntrospectsAnnotations
+from .interfaces import _MapsColumns
+from .interfaces import MapperProperty
+from .interfaces import PropComparator
+from .interfaces import StrategizedProperty
+from .relationships import RelationshipProperty
+from .util import de_stringify_annotation
+from .. import exc as sa_exc
+from .. import ForeignKey
+from .. import log
+from .. import util
+from ..sql import coercions
+from ..sql import roles
+from ..sql.base import _NoArg
+from ..sql.schema import Column
+from ..sql.schema import SchemaConst
+from ..sql.type_api import TypeEngine
+from ..util.typing import de_optionalize_union_types
+from ..util.typing import get_args
+from ..util.typing import includes_none
+from ..util.typing import is_fwd_ref
+from ..util.typing import is_pep593
+from ..util.typing import is_pep695
+from ..util.typing import Self
+
+if TYPE_CHECKING:
+    from ._typing import _IdentityKeyType
+    from ._typing import _InstanceDict
+    from ._typing import _ORMColumnExprArgument
+    from ._typing import _RegistryType
+    from .base import Mapped
+    from .decl_base import _ClassScanMapperConfig
+    from .mapper import Mapper
+    from .session import Session
+    from .state import _InstallLoaderCallableProto
+    from .state import InstanceState
+    from ..sql._typing import _InfoType
+    from ..sql.elements import ColumnElement
+    from ..sql.elements import NamedColumn
+    from ..sql.operators import OperatorType
+    from ..util.typing import _AnnotationScanType
+    from ..util.typing import RODescriptorReference
+
+_T = TypeVar("_T", bound=Any)
+_PT = TypeVar("_PT", bound=Any)
+_NC = TypeVar("_NC", bound="NamedColumn[Any]")
+
+__all__ = [
+    "ColumnProperty",
+    "CompositeProperty",
+    "ConcreteInheritedProperty",
+    "RelationshipProperty",
+    "SynonymProperty",
+]
+
+
+@log.class_logger
+class ColumnProperty(
+    _MapsColumns[_T],
+    StrategizedProperty[_T],
+    _IntrospectsAnnotations,
+    log.Identified,
+):
+    """Describes an object attribute that corresponds to a table column
+    or other column expression.
+
+    Public constructor is the :func:`_orm.column_property` function.
+
+    """
+
+    strategy_wildcard_key = strategy_options._COLUMN_TOKEN
+    inherit_cache = True
+    """:meta private:"""
+
+    _links_to_entity = False
+
+    columns: List[NamedColumn[Any]]
+
+    _is_polymorphic_discriminator: bool
+
+    _mapped_by_synonym: Optional[str]
+
+    comparator_factory: Type[PropComparator[_T]]
+
+    __slots__ = (
+        "columns",
+        "group",
+        "deferred",
+        "instrument",
+        "comparator_factory",
+        "active_history",
+        "expire_on_flush",
+        "_creation_order",
+        "_is_polymorphic_discriminator",
+        "_mapped_by_synonym",
+        "_deferred_column_loader",
+        "_raise_column_loader",
+        "_renders_in_subqueries",
+        "raiseload",
+    )
+
+    def __init__(
+        self,
+        column: _ORMColumnExprArgument[_T],
+        *additional_columns: _ORMColumnExprArgument[Any],
+        attribute_options: Optional[_AttributeOptions] = None,
+        group: Optional[str] = None,
+        deferred: bool = False,
+        raiseload: bool = False,
+        comparator_factory: Optional[Type[PropComparator[_T]]] = None,
+        active_history: bool = False,
+        expire_on_flush: bool = True,
+        info: Optional[_InfoType] = None,
+        doc: Optional[str] = None,
+        _instrument: bool = True,
+        _assume_readonly_dc_attributes: bool = False,
+    ):
+        super().__init__(
+            attribute_options=attribute_options,
+            _assume_readonly_dc_attributes=_assume_readonly_dc_attributes,
+        )
+        columns = (column,) + additional_columns
+        self.columns = [
+            coercions.expect(roles.LabeledColumnExprRole, c) for c in columns
+        ]
+        self.group = group
+        self.deferred = deferred
+        self.raiseload = raiseload
+        self.instrument = _instrument
+        self.comparator_factory = (
+            comparator_factory
+            if comparator_factory is not None
+            else self.__class__.Comparator
+        )
+        self.active_history = active_history
+        self.expire_on_flush = expire_on_flush
+
+        if info is not None:
+            self.info.update(info)
+
+        if doc is not None:
+            self.doc = doc
+        else:
+            for col in reversed(self.columns):
+                doc = getattr(col, "doc", None)
+                if doc is not None:
+                    self.doc = doc
+                    break
+            else:
+                self.doc = None
+
+        util.set_creation_order(self)
+
+        self.strategy_key = (
+            ("deferred", self.deferred),
+            ("instrument", self.instrument),
+        )
+        if self.raiseload:
+            self.strategy_key += (("raiseload", True),)
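+
+    # Illustrative sketch: a hypothetical mapping that constructs this
+    # property through the public column_property() function::
+    #
+    #     class User(Base):
+    #         __tablename__ = "user"
+    #         id = mapped_column(Integer, primary_key=True)
+    #         firstname = mapped_column(String(50))
+    #         lastname = mapped_column(String(50))
+    #         fullname = column_property(
+    #             firstname + " " + lastname, deferred=True
+    #         )
+    #
+    # arrives here as ColumnProperty(<expression>, deferred=True), with
+    # strategy_key set to (("deferred", True), ("instrument", True)).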
+
+    def declarative_scan(
+        self,
+        decl_scan: _ClassScanMapperConfig,
+        registry: _RegistryType,
+        cls: Type[Any],
+        originating_module: Optional[str],
+        key: str,
+        mapped_container: Optional[Type[Mapped[Any]]],
+        annotation: Optional[_AnnotationScanType],
+        extracted_mapped_annotation: Optional[_AnnotationScanType],
+        is_dataclass_field: bool,
+    ) -> None:
+        column = self.columns[0]
+        if column.key is None:
+            column.key = key
+        if column.name is None:
+            column.name = key
+
+    @property
+    def mapper_property_to_assign(self) -> Optional[MapperProperty[_T]]:
+        return self
+
+    @property
+    def columns_to_assign(self) -> List[Tuple[Column[Any], int]]:
+        # mypy doesn't care about the isinstance here
+        return [
+            (c, 0)  # type: ignore
+            for c in self.columns
+            if isinstance(c, Column) and c.table is None
+        ]
+
+    def _memoized_attr__renders_in_subqueries(self) -> bool:
+        if ("query_expression", True) in self.strategy_key:
+            return self.strategy._have_default_expression  # type: ignore
+
+        return ("deferred", True) not in self.strategy_key or (
+            self not in self.parent._readonly_props  # type: ignore
+        )
+
+    @util.preload_module("sqlalchemy.orm.state", "sqlalchemy.orm.strategies")
+    def _memoized_attr__deferred_column_loader(
+        self,
+    ) -> _InstallLoaderCallableProto[Any]:
+        state = util.preloaded.orm_state
+        strategies = util.preloaded.orm_strategies
+        return state.InstanceState._instance_level_callable_processor(
+            self.parent.class_manager,
+            strategies.LoadDeferredColumns(self.key),
+            self.key,
+        )
+
+    @util.preload_module("sqlalchemy.orm.state", "sqlalchemy.orm.strategies")
+    def _memoized_attr__raise_column_loader(
+        self,
+    ) -> _InstallLoaderCallableProto[Any]:
+        state = util.preloaded.orm_state
+        strategies = util.preloaded.orm_strategies
+        return state.InstanceState._instance_level_callable_processor(
+            self.parent.class_manager,
+            strategies.LoadDeferredColumns(self.key, True),
+            self.key,
+        )
+
+    def __clause_element__(self) -> roles.ColumnsClauseRole:
+        """Allow the ColumnProperty to work in expression before it is turned
+        into an instrumented attribute.
+        """
+
+        return self.expression
+
+    @property
+    def expression(self) -> roles.ColumnsClauseRole:
+        """Return the primary column or expression for this ColumnProperty.
+
+        E.g.::
+
+
+            class File(Base):
+                # ...
+
+                name = Column(String(64))
+                extension = Column(String(8))
+                filename = column_property(name + "." + extension)
+                path = column_property("C:/" + filename.expression)
+
+        .. seealso::
+
+            :ref:`mapper_column_property_sql_expressions_composed`
+
+        """
+        return self.columns[0]
+
+    def instrument_class(self, mapper: Mapper[Any]) -> None:
+        if not self.instrument:
+            return
+
+        attributes.register_descriptor(
+            mapper.class_,
+            self.key,
+            comparator=self.comparator_factory(self, mapper),
+            parententity=mapper,
+            doc=self.doc,
+        )
+
+    def do_init(self) -> None:
+        super().do_init()
+
+        if len(self.columns) > 1 and set(self.parent.primary_key).issuperset(
+            self.columns
+        ):
+            util.warn(
+                (
+                    "On mapper %s, primary key column '%s' is being combined "
+                    "with distinct primary key column '%s' in attribute '%s'. "
+                    "Use explicit properties to give each column its own "
+                    "mapped attribute name."
+                )
+                % (self.parent, self.columns[1], self.columns[0], self.key)
+            )
+
+    def copy(self) -> ColumnProperty[_T]:
+        return ColumnProperty(
+            *self.columns,
+            deferred=self.deferred,
+            group=self.group,
+            active_history=self.active_history,
+        )
+
+    def merge(
+        self,
+        session: Session,
+        source_state: InstanceState[Any],
+        source_dict: _InstanceDict,
+        dest_state: InstanceState[Any],
+        dest_dict: _InstanceDict,
+        load: bool,
+        _recursive: Dict[Any, object],
+        _resolve_conflict_map: Dict[_IdentityKeyType[Any], object],
+    ) -> None:
+        if not self.instrument:
+            return
+        elif self.key in source_dict:
+            value = source_dict[self.key]
+
+            if not load:
+                dest_dict[self.key] = value
+            else:
+                impl = dest_state.get_impl(self.key)
+                impl.set(dest_state, dest_dict, value, None)
+        elif dest_state.has_identity and self.key not in dest_dict:
+            dest_state._expire_attributes(
+                dest_dict, [self.key], no_loader=True
+            )
+
+    class Comparator(util.MemoizedSlots, PropComparator[_PT]):
+        """Produce boolean, comparison, and other operators for
+        :class:`.ColumnProperty` attributes.
+
+        See the documentation for :class:`.PropComparator` for a brief
+        overview.
+
+        .. seealso::
+
+            :class:`.PropComparator`
+
+            :class:`.ColumnOperators`
+
+            :ref:`types_operators`
+
+            :attr:`.TypeEngine.comparator_factory`
+
+        """
+
+        if not TYPE_CHECKING:
+            # prevent pylance from being clever about slots
+            __slots__ = "__clause_element__", "info", "expressions"
+
+        prop: RODescriptorReference[ColumnProperty[_PT]]
+
+        expressions: Sequence[NamedColumn[Any]]
+        """The full sequence of columns referenced by this
+        attribute, adjusted for any aliasing in progress.
+
+        .. versionadded:: 1.3.17
+
+        .. seealso::
+
+           :ref:`maptojoin` - usage example
+        """
+
+        def _orm_annotate_column(self, column: _NC) -> _NC:
+            """annotate and possibly adapt a column to be returned
+            as the mapped-attribute exposed version of the column.
+
+            The column in this context needs to act as much like the
+            column in an ORM mapped context as possible, so includes
+            annotations to give hints to various ORM functions as to
+            the source entity of this column.   It also adapts it
+            to the mapper's with_polymorphic selectable if one is
+            present.
+
+            """
+
+            pe = self._parententity
+            annotations: Dict[str, Any] = {
+                "entity_namespace": pe,
+                "parententity": pe,
+                "parentmapper": pe,
+                "proxy_key": self.prop.key,
+            }
+
+            col = column
+
+            # for a mapper with polymorphic_on and an adapter, return
+            # the column against the polymorphic selectable.
+            # see also orm.util._orm_downgrade_polymorphic_columns
+            # for the reverse operation.
+            if self._parentmapper._polymorphic_adapter:
+                mapper_local_col = col
+                col = self._parentmapper._polymorphic_adapter.traverse(col)
+
+                # this is a clue to the ORM Query etc. that this column
+                # was adapted to the mapper's polymorphic_adapter.  the
+                # ORM uses this hint to know which column it's adapting.
+                annotations["adapt_column"] = mapper_local_col
+
+            return col._annotate(annotations)._set_propagate_attrs(
+                {"compile_state_plugin": "orm", "plugin_subject": pe}
+            )
+
+        if TYPE_CHECKING:
+
+            def __clause_element__(self) -> NamedColumn[_PT]: ...
+
+        def _memoized_method___clause_element__(
+            self,
+        ) -> NamedColumn[_PT]:
+            if self.adapter:
+                return self.adapter(self.prop.columns[0], self.prop.key)
+            else:
+                return self._orm_annotate_column(self.prop.columns[0])
+
+        def _memoized_attr_info(self) -> _InfoType:
+            """The .info dictionary for this attribute."""
+
+            ce = self.__clause_element__()
+            try:
+                return ce.info  # type: ignore
+            except AttributeError:
+                return self.prop.info
+
+        def _memoized_attr_expressions(self) -> Sequence[NamedColumn[Any]]:
+            """The full sequence of columns referenced by this
+            attribute, adjusted for any aliasing in progress.
+
+            .. versionadded:: 1.3.17
+
+            """
+            if self.adapter:
+                return [
+                    self.adapter(col, self.prop.key)
+                    for col in self.prop.columns
+                ]
+            else:
+                return [
+                    self._orm_annotate_column(col) for col in self.prop.columns
+                ]
+
+        def _fallback_getattr(self, key: str) -> Any:
+            """proxy attribute access down to the mapped column.
+
+            this allows user-defined comparison methods to be accessed.
+            """
+            return getattr(self.__clause_element__(), key)
+
+        def operate(
+            self, op: OperatorType, *other: Any, **kwargs: Any
+        ) -> ColumnElement[Any]:
+            return op(self.__clause_element__(), *other, **kwargs)  # type: ignore[no-any-return]  # noqa: E501
+
+        def reverse_operate(
+            self, op: OperatorType, other: Any, **kwargs: Any
+        ) -> ColumnElement[Any]:
+            col = self.__clause_element__()
+            return op(col._bind_param(op, other), col, **kwargs)  # type: ignore[no-any-return]  # noqa: E501
+
+    def __str__(self) -> str:
+        if not self.parent or not self.key:
+            return object.__repr__(self)
+        return str(self.parent.class_.__name__) + "." + self.key
+
+
+class MappedSQLExpression(ColumnProperty[_T], _DeclarativeMapped[_T]):
+    """Declarative front-end for the :class:`.ColumnProperty` class.
+
+    Public constructor is the :func:`_orm.column_property` function.
+
+    E.g., a minimal sketch, assuming a declarative ``Base`` class and
+    the usual imports (``Mapped``, ``mapped_column``, ``column_property``,
+    ``String``)::
+
+        class User(Base):
+            __tablename__ = "user_account"
+
+            id: Mapped[int] = mapped_column(primary_key=True)
+            firstname: Mapped[str] = mapped_column(String(50))
+            lastname: Mapped[str] = mapped_column(String(50))
+            fullname: Mapped[str] = column_property(
+                firstname + " " + lastname
+            )
+
+    .. versionchanged:: 2.0 Added :class:`_orm.MappedSQLExpression` as
+       a Declarative compatible subclass for :class:`_orm.ColumnProperty`.
+
+    .. seealso::
+
+        :class:`.MappedColumn`
+
+    """
+
+    inherit_cache = True
+    """:meta private:"""
+
+
+class MappedColumn(
+    _IntrospectsAnnotations,
+    _MapsColumns[_T],
+    _DeclarativeMapped[_T],
+):
+    """Maps a single :class:`_schema.Column` on a class.
+
+    :class:`_orm.MappedColumn` is a specialization of the
+    :class:`_orm.ColumnProperty` class and is oriented towards declarative
+    configuration.
+
+    To construct :class:`_orm.MappedColumn` objects, use the
+    :func:`_orm.mapped_column` constructor function.
+
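+    E.g., a minimal sketch, assuming a declarative ``Base`` class and
+    the usual ``Mapped`` / ``mapped_column`` imports::
+
+        class User(Base):
+            __tablename__ = "user_account"
+
+            id: Mapped[int] = mapped_column(primary_key=True)
+            name: Mapped[str] = mapped_column(default="unnamed")
+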
+    .. versionadded:: 2.0
+
+
+    """
+
+    __slots__ = (
+        "column",
+        "_creation_order",
+        "_sort_order",
+        "foreign_keys",
+        "_has_nullable",
+        "_has_insert_default",
+        "deferred",
+        "deferred_group",
+        "deferred_raiseload",
+        "active_history",
+        "_attribute_options",
+        "_has_dataclass_arguments",
+        "_use_existing_column",
+    )
+
+    deferred: Union[_NoArg, bool]
+    deferred_raiseload: bool
+    deferred_group: Optional[str]
+
+    column: Column[_T]
+    foreign_keys: Optional[Set[ForeignKey]]
+    _attribute_options: _AttributeOptions
+
+    def __init__(self, *arg: Any, **kw: Any):
+        self._attribute_options = attr_opts = kw.pop(
+            "attribute_options", _DEFAULT_ATTRIBUTE_OPTIONS
+        )
+
+        self._use_existing_column = kw.pop("use_existing_column", False)
+
+        self._has_dataclass_arguments = (
+            attr_opts is not None
+            and attr_opts != _DEFAULT_ATTRIBUTE_OPTIONS
+            and any(
+                attr_opts[i] is not _NoArg.NO_ARG
+                for i, attr in enumerate(attr_opts._fields)
+                if attr != "dataclasses_default"
+            )
+        )
+
+        insert_default = kw.pop("insert_default", _NoArg.NO_ARG)
+        self._has_insert_default = insert_default is not _NoArg.NO_ARG
+
+        if self._has_insert_default:
+            kw["default"] = insert_default
+        elif attr_opts.dataclasses_default is not _NoArg.NO_ARG:
+            kw["default"] = attr_opts.dataclasses_default
+
+        self.deferred_group = kw.pop("deferred_group", None)
+        self.deferred_raiseload = kw.pop("deferred_raiseload", None)
+        self.deferred = kw.pop("deferred", _NoArg.NO_ARG)
+        self.active_history = kw.pop("active_history", False)
+
+        self._sort_order = kw.pop("sort_order", _NoArg.NO_ARG)
+        self.column = cast("Column[_T]", Column(*arg, **kw))
+        self.foreign_keys = self.column.foreign_keys
+        self._has_nullable = "nullable" in kw and kw.get("nullable") not in (
+            None,
+            SchemaConst.NULL_UNSPECIFIED,
+        )
+        util.set_creation_order(self)
+
+    def _copy(self, **kw: Any) -> Self:
+        new = self.__class__.__new__(self.__class__)
+        new.column = self.column._copy(**kw)
+        new.deferred = self.deferred
+        new.deferred_group = self.deferred_group
+        new.deferred_raiseload = self.deferred_raiseload
+        new.foreign_keys = new.column.foreign_keys
+        new.active_history = self.active_history
+        new._has_nullable = self._has_nullable
+        new._attribute_options = self._attribute_options
+        new._has_insert_default = self._has_insert_default
+        new._has_dataclass_arguments = self._has_dataclass_arguments
+        new._use_existing_column = self._use_existing_column
+        new._sort_order = self._sort_order
+        util.set_creation_order(new)
+        return new
+
+    @property
+    def name(self) -> str:
+        return self.column.name
+
+    @property
+    def mapper_property_to_assign(self) -> Optional[MapperProperty[_T]]:
+        effective_deferred = self.deferred
+        if effective_deferred is _NoArg.NO_ARG:
+            effective_deferred = bool(
+                self.deferred_group or self.deferred_raiseload
+            )
+
+        if effective_deferred or self.active_history:
+            return ColumnProperty(
+                self.column,
+                deferred=effective_deferred,
+                group=self.deferred_group,
+                raiseload=self.deferred_raiseload,
+                attribute_options=self._attribute_options,
+                active_history=self.active_history,
+            )
+        else:
+            return None
+
+    @property
+    def columns_to_assign(self) -> List[Tuple[Column[Any], int]]:
+        return [
+            (
+                self.column,
+                (
+                    self._sort_order
+                    if self._sort_order is not _NoArg.NO_ARG
+                    else 0
+                ),
+            )
+        ]
+
+    def __clause_element__(self) -> Column[_T]:
+        return self.column
+
+    def operate(
+        self, op: OperatorType, *other: Any, **kwargs: Any
+    ) -> ColumnElement[Any]:
+        return op(self.__clause_element__(), *other, **kwargs)  # type: ignore[no-any-return]  # noqa: E501
+
+    def reverse_operate(
+        self, op: OperatorType, other: Any, **kwargs: Any
+    ) -> ColumnElement[Any]:
+        col = self.__clause_element__()
+        return op(col._bind_param(op, other), col, **kwargs)  # type: ignore[no-any-return]  # noqa: E501
+
+    def found_in_pep593_annotated(self) -> Any:
+        # return a blank mapped_column().  The Column from this
+        # mapped_column() (the one found inside the Annotated annotation)
+        # is merged into the blank one in _init_column_for_annotation().
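+        # e.g. (hedged sketch): for an annotation such as
+        #     intpk = Annotated[int, mapped_column(primary_key=True)]
+        #     id: Mapped[intpk]
+        # "self" is the mapped_column() found inside the Annotated; its
+        # Column arguments are merged into the blank MappedColumn below.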
+        return MappedColumn()
+
+    def declarative_scan(
+        self,
+        decl_scan: _ClassScanMapperConfig,
+        registry: _RegistryType,
+        cls: Type[Any],
+        originating_module: Optional[str],
+        key: str,
+        mapped_container: Optional[Type[Mapped[Any]]],
+        annotation: Optional[_AnnotationScanType],
+        extracted_mapped_annotation: Optional[_AnnotationScanType],
+        is_dataclass_field: bool,
+    ) -> None:
+        column = self.column
+
+        if (
+            self._use_existing_column
+            and decl_scan.inherits
+            and decl_scan.single
+        ):
+            if decl_scan.is_deferred:
+                raise sa_exc.ArgumentError(
+                    "Can't use use_existing_column with deferred mappers"
+                )
+            supercls_mapper = class_mapper(decl_scan.inherits, False)
+
+            colname = column.name if column.name is not None else key
+            column = self.column = supercls_mapper.local_table.c.get(  # type: ignore[assignment] # noqa: E501
+                colname, column
+            )
+
+        if column.key is None:
+            column.key = key
+        if column.name is None:
+            column.name = key
+
+        sqltype = column.type
+
+        if extracted_mapped_annotation is None:
+            if sqltype._isnull and not self.column.foreign_keys:
+                self._raise_for_required(key, cls)
+            else:
+                return
+
+        self._init_column_for_annotation(
+            cls,
+            registry,
+            extracted_mapped_annotation,
+            originating_module,
+        )
+
+    @util.preload_module("sqlalchemy.orm.decl_base")
+    def declarative_scan_for_composite(
+        self,
+        registry: _RegistryType,
+        cls: Type[Any],
+        originating_module: Optional[str],
+        key: str,
+        param_name: str,
+        param_annotation: _AnnotationScanType,
+    ) -> None:
+        decl_base = util.preloaded.orm_decl_base
+        decl_base._undefer_column_name(param_name, self.column)
+        self._init_column_for_annotation(
+            cls, registry, param_annotation, originating_module
+        )
+
+    def _init_column_for_annotation(
+        self,
+        cls: Type[Any],
+        registry: _RegistryType,
+        argument: _AnnotationScanType,
+        originating_module: Optional[str],
+    ) -> None:
+        sqltype = self.column.type
+
+        if is_fwd_ref(
+            argument, check_generic=True, check_for_plain_string=True
+        ):
+            assert originating_module is not None
+            argument = de_stringify_annotation(
+                cls, argument, originating_module, include_generic=True
+            )
+
+        nullable = includes_none(argument)
+
+        if not self._has_nullable:
+            self.column.nullable = nullable
+
+        our_type = de_optionalize_union_types(argument)
+
+        find_mapped_in: Tuple[Any, ...] = ()
+        our_type_is_pep593 = False
+        raw_pep_593_type = None
+
+        if is_pep593(our_type):
+            our_type_is_pep593 = True
+
+            pep_593_components = get_args(our_type)
+            raw_pep_593_type = pep_593_components[0]
+            if nullable:
+                raw_pep_593_type = de_optionalize_union_types(raw_pep_593_type)
+            find_mapped_in = pep_593_components[1:]
+        elif is_pep695(argument) and is_pep593(argument.__value__):
+            # nested annotations inside unions etc. are not supported
+            find_mapped_in = get_args(argument.__value__)[1:]
+
+        use_args_from: Optional[MappedColumn[Any]]
+        for elem in find_mapped_in:
+            if isinstance(elem, MappedColumn):
+                use_args_from = elem
+                break
+        else:
+            use_args_from = None
+
+        if use_args_from is not None:
+            if (
+                not self._has_insert_default
+                and use_args_from.column.default is not None
+            ):
+                self.column.default = None
+
+            use_args_from.column._merge(self.column)
+            sqltype = self.column.type
+
+            if (
+                use_args_from.deferred is not _NoArg.NO_ARG
+                and self.deferred is _NoArg.NO_ARG
+            ):
+                self.deferred = use_args_from.deferred
+
+            if (
+                use_args_from.deferred_group is not None
+                and self.deferred_group is None
+            ):
+                self.deferred_group = use_args_from.deferred_group
+
+            if (
+                use_args_from.deferred_raiseload is not None
+                and self.deferred_raiseload is None
+            ):
+                self.deferred_raiseload = use_args_from.deferred_raiseload
+
+            if (
+                use_args_from._use_existing_column
+                and not self._use_existing_column
+            ):
+                self._use_existing_column = True
+
+            if use_args_from.active_history:
+                self.active_history = use_args_from.active_history
+
+            if (
+                use_args_from._sort_order is not None
+                and self._sort_order is _NoArg.NO_ARG
+            ):
+                self._sort_order = use_args_from._sort_order
+
+            if (
+                use_args_from.column.key is not None
+                or use_args_from.column.name is not None
+            ):
+                util.warn_deprecated(
+                    "Can't use the 'key' or 'name' arguments in "
+                    "Annotated with mapped_column(); this will be ignored",
+                    "2.0.22",
+                )
+
+            if use_args_from._has_dataclass_arguments:
+                for idx, arg in enumerate(
+                    use_args_from._attribute_options._fields
+                ):
+                    if (
+                        use_args_from._attribute_options[idx]
+                        is not _NoArg.NO_ARG
+                    ):
+                        arg = arg.replace("dataclasses_", "")
+                        util.warn_deprecated(
+                            f"Argument '{arg}' is a dataclass argument and "
+                            "cannot be specified within a mapped_column() "
+                            "bundled inside of an Annotated object",
+                            "2.0.22",
+                        )
+
+        if sqltype._isnull and not self.column.foreign_keys:
+            new_sqltype = None
+
+            checks: List[Any]
+            if our_type_is_pep593:
+                checks = [our_type, raw_pep_593_type]
+            else:
+                checks = [our_type]
+
+            for check_type in checks:
+                new_sqltype = registry._resolve_type(check_type)
+                if new_sqltype is not None:
+                    break
+            else:
+                if isinstance(our_type, TypeEngine) or (
+                    isinstance(our_type, type)
+                    and issubclass(our_type, TypeEngine)
+                ):
+                    raise sa_exc.ArgumentError(
+                        f"The type provided inside the {self.column.key!r} "
+                        "attribute Mapped annotation is the SQLAlchemy type "
+                        f"{our_type}. Expected a Python type instead"
+                    )
+                else:
+                    raise sa_exc.ArgumentError(
+                        "Could not locate SQLAlchemy Core type for Python "
+                        f"type {our_type} inside the {self.column.key!r} "
+                        "attribute Mapped annotation"
+                    )
+
+            self.column._set_type(new_sqltype)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/query.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/query.py
new file mode 100644
index 00000000..af496b24
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/query.py
@@ -0,0 +1,3454 @@
+# orm/query.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""The Query class and support.
+
+Defines the :class:`_query.Query` class, the central
+construct used by the ORM to construct database queries.
+
+The :class:`_query.Query` class should not be confused with the
+:class:`_expression.Select` class, which defines database
+SELECT operations at the SQL (non-ORM) level.  ``Query`` differs from
+``Select`` in that it returns ORM-mapped objects and interacts with an
+ORM session, whereas the ``Select`` construct, once executed, returns
+plain iterable result sets of rows with no ORM-level behavior.
+
+"""
+from __future__ import annotations
+
+import collections.abc as collections_abc
+import operator
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import Generic
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import attributes
+from . import interfaces
+from . import loading
+from . import util as orm_util
+from ._typing import _O
+from .base import _assertions
+from .context import _column_descriptions
+from .context import _determine_last_joined_entity
+from .context import _legacy_filter_by_entity_zero
+from .context import FromStatement
+from .context import ORMCompileState
+from .context import QueryContext
+from .interfaces import ORMColumnDescription
+from .interfaces import ORMColumnsClauseRole
+from .util import AliasedClass
+from .util import object_mapper
+from .util import with_parent
+from .. import exc as sa_exc
+from .. import inspect
+from .. import inspection
+from .. import log
+from .. import sql
+from .. import util
+from ..engine import Result
+from ..engine import Row
+from ..event import dispatcher
+from ..event import EventTarget
+from ..sql import coercions
+from ..sql import expression
+from ..sql import roles
+from ..sql import Select
+from ..sql import util as sql_util
+from ..sql import visitors
+from ..sql._typing import _FromClauseArgument
+from ..sql._typing import _TP
+from ..sql.annotation import SupportsCloneAnnotations
+from ..sql.base import _entity_namespace_key
+from ..sql.base import _generative
+from ..sql.base import _NoArg
+from ..sql.base import Executable
+from ..sql.base import Generative
+from ..sql.elements import BooleanClauseList
+from ..sql.expression import Exists
+from ..sql.selectable import _MemoizedSelectEntities
+from ..sql.selectable import _SelectFromElements
+from ..sql.selectable import ForUpdateArg
+from ..sql.selectable import HasHints
+from ..sql.selectable import HasPrefixes
+from ..sql.selectable import HasSuffixes
+from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
+from ..sql.selectable import SelectLabelStyle
+from ..util.typing import Literal
+from ..util.typing import Self
+
+
+if TYPE_CHECKING:
+    from ._typing import _EntityType
+    from ._typing import _ExternalEntityType
+    from ._typing import _InternalEntityType
+    from ._typing import SynchronizeSessionArgument
+    from .mapper import Mapper
+    from .path_registry import PathRegistry
+    from .session import _PKIdentityArgument
+    from .session import Session
+    from .state import InstanceState
+    from ..engine.cursor import CursorResult
+    from ..engine.interfaces import _ImmutableExecuteOptions
+    from ..engine.interfaces import CompiledCacheType
+    from ..engine.interfaces import IsolationLevel
+    from ..engine.interfaces import SchemaTranslateMapType
+    from ..engine.result import FrozenResult
+    from ..engine.result import ScalarResult
+    from ..sql._typing import _ColumnExpressionArgument
+    from ..sql._typing import _ColumnExpressionOrStrLabelArgument
+    from ..sql._typing import _ColumnsClauseArgument
+    from ..sql._typing import _DMLColumnArgument
+    from ..sql._typing import _JoinTargetArgument
+    from ..sql._typing import _LimitOffsetType
+    from ..sql._typing import _MAYBE_ENTITY
+    from ..sql._typing import _no_kw
+    from ..sql._typing import _NOT_ENTITY
+    from ..sql._typing import _OnClauseArgument
+    from ..sql._typing import _PropagateAttrsType
+    from ..sql._typing import _T0
+    from ..sql._typing import _T1
+    from ..sql._typing import _T2
+    from ..sql._typing import _T3
+    from ..sql._typing import _T4
+    from ..sql._typing import _T5
+    from ..sql._typing import _T6
+    from ..sql._typing import _T7
+    from ..sql._typing import _TypedColumnClauseArgument as _TCCA
+    from ..sql.base import CacheableOptions
+    from ..sql.base import ExecutableOption
+    from ..sql.dml import UpdateBase
+    from ..sql.elements import ColumnElement
+    from ..sql.elements import Label
+    from ..sql.selectable import _ForUpdateOfArgument
+    from ..sql.selectable import _JoinTargetElement
+    from ..sql.selectable import _SetupJoinsElement
+    from ..sql.selectable import Alias
+    from ..sql.selectable import CTE
+    from ..sql.selectable import ExecutableReturnsRows
+    from ..sql.selectable import FromClause
+    from ..sql.selectable import ScalarSelect
+    from ..sql.selectable import Subquery
+
+
+__all__ = ["Query", "QueryContext"]
+
+_T = TypeVar("_T", bound=Any)
+
+
+@inspection._self_inspects
+@log.class_logger
+class Query(
+    _SelectFromElements,
+    SupportsCloneAnnotations,
+    HasPrefixes,
+    HasSuffixes,
+    HasHints,
+    EventTarget,
+    log.Identified,
+    Generative,
+    Executable,
+    Generic[_T],
+):
+    """ORM-level SQL construction object.
+
+    .. legacy:: The ORM :class:`.Query` object is a legacy construct
+       as of SQLAlchemy 2.0.   See the notes at the top of
+       :ref:`query_api_toplevel` for an overview, including links to migration
+       documentation.
+
+    :class:`_query.Query` objects are normally initially generated using the
+    :meth:`~.Session.query` method of :class:`.Session`, and in
+    less common cases by instantiating the :class:`_query.Query` directly and
+    associating with a :class:`.Session` using the
+    :meth:`_query.Query.with_session`
+    method.
+
+    """
+
+    # elements that are in Core and can be cached in the same way
+    _where_criteria: Tuple[ColumnElement[Any], ...] = ()
+    _having_criteria: Tuple[ColumnElement[Any], ...] = ()
+
+    _order_by_clauses: Tuple[ColumnElement[Any], ...] = ()
+    _group_by_clauses: Tuple[ColumnElement[Any], ...] = ()
+    _limit_clause: Optional[ColumnElement[Any]] = None
+    _offset_clause: Optional[ColumnElement[Any]] = None
+
+    _distinct: bool = False
+    _distinct_on: Tuple[ColumnElement[Any], ...] = ()
+
+    _for_update_arg: Optional[ForUpdateArg] = None
+    _correlate: Tuple[FromClause, ...] = ()
+    _auto_correlate: bool = True
+    _from_obj: Tuple[FromClause, ...] = ()
+    _setup_joins: Tuple[_SetupJoinsElement, ...] = ()
+
+    _label_style: SelectLabelStyle = SelectLabelStyle.LABEL_STYLE_LEGACY_ORM
+
+    _memoized_select_entities = ()
+
+    _compile_options: Union[Type[CacheableOptions], CacheableOptions] = (
+        ORMCompileState.default_compile_options
+    )
+
+    _with_options: Tuple[ExecutableOption, ...]
+    load_options = QueryContext.default_load_options + {
+        "_legacy_uniquing": True
+    }
+
+    _params: util.immutabledict[str, Any] = util.EMPTY_DICT
+
+    # local Query builder state, not needed for
+    # compilation or execution
+    _enable_assertions = True
+
+    _statement: Optional[ExecutableReturnsRows] = None
+
+    session: Session
+
+    dispatch: dispatcher[Query[_T]]
+
+    # mirrors that of ClauseElement, used to propagate the "orm"
+    # plugin as well as the "subject" of the plugin, e.g. the mapper
+    # we are querying against.
+    @util.memoized_property
+    def _propagate_attrs(self) -> _PropagateAttrsType:
+        return util.EMPTY_DICT
+
+    def __init__(
+        self,
+        entities: Union[
+            _ColumnsClauseArgument[Any], Sequence[_ColumnsClauseArgument[Any]]
+        ],
+        session: Optional[Session] = None,
+    ):
+        """Construct a :class:`_query.Query` directly.
+
+        E.g.::
+
+            q = Query([User, Address], session=some_session)
+
+        The above is equivalent to::
+
+            q = some_session.query(User, Address)
+
+        :param entities: a sequence of entities and/or SQL expressions.
+
+        :param session: a :class:`.Session` with which the
+         :class:`_query.Query`
+         will be associated.   Optional; a :class:`_query.Query`
+         can be associated
+         with a :class:`.Session` generatively via the
+         :meth:`_query.Query.with_session` method as well.
+
+        .. seealso::
+
+            :meth:`.Session.query`
+
+            :meth:`_query.Query.with_session`
+
+        """
+
+        # session is usually present.  There's one case in subqueryloader
+        # where it stores a Query without a Session, and there are also
+        # tests for the query(Entity).with_session(session) API, which
+        # likely appears in some old recipes; however these are legacy
+        # patterns, as select() can now be used.
+        self.session = session  # type: ignore
+        self._set_entities(entities)
+
+    def _set_propagate_attrs(self, values: Mapping[str, Any]) -> Self:
+        self._propagate_attrs = util.immutabledict(values)
+        return self
+
+    def _set_entities(
+        self,
+        entities: Union[
+            _ColumnsClauseArgument[Any], Iterable[_ColumnsClauseArgument[Any]]
+        ],
+    ) -> None:
+        self._raw_columns = [
+            coercions.expect(
+                roles.ColumnsClauseRole,
+                ent,
+                apply_propagate_attrs=self,
+                post_inspect=True,
+            )
+            for ent in util.to_list(entities)
+        ]
+
+    def tuples(self: Query[_O]) -> Query[Tuple[_O]]:
+        """return a tuple-typed form of this :class:`.Query`.
+
+        This method invokes the :meth:`.Query.only_return_tuples`
+        method with a value of ``True``, which by itself ensures that this
+        :class:`.Query` will always return :class:`.Row` objects, even
+        if the query is made against a single entity.  At the typing
+        level, it will also return a "typed" query, if possible, that
+        types result rows as ``Tuple`` objects with typed elements.
+
+        This method can be compared to the :meth:`.Result.tuples` method,
+        which returns "self", but from a typing perspective returns an object
+        that will yield typed ``Tuple`` objects for results.   Typing
+        takes effect only if this :class:`.Query` object is a typed
+        query object already.
+
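+        E.g., a minimal sketch (``User`` stands in for any mapped
+        class)::
+
+            for (user,) in session.query(User).tuples():
+                print(user.name)
+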
+        .. versionadded:: 2.0
+
+        .. seealso::
+
+            :meth:`.Result.tuples` - v2 equivalent method.
+
+        """
+        return self.only_return_tuples(True)  # type: ignore
+
+    def _entity_from_pre_ent_zero(self) -> Optional[_InternalEntityType[Any]]:
+        if not self._raw_columns:
+            return None
+
+        ent = self._raw_columns[0]
+
+        if "parententity" in ent._annotations:
+            return ent._annotations["parententity"]  # type: ignore
+        elif "bundle" in ent._annotations:
+            return ent._annotations["bundle"]  # type: ignore
+        else:
+            # label, other SQL expression
+            for element in visitors.iterate(ent):
+                if "parententity" in element._annotations:
+                    return element._annotations["parententity"]  # type: ignore  # noqa: E501
+            else:
+                return None
+
+    def _only_full_mapper_zero(self, methname: str) -> Mapper[Any]:
+        if (
+            len(self._raw_columns) != 1
+            or "parententity" not in self._raw_columns[0]._annotations
+            or not self._raw_columns[0].is_selectable
+        ):
+            raise sa_exc.InvalidRequestError(
+                "%s() can only be used against "
+                "a single mapped class." % methname
+            )
+
+        return self._raw_columns[0]._annotations["parententity"]  # type: ignore  # noqa: E501
+
+    def _set_select_from(
+        self, obj: Iterable[_FromClauseArgument], set_base_alias: bool
+    ) -> None:
+        fa = [
+            coercions.expect(
+                roles.StrictFromClauseRole,
+                elem,
+                allow_select=True,
+                apply_propagate_attrs=self,
+            )
+            for elem in obj
+        ]
+
+        self._compile_options += {"_set_base_alias": set_base_alias}
+        self._from_obj = tuple(fa)
+
+    @_generative
+    def _set_lazyload_from(self, state: InstanceState[Any]) -> Self:
+        self.load_options += {"_lazy_loaded_from": state}
+        return self
+
+    def _get_condition(self) -> None:
+        """used by legacy BakedQuery"""
+        self._no_criterion_condition("get", order_by=False, distinct=False)
+
+    def _get_existing_condition(self) -> None:
+        self._no_criterion_assertion("get", order_by=False, distinct=False)
+
+    def _no_criterion_assertion(
+        self, meth: str, order_by: bool = True, distinct: bool = True
+    ) -> None:
+        if not self._enable_assertions:
+            return
+        if (
+            self._where_criteria
+            or self._statement is not None
+            or self._from_obj
+            or self._setup_joins
+            or self._limit_clause is not None
+            or self._offset_clause is not None
+            or self._group_by_clauses
+            or (order_by and self._order_by_clauses)
+            or (distinct and self._distinct)
+        ):
+            raise sa_exc.InvalidRequestError(
+                "Query.%s() being called on a "
+                "Query with existing criterion. " % meth
+            )
+
+    def _no_criterion_condition(
+        self, meth: str, order_by: bool = True, distinct: bool = True
+    ) -> None:
+        self._no_criterion_assertion(meth, order_by, distinct)
+
+        self._from_obj = self._setup_joins = ()
+        if self._statement is not None:
+            self._compile_options += {"_statement": None}
+        self._where_criteria = ()
+        self._distinct = False
+
+        self._order_by_clauses = self._group_by_clauses = ()
+
+    def _no_clauseelement_condition(self, meth: str) -> None:
+        if not self._enable_assertions:
+            return
+        if self._order_by_clauses:
+            raise sa_exc.InvalidRequestError(
+                "Query.%s() being called on a "
+                "Query with existing criterion. " % meth
+            )
+        self._no_criterion_condition(meth)
+
+    def _no_statement_condition(self, meth: str) -> None:
+        if not self._enable_assertions:
+            return
+        if self._statement is not None:
+            raise sa_exc.InvalidRequestError(
+                (
+                    "Query.%s() being called on a Query with an existing full "
+                    "statement - can't apply criterion."
+                )
+                % meth
+            )
+
+    def _no_limit_offset(self, meth: str) -> None:
+        if not self._enable_assertions:
+            return
+        if self._limit_clause is not None or self._offset_clause is not None:
+            raise sa_exc.InvalidRequestError(
+                "Query.%s() being called on a Query which already has LIMIT "
+                "or OFFSET applied.  Call %s() before limit() or offset() "
+                "are applied." % (meth, meth)
+            )
+
+    @property
+    def _has_row_limiting_clause(self) -> bool:
+        return (
+            self._limit_clause is not None or self._offset_clause is not None
+        )
+
+    def _get_options(
+        self,
+        populate_existing: Optional[bool] = None,
+        version_check: Optional[bool] = None,
+        only_load_props: Optional[Sequence[str]] = None,
+        refresh_state: Optional[InstanceState[Any]] = None,
+        identity_token: Optional[Any] = None,
+    ) -> Self:
+        load_options: Dict[str, Any] = {}
+        compile_options: Dict[str, Any] = {}
+
+        if version_check:
+            load_options["_version_check"] = version_check
+        if populate_existing:
+            load_options["_populate_existing"] = populate_existing
+        if refresh_state:
+            load_options["_refresh_state"] = refresh_state
+            compile_options["_for_refresh_state"] = True
+        if only_load_props:
+            compile_options["_only_load_props"] = frozenset(only_load_props)
+        if identity_token:
+            load_options["_identity_token"] = identity_token
+
+        if load_options:
+            self.load_options += load_options
+        if compile_options:
+            self._compile_options += compile_options
+
+        return self
+
+    def _clone(self, **kw: Any) -> Self:
+        return self._generate()
+
+    def _get_select_statement_only(self) -> Select[_T]:
+        if self._statement is not None:
+            raise sa_exc.InvalidRequestError(
+                "Can't call this method on a Query that uses from_statement()"
+            )
+        return cast("Select[_T]", self.statement)
+
+    @property
+    def statement(self) -> Union[Select[_T], FromStatement[_T], UpdateBase]:
+        """The full SELECT statement represented by this Query.
+
+        The statement by default will not have disambiguating labels
+        applied to the construct unless
+        :meth:`_query.Query.set_label_style` is called first.
+
+        """
+
+        # .statement can return the direct future.Select() construct here, as
+        # long as we are not using subsequent adaption features that
+        # are made against raw entities, e.g. from_self(), with_polymorphic(),
+        # select_entity_from().  If these features are being used, then
+        # the Select() we return will not have the correct .selected_columns
+        # collection and will not embed in subsequent queries correctly.
+        # We could find a way to make this collection "correct", however
+        # this would not be too different from doing the full compile as
+        # we are doing in any case, the Select() would still not have the
+        # proper state for other attributes like whereclause, order_by,
+        # and these features are all deprecated in any case.
+        #
+        # for these reasons, Query is not a Select, it remains an ORM
+        # object for which __clause_element__() must be called in order for
+        # it to provide a real expression object.
+        #
+        # from there, it starts to look much like Query itself won't be
+        # passed into the execute process and won't generate its own cache
+        # key; this will all occur in terms of the ORM-enabled Select.
+        stmt: Union[Select[_T], FromStatement[_T], UpdateBase]
+
+        if not self._compile_options._set_base_alias:
+            # if we don't have legacy top level aliasing features in use
+            # then convert to a future select() directly
+            stmt = self._statement_20(for_statement=True)
+        else:
+            stmt = self._compile_state(for_statement=True).statement
+
+        if self._params:
+            stmt = stmt.params(self._params)
+
+        return stmt
+
+    def _final_statement(self, legacy_query_style: bool = True) -> Select[Any]:
+        """Return the 'final' SELECT statement for this :class:`.Query`.
+
+        This is used by the testing suite only and is fairly inefficient.
+
+        This is the Core-only select() that will be rendered by a complete
+        compilation of this query, and is what .statement used to return
+        in 1.3.
+
+
+        """
+
+        q = self._clone()
+
+        return q._compile_state(
+            use_legacy_query_style=legacy_query_style
+        ).statement  # type: ignore
+
+    def _statement_20(
+        self, for_statement: bool = False, use_legacy_query_style: bool = True
+    ) -> Union[Select[_T], FromStatement[_T]]:
+        # TODO: this event needs to be deprecated, as it currently applies
+        # only to ORM Query and occurs at a point in the flow that is now
+        # more or less artificial
+        if self.dispatch.before_compile:
+            for fn in self.dispatch.before_compile:
+                new_query = fn(self)
+                if new_query is not None and new_query is not self:
+                    self = new_query
+                    if not fn._bake_ok:  # type: ignore
+                        self._compile_options += {"_bake_ok": False}
+
+        compile_options = self._compile_options
+        compile_options += {
+            "_for_statement": for_statement,
+            "_use_legacy_query_style": use_legacy_query_style,
+        }
+
+        stmt: Union[Select[_T], FromStatement[_T]]
+
+        if self._statement is not None:
+            stmt = FromStatement(self._raw_columns, self._statement)
+            stmt.__dict__.update(
+                _with_options=self._with_options,
+                _with_context_options=self._with_context_options,
+                _compile_options=compile_options,
+                _execution_options=self._execution_options,
+                _propagate_attrs=self._propagate_attrs,
+            )
+        else:
+            # Query / select() internal attributes are 99% cross-compatible
+            stmt = Select._create_raw_select(**self.__dict__)
+            stmt.__dict__.update(
+                _label_style=self._label_style,
+                _compile_options=compile_options,
+                _propagate_attrs=self._propagate_attrs,
+            )
+            stmt.__dict__.pop("session", None)
+
+        # ensure the ORM context is used to compile the statement, even
+        # if it has no ORM entities.  This is so ORM-only things like
+        # _legacy_joins are picked up that wouldn't be picked up by the
+        # Core statement context
+        if "compile_state_plugin" not in stmt._propagate_attrs:
+            stmt._propagate_attrs = stmt._propagate_attrs.union(
+                {"compile_state_plugin": "orm", "plugin_subject": None}
+            )
+
+        return stmt
+
+    def subquery(
+        self,
+        name: Optional[str] = None,
+        with_labels: bool = False,
+        reduce_columns: bool = False,
+    ) -> Subquery:
+        """Return the full SELECT statement represented by
+        this :class:`_query.Query`, embedded within an
+        :class:`_expression.Alias`.
+
+        Eager JOIN generation within the query is disabled.
+
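+        E.g., a minimal sketch (``User`` is an assumed mapped class)::
+
+            subq = session.query(User.id, User.name).subquery()
+
+            # the subquery's columns are available via its .c collection
+            rows = session.query(subq.c.name).filter(subq.c.id > 5).all()
+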
+        .. seealso::
+
+            :meth:`_sql.Select.subquery` - v2 comparable method.
+
+        :param name: string name to be assigned as the alias;
+            this is passed through to :meth:`_expression.FromClause.alias`.
+            If ``None``, a name will be deterministically generated
+            at compile time.
+
+        :param with_labels: if True, :meth:`.with_labels` will be called
+         on the :class:`_query.Query` first to apply table-qualified labels
+         to all columns.
+
+        :param reduce_columns: if True,
+         :meth:`_expression.Select.reduce_columns` will
+         be called on the resulting :func:`_expression.select` construct,
+         to remove same-named columns where one also refers to the other
+         via foreign key or WHERE clause equivalence.
+
+        """
+        q = self.enable_eagerloads(False)
+        if with_labels:
+            q = q.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
+
+        stmt = q._get_select_statement_only()
+
+        if TYPE_CHECKING:
+            assert isinstance(stmt, Select)
+
+        if reduce_columns:
+            stmt = stmt.reduce_columns()
+        return stmt.subquery(name=name)
+
+    def cte(
+        self,
+        name: Optional[str] = None,
+        recursive: bool = False,
+        nesting: bool = False,
+    ) -> CTE:
+        r"""Return the full SELECT statement represented by this
+        :class:`_query.Query` represented as a common table expression (CTE).
+
+        Parameters and usage are the same as those of the
+        :meth:`_expression.SelectBase.cte` method; see that method for
+        further details.
+
+        Here is the `PostgreSQL WITH
+        RECURSIVE example
+        <https://www.postgresql.org/docs/current/static/queries-with.html>`_.
+        Note that, in this example, the ``included_parts`` cte and the
+        ``incl_alias`` alias of it are Core selectables, which
+        means the columns are accessed via the ``.c.`` attribute.  The
+        ``parts_alias`` object is an :func:`_orm.aliased` instance of the
+        ``Part`` entity, so column-mapped attributes are available
+        directly::
+
+            from sqlalchemy.orm import aliased
+
+
+            class Part(Base):
+                __tablename__ = "part"
+                part = Column(String, primary_key=True)
+                sub_part = Column(String, primary_key=True)
+                quantity = Column(Integer)
+
+
+            included_parts = (
+                session.query(Part.sub_part, Part.part, Part.quantity)
+                .filter(Part.part == "our part")
+                .cte(name="included_parts", recursive=True)
+            )
+
+            incl_alias = aliased(included_parts, name="pr")
+            parts_alias = aliased(Part, name="p")
+            included_parts = included_parts.union_all(
+                session.query(
+                    parts_alias.sub_part, parts_alias.part, parts_alias.quantity
+                ).filter(parts_alias.part == incl_alias.c.sub_part)
+            )
+
+            q = session.query(
+                included_parts.c.sub_part,
+                func.sum(included_parts.c.quantity).label("total_quantity"),
+            ).group_by(included_parts.c.sub_part)
+
+        .. seealso::
+
+            :meth:`_sql.Select.cte` - v2 equivalent method.
+
+        """  # noqa: E501
+        return (
+            self.enable_eagerloads(False)
+            ._get_select_statement_only()
+            .cte(name=name, recursive=recursive, nesting=nesting)
+        )
+
+    def label(self, name: Optional[str]) -> Label[Any]:
+        """Return the full SELECT statement represented by this
+        :class:`_query.Query`, converted
+        to a scalar subquery with a label of the given name.
+
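+        E.g., a hedged sketch of a correlated scalar count, assuming
+        mapped classes ``User`` and ``Address`` and ``func`` from
+        ``sqlalchemy``::
+
+            address_count = (
+                session.query(func.count(Address.id))
+                .filter(Address.user_id == User.id)
+                .label("address_count")
+            )
+            rows = session.query(User.name, address_count).all()
+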
+        .. seealso::
+
+            :meth:`_sql.Select.label` - v2 comparable method.
+
+        """
+
+        return (
+            self.enable_eagerloads(False)
+            ._get_select_statement_only()
+            .label(name)
+        )
+
+    @overload
+    def as_scalar(  # type: ignore[overload-overlap]
+        self: Query[Tuple[_MAYBE_ENTITY]],
+    ) -> ScalarSelect[_MAYBE_ENTITY]: ...
+
+    @overload
+    def as_scalar(
+        self: Query[Tuple[_NOT_ENTITY]],
+    ) -> ScalarSelect[_NOT_ENTITY]: ...
+
+    @overload
+    def as_scalar(self) -> ScalarSelect[Any]: ...
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`_query.Query.as_scalar` method is deprecated and will be "
+        "removed in a future release.  Please refer to "
+        ":meth:`_query.Query.scalar_subquery`.",
+    )
+    def as_scalar(self) -> ScalarSelect[Any]:
+        """Return the full SELECT statement represented by this
+        :class:`_query.Query`, converted to a scalar subquery.
+
+        """
+        return self.scalar_subquery()
+
+    @overload
+    def scalar_subquery(
+        self: Query[Tuple[_MAYBE_ENTITY]],
+    ) -> ScalarSelect[Any]: ...
+
+    @overload
+    def scalar_subquery(
+        self: Query[Tuple[_NOT_ENTITY]],
+    ) -> ScalarSelect[_NOT_ENTITY]: ...
+
+    @overload
+    def scalar_subquery(self) -> ScalarSelect[Any]: ...
+
+    def scalar_subquery(self) -> ScalarSelect[Any]:
+        """Return the full SELECT statement represented by this
+        :class:`_query.Query`, converted to a scalar subquery.
+
+        Analogous to
+        :meth:`sqlalchemy.sql.expression.SelectBase.scalar_subquery`.
+
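+        E.g., a minimal sketch (``User`` is an assumed mapped class;
+        ``func`` is ``sqlalchemy.func``)::
+
+            max_id = session.query(func.max(User.id)).scalar_subquery()
+            q = session.query(User).filter(User.id == max_id)
+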
+        .. versionchanged:: 1.4 The :meth:`_query.Query.scalar_subquery`
+           method replaces the :meth:`_query.Query.as_scalar` method.
+
+        .. seealso::
+
+            :meth:`_sql.Select.scalar_subquery` - v2 comparable method.
+
+        """
+
+        return (
+            self.enable_eagerloads(False)
+            ._get_select_statement_only()
+            .scalar_subquery()
+        )
+
+    @property
+    def selectable(self) -> Union[Select[_T], FromStatement[_T], UpdateBase]:
+        """Return the :class:`_expression.Select` object emitted by this
+        :class:`_query.Query`.
+
+        Used for :func:`_sa.inspect` compatibility, this is equivalent to::
+
+            query.enable_eagerloads(False).with_labels().statement
+
+        """
+        return self.__clause_element__()
+
+    def __clause_element__(
+        self,
+    ) -> Union[Select[_T], FromStatement[_T], UpdateBase]:
+        return (
+            self._with_compile_options(
+                _enable_eagerloads=False, _render_for_subquery=True
+            )
+            .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
+            .statement
+        )
+
+    @overload
+    def only_return_tuples(
+        self: Query[_O], value: Literal[True]
+    ) -> RowReturningQuery[Tuple[_O]]: ...
+
+    @overload
+    def only_return_tuples(
+        self: Query[_O], value: Literal[False]
+    ) -> Query[_O]: ...
+
+    @_generative
+    def only_return_tuples(self, value: bool) -> Query[Any]:
+        """When set to True, the query results will always be a
+        :class:`.Row` object.
+
+        This can change a query that normally returns a single entity
+        as a scalar to return a :class:`.Row` result in all cases.
+
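+        E.g., a minimal sketch (``User`` is an assumed mapped class)::
+
+            row = session.query(User).only_return_tuples(True).first()
+            if row is not None:
+                user = row[0]  # a Row is returned, not a User instance
+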
+        .. seealso::
+
+            :meth:`.Query.tuples` - returns tuples, but also at the typing
+            level will type results as ``Tuple``.
+
+            :meth:`_query.Query.is_single_entity`
+
+            :meth:`_engine.Result.tuples` - v2 comparable method.
+
+        """
+        self.load_options += dict(_only_return_tuples=value)
+        return self
+
+    @property
+    def is_single_entity(self) -> bool:
+        """Indicates if this :class:`_query.Query`
+        returns tuples or single entities.
+
+        Returns True if this query returns a single entity for each instance
+        in its result list, and False if this query returns a tuple of entities
+        for each result.
+
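+        E.g. (``User`` and ``Address`` are assumed mapped classes)::
+
+            session.query(User).is_single_entity  # True
+            session.query(User, Address).is_single_entity  # False
+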
+        .. versionadded:: 1.3.11
+
+        .. seealso::
+
+            :meth:`_query.Query.only_return_tuples`
+
+        """
+        return (
+            not self.load_options._only_return_tuples
+            and len(self._raw_columns) == 1
+            and "parententity" in self._raw_columns[0]._annotations
+            and isinstance(
+                self._raw_columns[0]._annotations["parententity"],
+                ORMColumnsClauseRole,
+            )
+        )
+
+    @_generative
+    def enable_eagerloads(self, value: bool) -> Self:
+        """Control whether or not eager joins and subqueries are
+        rendered.
+
+        When set to False, the returned Query will not render
+        eager joins regardless of :func:`~sqlalchemy.orm.joinedload`,
+        :func:`~sqlalchemy.orm.subqueryload` options
+        or mapper-level ``lazy='joined'``/``lazy='subquery'``
+        configurations.
+
+        This is used primarily when nesting the Query's
+        statement into a subquery or other
+        selectable, or when using :meth:`_query.Query.yield_per`.
+
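+        E.g., a minimal sketch (``User`` is an assumed mapped class,
+        possibly with eager-loading relationships configured)::
+
+            subq = session.query(User).enable_eagerloads(False).subquery()
+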
+        """
+        self._compile_options += {"_enable_eagerloads": value}
+        return self
+
+    @_generative
+    def _with_compile_options(self, **opt: Any) -> Self:
+        self._compile_options += opt
+        return self
+
+    @util.became_legacy_20(
+        ":meth:`_orm.Query.with_labels` and :meth:`_orm.Query.apply_labels`",
+        alternative="Use set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL) "
+        "instead.",
+    )
+    def with_labels(self) -> Self:
+        return self.set_label_style(
+            SelectLabelStyle.LABEL_STYLE_TABLENAME_PLUS_COL
+        )
+
+    apply_labels = with_labels
+
+    @property
+    def get_label_style(self) -> SelectLabelStyle:
+        """
+        Retrieve the current label style.
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :meth:`_sql.Select.get_label_style` - v2 equivalent method.
+
+        """
+        return self._label_style
+
+    def set_label_style(self, style: SelectLabelStyle) -> Self:
+        """Apply column labels to the return value of Query.statement.
+
+        Indicates that this Query's ``statement`` accessor should return
+        a SELECT statement that applies labels to all columns in the
+        form ``<tablename>_<columnname>``; this is commonly used to
+        disambiguate columns from multiple tables which have the same
+        name.
+
+        When the ``Query`` actually issues SQL to load rows, it always
+        uses column labeling.
+
+        .. note:: The :meth:`_query.Query.set_label_style` method *only* applies
+           to the output of :attr:`_query.Query.statement`, and *not* to any of
+           the result-row invoking systems of :class:`_query.Query` itself,
+           e.g.
+           :meth:`_query.Query.first`, :meth:`_query.Query.all`, etc.
+           To execute
+           a query using :meth:`_query.Query.set_label_style`, invoke the
+           :attr:`_query.Query.statement` using :meth:`.Session.execute`::
+
+                result = session.execute(
+                    query.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL).statement
+                )
+
+        .. versionadded:: 1.4
+
+
+        .. seealso::
+
+            :meth:`_sql.Select.set_label_style` - v2 equivalent method.
+
+        """  # noqa
+        if self._label_style is not style:
+            self = self._generate()
+            self._label_style = style
+        return self
+
+    @_generative
+    def enable_assertions(self, value: bool) -> Self:
+        """Control whether assertions are generated.
+
+        When set to False, the returned Query will
+        not assert its state before certain operations,
+        including that LIMIT/OFFSET has not been applied
+        when filter() is called, no criterion exists
+        when get() is called, and no "from_statement()"
+        exists when filter()/order_by()/group_by() etc.
+        is called.  This more permissive mode is used by
+        custom Query subclasses to specify criterion or
+        other modifiers outside of the usual usage patterns.
+
+        Care should be taken to ensure that the usage
+        pattern is even possible.  A statement applied
+        by from_statement() will override any criterion
+        set by filter() or order_by(), for example.
+
+        """
+        self._enable_assertions = value
+        return self
+
+    @property
+    def whereclause(self) -> Optional[ColumnElement[bool]]:
+        """A readonly attribute which returns the current WHERE criterion for
+        this Query.
+
+        This returned value is a SQL expression construct, or ``None`` if no
+        criterion has been established.
+
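+        E.g., a minimal sketch (``User`` is an assumed mapped class)::
+
+            q = session.query(User).filter(User.id > 5)
+            print(q.whereclause)  # e.g. "user_account.id > :id_1"
+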
+        .. seealso::
+
+            :attr:`_sql.Select.whereclause` - v2 equivalent property.
+
+        """
+        return BooleanClauseList._construct_for_whereclause(
+            self._where_criteria
+        )
+
+    @_generative
+    def _with_current_path(self, path: PathRegistry) -> Self:
+        """indicate that this query applies to objects loaded
+        within a certain path.
+
+        Used by deferred loaders (see strategies.py) which transfer
+        query options from an originating query to a newly generated
+        query intended for the deferred load.
+
+        """
+        self._compile_options += {"_current_path": path}
+        return self
+
+    @_generative
+    def yield_per(self, count: int) -> Self:
+        r"""Yield only ``count`` rows at a time.
+
+        The purpose of this method is, when fetching very large result sets
+        (> 10K rows), to batch results in sub-collections and yield them
+        out partially, so that the Python interpreter doesn't need to
+        allocate very large areas of memory at once, which is both time
+        consuming and leads to excessive memory use.   The performance of
+        fetching hundreds of thousands of rows can often double when a
+        suitable yield-per setting (e.g. approximately 1000) is used, even
+        with DBAPIs that buffer rows (which most do).
+
+        As of SQLAlchemy 1.4, the :meth:`_orm.Query.yield_per` method is
+        equivalent to using the ``yield_per`` execution option at the ORM
+        level. See the section :ref:`orm_queryguide_yield_per` for further
+        background on this option.
+
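+        E.g., a minimal sketch (``User`` is an assumed mapped class,
+        ``process`` a hypothetical per-row handler)::
+
+            for user in session.query(User).yield_per(1000):
+                process(user)
+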
+        .. seealso::
+
+            :ref:`orm_queryguide_yield_per`
+
+        """
+        self.load_options += {"_yield_per": count}
+        return self
+
+    @util.became_legacy_20(
+        ":meth:`_orm.Query.get`",
+        alternative="The method is now available as :meth:`_orm.Session.get`",
+    )
+    def get(self, ident: _PKIdentityArgument) -> Optional[Any]:
+        """Return an instance based on the given primary key identifier,
+        or ``None`` if not found.
+
+        E.g.::
+
+            my_user = session.query(User).get(5)
+
+            some_object = session.query(VersionedFoo).get((5, 10))
+
+            some_object = session.query(VersionedFoo).get({"id": 5, "version_id": 10})
+
+        :meth:`_query.Query.get` is special in that it provides direct
+        access to the identity map of the owning :class:`.Session`.
+        If the given primary key identifier is present
+        in the local identity map, the object is returned
+        directly from this collection and no SQL is emitted,
+        unless the object has been marked fully expired.
+        If not present,
+        a SELECT is performed in order to locate the object.
+
+        :meth:`_query.Query.get` also will perform a check if
+        the object is present in the identity map and
+        marked as expired - a SELECT
+        is emitted to refresh the object as well as to
+        ensure that the row is still present.
+        If not, :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.
+
+        :meth:`_query.Query.get` is only used to return a single
+        mapped instance, not multiple instances or
+        individual column constructs, and strictly
+        on a single primary key value.  The originating
+        :class:`_query.Query` must be constructed in this way,
+        i.e. against a single mapped entity,
+        with no additional filtering criterion.  Loading
+        options via :meth:`_query.Query.options` may be applied
+        however, and will be used if the object is not
+        yet locally present.
+
+        :param ident: A scalar, tuple, or dictionary representing the
+         primary key.  For a composite (e.g. multiple column) primary key,
+         a tuple or dictionary should be passed.
+
+         For a single-column primary key, the scalar calling form is typically
+         the most expedient.  If the primary key of a row is the value "5",
+         the call looks like::
+
+            my_object = query.get(5)
+
+         The tuple form contains primary key values typically in
+         the order in which they correspond to the mapped
+         :class:`_schema.Table`
+         object's primary key columns, or if the
+         :paramref:`_orm.Mapper.primary_key` configuration parameter were
+         used, in
+         the order used for that parameter. For example, if the primary key
+         of a row is represented by the integers
+         5 and 10, the call would look like::
+
+             my_object = query.get((5, 10))
+
+         The dictionary form should include as keys the mapped attribute names
+         corresponding to each element of the primary key.  If the mapped class
+         has the attributes ``id``, ``version_id`` as the attributes which
+         store the object's primary key value, the call would look like::
+
+            my_object = query.get({"id": 5, "version_id": 10})
+
+         .. versionadded:: 1.3 the :meth:`_query.Query.get`
+            method now optionally
+            accepts a dictionary of attribute names to values in order to
+            indicate a primary key identifier.
+
+
+        :return: The object instance, or ``None``.
+
+        """  # noqa: E501
+        self._no_criterion_assertion("get", order_by=False, distinct=False)
+
+        # we still implement _get_impl() so that baked query can override
+        # it
+        return self._get_impl(ident, loading.load_on_pk_identity)
+
+    def _get_impl(
+        self,
+        primary_key_identity: _PKIdentityArgument,
+        db_load_fn: Callable[..., Any],
+        identity_token: Optional[Any] = None,
+    ) -> Optional[Any]:
+        mapper = self._only_full_mapper_zero("get")
+        return self.session._get_impl(
+            mapper,
+            primary_key_identity,
+            db_load_fn,
+            populate_existing=self.load_options._populate_existing,
+            with_for_update=self._for_update_arg,
+            options=self._with_options,
+            identity_token=identity_token,
+            execution_options=self._execution_options,
+        )
+
+    @property
+    def lazy_loaded_from(self) -> Optional[InstanceState[Any]]:
+        """An :class:`.InstanceState` that is using this :class:`_query.Query`
+        for a lazy load operation.
+
+        .. deprecated:: 1.4  This attribute should be viewed via the
+           :attr:`.ORMExecuteState.lazy_loaded_from` attribute, within
+           the context of the :meth:`.SessionEvents.do_orm_execute`
+           event.
+
+        .. seealso::
+
+            :attr:`.ORMExecuteState.lazy_loaded_from`
+
+        """
+        return self.load_options._lazy_loaded_from  # type: ignore
+
+    @property
+    def _current_path(self) -> PathRegistry:
+        return self._compile_options._current_path  # type: ignore
+
+    @_generative
+    def correlate(
+        self,
+        *fromclauses: Union[Literal[None, False], _FromClauseArgument],
+    ) -> Self:
+        """Return a :class:`.Query` construct which will correlate the given
+        FROM clauses to that of an enclosing :class:`.Query` or
+        :func:`~.expression.select`.
+
+        The method here accepts mapped classes, :func:`.aliased` constructs,
+        and :class:`_orm.Mapper` constructs as arguments, which are resolved
+        into expression constructs; plain expression constructs are also
+        accepted.
+
+        The correlation arguments are ultimately passed to
+        :meth:`_expression.Select.correlate`
+        after coercion to expression constructs.
+
+        The correlation arguments take effect in such cases
+        as when :meth:`_query.Query.from_self` is used, or when
+        a subquery as returned by :meth:`_query.Query.subquery` is
+        embedded in another :func:`_expression.select` construct.
+
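+        E.g., a minimal sketch, assuming ``User`` / ``Address`` mapped
+        classes and ``func`` imported from ``sqlalchemy``::
+
+            # correlate the counting subquery against the enclosing
+            # User query, so the users table is not repeated in the
+            # subquery's FROM clause
+            address_count = (
+                session.query(func.count(Address.id))
+                .filter(Address.user_id == User.id)
+                .correlate(User)
+                .scalar_subquery()
+            )
+            q = session.query(User.name, address_count)
+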
+        .. seealso::
+
+            :meth:`_sql.Select.correlate` - v2 equivalent method.
+
+        """
+
+        self._auto_correlate = False
+        if fromclauses and fromclauses[0] in {None, False}:
+            self._correlate = ()
+        else:
+            self._correlate = self._correlate + tuple(
+                coercions.expect(roles.FromClauseRole, f) for f in fromclauses
+            )
+        return self
+
+    @_generative
+    def autoflush(self, setting: bool) -> Self:
+        """Return a Query with a specific 'autoflush' setting.
+
+        As of SQLAlchemy 1.4, the :meth:`_orm.Query.autoflush` method
+        is equivalent to using the ``autoflush`` execution option at the
+        ORM level. See the section :ref:`orm_queryguide_autoflush` for
+        further background on this option.
+
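+        E.g., a minimal sketch::
+
+            # pending changes in the session will not be flushed
+            # before this query emits its SELECT
+            result = session.query(User).autoflush(False).all()
+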
+        """
+        self.load_options += {"_autoflush": setting}
+        return self
+
+    @_generative
+    def populate_existing(self) -> Self:
+        """Return a :class:`_query.Query`
+        that will expire and refresh all instances
+        as they are loaded, or reused from the current :class:`.Session`.
+
+        As of SQLAlchemy 1.4, the :meth:`_orm.Query.populate_existing` method
+        is equivalent to using the ``populate_existing`` execution option at
+        the ORM level. See the section :ref:`orm_queryguide_populate_existing`
+        for further background on this option.
+
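+        E.g., a minimal sketch::
+
+            # attributes of User objects already present in the session
+            # are overwritten with the freshly fetched row data
+            users = session.query(User).populate_existing().all()
+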
+        """
+        self.load_options += {"_populate_existing": True}
+        return self
+
+    @_generative
+    def _with_invoke_all_eagers(self, value: bool) -> Self:
+        """Set the 'invoke all eagers' flag which causes joined- and
+        subquery loaders to traverse into already-loaded related objects
+        and collections.
+
+        Default is that of :attr:`_query.Query._invoke_all_eagers`.
+
+        """
+        self.load_options += {"_invoke_all_eagers": value}
+        return self
+
+    @util.became_legacy_20(
+        ":meth:`_orm.Query.with_parent`",
+        alternative="Use the :func:`_orm.with_parent` standalone construct.",
+    )
+    @util.preload_module("sqlalchemy.orm.relationships")
+    def with_parent(
+        self,
+        instance: object,
+        property: Optional[  # noqa: A002
+            attributes.QueryableAttribute[Any]
+        ] = None,
+        from_entity: Optional[_ExternalEntityType[Any]] = None,
+    ) -> Self:
+        """Add filtering criterion that relates the given instance
+        to a child object or collection, using its attribute state
+        as well as an established :func:`_orm.relationship()`
+        configuration.
+
+        The method uses the :func:`.with_parent` function to generate
+        the clause, the result of which is passed to
+        :meth:`_query.Query.filter`.
+
+        Parameters are the same as :func:`.with_parent`, with the exception
+        that the given property can be None, in which case a search is
+        performed against this :class:`_query.Query` object's target mapper.
+
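+        E.g., a minimal sketch, assuming a ``User.addresses``
+        :func:`_orm.relationship` to ``Address``::
+
+            some_user = session.query(User).first()
+
+            # SELECT Address rows that belong to some_user, deriving
+            # the criteria from the User.addresses relationship
+            addresses = (
+                session.query(Address)
+                .with_parent(some_user, User.addresses)
+                .all()
+            )
+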
+        :param instance:
+          An instance which has some :func:`_orm.relationship`.
+
+        :param property:
+          Class bound attribute which indicates
+          what relationship from the instance should be used to reconcile the
+          parent/child relationship.
+
+        :param from_entity:
+          Entity in which to consider as the left side.  This defaults to the
+          "zero" entity of the :class:`_query.Query` itself.
+
+        """
+        relationships = util.preloaded.orm_relationships
+
+        if from_entity:
+            entity_zero = inspect(from_entity)
+        else:
+            entity_zero = _legacy_filter_by_entity_zero(self)
+        if property is None:
+            # TODO: deprecate, property has to be supplied
+            mapper = object_mapper(instance)
+
+            for prop in mapper.iterate_properties:
+                if (
+                    isinstance(prop, relationships.RelationshipProperty)
+                    and prop.mapper is entity_zero.mapper  # type: ignore
+                ):
+                    property = prop  # type: ignore  # noqa: A001
+                    break
+            else:
+                raise sa_exc.InvalidRequestError(
+                    "Could not locate a property which relates instances "
+                    "of class '%s' to instances of class '%s'"
+                    % (
+                        entity_zero.mapper.class_.__name__,  # type: ignore
+                        instance.__class__.__name__,
+                    )
+                )
+
+        return self.filter(
+            with_parent(
+                instance,
+                property,  # type: ignore
+                entity_zero.entity,  # type: ignore
+            )
+        )
+
+    @_generative
+    def add_entity(
+        self,
+        entity: _EntityType[Any],
+        alias: Optional[Union[Alias, Subquery]] = None,
+    ) -> Query[Any]:
+        """add a mapped entity to the list of result columns
+        to be returned.
+
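+        E.g., a minimal sketch, assuming ``User`` / ``Address`` mapped
+        classes::
+
+            # result rows are (User, Address) tuples
+            q = session.query(User).add_entity(Address).join(User.addresses)
+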
+        .. seealso::
+
+            :meth:`_sql.Select.add_columns` - v2 comparable method.
+        """
+
+        if alias is not None:
+            # TODO: deprecate
+            entity = AliasedClass(entity, alias)
+
+        self._raw_columns = list(self._raw_columns)
+
+        self._raw_columns.append(
+            coercions.expect(
+                roles.ColumnsClauseRole, entity, apply_propagate_attrs=self
+            )
+        )
+        return self
+
+    @_generative
+    def with_session(self, session: Session) -> Self:
+        """Return a :class:`_query.Query` that will use the given
+        :class:`.Session`.
+
+        While the :class:`_query.Query`
+        object is normally instantiated using the
+        :meth:`.Session.query` method, it is legal to build the
+        :class:`_query.Query`
+        directly without necessarily using a :class:`.Session`.  Such a
+        :class:`_query.Query` object, or any :class:`_query.Query`
+        already associated
+        with a different :class:`.Session`, can produce a new
+        :class:`_query.Query`
+        object associated with a target session using this method::
+
+            from sqlalchemy.orm import Query
+
+            query = Query([MyClass]).filter(MyClass.id == 5)
+
+            result = query.with_session(my_session).one()
+
+        """
+
+        self.session = session
+        return self
+
+    def _legacy_from_self(
+        self, *entities: _ColumnsClauseArgument[Any]
+    ) -> Self:
+        # used for query.count() as well as for the same
+        # function in BakedQuery, as well as some old tests in test_baked.py.
+
+        fromclause = (
+            self.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
+            .correlate(None)
+            .subquery()
+            ._anonymous_fromclause()
+        )
+
+        q = self._from_selectable(fromclause)
+
+        if entities:
+            q._set_entities(entities)
+        return q
+
+    @_generative
+    def _set_enable_single_crit(self, val: bool) -> Self:
+        self._compile_options += {"_enable_single_crit": val}
+        return self
+
+    @_generative
+    def _from_selectable(
+        self, fromclause: FromClause, set_entity_from: bool = True
+    ) -> Self:
+        for attr in (
+            "_where_criteria",
+            "_order_by_clauses",
+            "_group_by_clauses",
+            "_limit_clause",
+            "_offset_clause",
+            "_last_joined_entity",
+            "_setup_joins",
+            "_memoized_select_entities",
+            "_distinct",
+            "_distinct_on",
+            "_having_criteria",
+            "_prefixes",
+            "_suffixes",
+        ):
+            self.__dict__.pop(attr, None)
+        self._set_select_from([fromclause], set_entity_from)
+        self._compile_options += {
+            "_enable_single_crit": False,
+        }
+
+        return self
+
+    @util.deprecated(
+        "1.4",
+        ":meth:`_query.Query.values` "
+        "is deprecated and will be removed in a "
+        "future release.  Please use :meth:`_query.Query.with_entities`",
+    )
+    def values(self, *columns: _ColumnsClauseArgument[Any]) -> Iterable[Any]:
+        """Return an iterator yielding result tuples corresponding
+        to the given list of columns
+
+        """
+        return self._values_no_warn(*columns)
+
+    _values = values
+
+    def _values_no_warn(
+        self, *columns: _ColumnsClauseArgument[Any]
+    ) -> Iterable[Any]:
+        if not columns:
+            return iter(())
+        q = self._clone().enable_eagerloads(False)
+        q._set_entities(columns)
+        if not q.load_options._yield_per:
+            q.load_options += {"_yield_per": 10}
+        return iter(q)
+
+    @util.deprecated(
+        "1.4",
+        ":meth:`_query.Query.value` "
+        "is deprecated and will be removed in a "
+        "future release.  Please use :meth:`_query.Query.with_entities` "
+        "in combination with :meth:`_query.Query.scalar`",
+    )
+    def value(self, column: _ColumnExpressionArgument[Any]) -> Any:
+        """Return a scalar result corresponding to the given
+        column expression.
+
+        """
+        try:
+            return next(self._values_no_warn(column))[0]  # type: ignore
+        except StopIteration:
+            return None
+
+    @overload
+    def with_entities(self, _entity: _EntityType[_O]) -> Query[_O]: ...
+
+    @overload
+    def with_entities(
+        self,
+        _colexpr: roles.TypedColumnsClauseRole[_T],
+    ) -> RowReturningQuery[Tuple[_T]]: ...
+
+    # START OVERLOADED FUNCTIONS self.with_entities RowReturningQuery 2-8
+
+    # code within this block is **programmatically,
+    # statically generated** by tools/generate_tuple_map_overloads.py
+
+    @overload
+    def with_entities(
+        self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1]
+    ) -> RowReturningQuery[Tuple[_T0, _T1]]: ...
+
+    @overload
+    def with_entities(
+        self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2]
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2]]: ...
+
+    @overload
+    def with_entities(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3]]: ...
+
+    @overload
+    def with_entities(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4]]: ...
+
+    @overload
+    def with_entities(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+        __ent5: _TCCA[_T5],
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ...
+
+    @overload
+    def with_entities(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+        __ent5: _TCCA[_T5],
+        __ent6: _TCCA[_T6],
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ...
+
+    @overload
+    def with_entities(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+        __ent5: _TCCA[_T5],
+        __ent6: _TCCA[_T6],
+        __ent7: _TCCA[_T7],
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: ...
+
+    # END OVERLOADED FUNCTIONS self.with_entities
+
+    @overload
+    def with_entities(
+        self, *entities: _ColumnsClauseArgument[Any]
+    ) -> Query[Any]: ...
+
+    @_generative
+    def with_entities(
+        self, *entities: _ColumnsClauseArgument[Any], **__kw: Any
+    ) -> Query[Any]:
+        r"""Return a new :class:`_query.Query`
+        replacing the SELECT list with the
+        given entities.
+
+        e.g.::
+
+            # Users, filtered on some arbitrary criterion
+            # and then ordered by related email address
+            q = (
+                session.query(User)
+                .join(User.address)
+                .filter(User.name.like("%ed%"))
+                .order_by(Address.email)
+            )
+
+            # given *only* User.id==5, Address.email, and 'q', what
+            # would the *next* User in the result be ?
+            subq = (
+                q.with_entities(Address.email)
+                .order_by(None)
+                .filter(User.id == 5)
+                .subquery()
+            )
+            q = q.join((subq, subq.c.email < Address.email)).limit(1)
+
+        .. seealso::
+
+            :meth:`_sql.Select.with_only_columns` - v2 comparable method.
+        """
+        if __kw:
+            raise _no_kw()
+
+        # Query has all the same fields as Select for this operation
+        # this could in theory be based on a protocol but not sure if it's
+        # worth it
+        _MemoizedSelectEntities._generate_for_statement(self)  # type: ignore
+        self._set_entities(entities)
+        return self
+
+    @_generative
+    def add_columns(
+        self, *column: _ColumnExpressionArgument[Any]
+    ) -> Query[Any]:
+        """Add one or more column expressions to the list
+        of result columns to be returned.
+
+        .. seealso::
+
+            :meth:`_sql.Select.add_columns` - v2 comparable method.
+        """
+
+        self._raw_columns = list(self._raw_columns)
+
+        self._raw_columns.extend(
+            coercions.expect(
+                roles.ColumnsClauseRole,
+                c,
+                apply_propagate_attrs=self,
+                post_inspect=True,
+            )
+            for c in column
+        )
+        return self
+
+    @util.deprecated(
+        "1.4",
+        ":meth:`_query.Query.add_column` "
+        "is deprecated and will be removed in a "
+        "future release.  Please use :meth:`_query.Query.add_columns`",
+    )
+    def add_column(self, column: _ColumnExpressionArgument[Any]) -> Query[Any]:
+        """Add a column expression to the list of result columns to be
+        returned.
+
+        """
+        return self.add_columns(column)
+
+    @_generative
+    def options(self, *args: ExecutableOption) -> Self:
+        """Return a new :class:`_query.Query` object,
+        applying the given list of
+        mapper options.
+
+        Most supplied options regard changing how column- and
+        relationship-mapped attributes are loaded.
+
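+        E.g., a minimal sketch, assuming a ``User.addresses``
+        :func:`_orm.relationship` and ``joinedload`` imported from
+        ``sqlalchemy.orm``::
+
+            # eagerly load the User.addresses collection using a JOIN
+            users = (
+                session.query(User)
+                .options(joinedload(User.addresses))
+                .all()
+            )
+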
+        .. seealso::
+
+            :ref:`loading_columns`
+
+            :ref:`relationship_loader_options`
+
+        """
+
+        opts = tuple(util.flatten_iterator(args))
+        if self._compile_options._current_path:
+            # opting for lower method overhead for the checks
+            for opt in opts:
+                if not opt._is_core and opt._is_legacy_option:  # type: ignore
+                    opt.process_query_conditionally(self)  # type: ignore
+        else:
+            for opt in opts:
+                if not opt._is_core and opt._is_legacy_option:  # type: ignore
+                    opt.process_query(self)  # type: ignore
+
+        self._with_options += opts
+        return self
+
+    def with_transformation(
+        self, fn: Callable[[Query[Any]], Query[Any]]
+    ) -> Query[Any]:
+        """Return a new :class:`_query.Query` object transformed by
+        the given function.
+
+        E.g.::
+
+            def filter_something(criterion):
+                def transform(q):
+                    return q.filter(criterion)
+
+                return transform
+
+
+            q = q.with_transformation(filter_something(x == 5))
+
+        This allows ad-hoc recipes to be created for :class:`_query.Query`
+        objects.
+
+        """
+        return fn(self)
+
+    def get_execution_options(self) -> _ImmutableExecuteOptions:
+        """Get the non-SQL options which will take effect during execution.
+
+        .. versionadded:: 1.3
+
+        .. seealso::
+
+            :meth:`_query.Query.execution_options`
+
+            :meth:`_sql.Select.get_execution_options` - v2 comparable method.
+
+        """
+        return self._execution_options
+
+    @overload
+    def execution_options(
+        self,
+        *,
+        compiled_cache: Optional[CompiledCacheType] = ...,
+        logging_token: str = ...,
+        isolation_level: IsolationLevel = ...,
+        no_parameters: bool = False,
+        stream_results: bool = False,
+        max_row_buffer: int = ...,
+        yield_per: int = ...,
+        insertmanyvalues_page_size: int = ...,
+        schema_translate_map: Optional[SchemaTranslateMapType] = ...,
+        populate_existing: bool = False,
+        autoflush: bool = False,
+        preserve_rowcount: bool = False,
+        **opt: Any,
+    ) -> Self: ...
+
+    @overload
+    def execution_options(self, **opt: Any) -> Self: ...
+
+    @_generative
+    def execution_options(self, **kwargs: Any) -> Self:
+        """Set non-SQL options which take effect during execution.
+
+        Options allowed here include all of those accepted by
+        :meth:`_engine.Connection.execution_options`, as well as a series
+        of ORM specific options:
+
+        ``populate_existing=True`` - equivalent to using
+        :meth:`_orm.Query.populate_existing`
+
+        ``autoflush=True|False`` - equivalent to using
+        :meth:`_orm.Query.autoflush`
+
+        ``yield_per=<value>`` - equivalent to using
+        :meth:`_orm.Query.yield_per`
+
+        Note that the ``stream_results`` execution option is enabled
+        automatically if the :meth:`~sqlalchemy.orm.query.Query.yield_per()`
+        method or execution option is used.
+
+        .. versionadded:: 1.4 - added ORM options to
+           :meth:`_orm.Query.execution_options`
+
+        The execution options may also be specified on a per execution basis
+        when using :term:`2.0 style` queries via the
+        :paramref:`_orm.Session.execution_options` parameter.
+
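+        E.g., a minimal sketch combining ORM-level options::
+
+            q = session.query(User).execution_options(
+                populate_existing=True, yield_per=500
+            )
+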
+        .. warning:: The
+           :paramref:`_engine.Connection.execution_options.schema_translate_map`
+           execution option should not be used at the level of individual ORM
+           statement executions, as the :class:`_orm.Session` will not track
+           objects from different schema translate maps within a single
+           session.  For multiple schema translate maps within the scope of a
+           single :class:`_orm.Session`, see :ref:`examples_sharding`.
+
+
+        .. seealso::
+
+            :ref:`engine_stream_results`
+
+            :meth:`_query.Query.get_execution_options`
+
+            :meth:`_sql.Select.execution_options` - v2 equivalent method.
+
+        """
+        self._execution_options = self._execution_options.union(kwargs)
+        return self
+
+    @_generative
+    def with_for_update(
+        self,
+        *,
+        nowait: bool = False,
+        read: bool = False,
+        of: Optional[_ForUpdateOfArgument] = None,
+        skip_locked: bool = False,
+        key_share: bool = False,
+    ) -> Self:
+        """return a new :class:`_query.Query`
+        with the specified options for the
+        ``FOR UPDATE`` clause.
+
+        The behavior of this method is identical to that of
+        :meth:`_expression.GenerativeSelect.with_for_update`.
+        When called with no arguments,
+        the resulting ``SELECT`` statement will have a ``FOR UPDATE`` clause
+        appended.  When additional arguments are specified, backend-specific
+        options such as ``FOR UPDATE NOWAIT`` or ``LOCK IN SHARE MODE``
+        can take effect.
+
+        E.g.::
+
+            q = (
+                sess.query(User)
+                .populate_existing()
+                .with_for_update(nowait=True, of=User)
+            )
+
+        The above query on a PostgreSQL backend will render like:
+
+        .. sourcecode:: sql
+
+            SELECT users.id AS users_id FROM users FOR UPDATE OF users NOWAIT
+
+        .. warning::
+
+            Using ``with_for_update`` in the context of eager loading
+            relationships is not officially supported or recommended by
+            SQLAlchemy and may not work with certain queries on various
+            database backends.  When ``with_for_update`` is successfully used
+            with a query that involves :func:`_orm.joinedload`, SQLAlchemy will
+            attempt to emit SQL that locks all involved tables.
+
+        .. note::  It is generally a good idea to combine the use of the
+           :meth:`_orm.Query.populate_existing` method when using the
+           :meth:`_orm.Query.with_for_update` method.   The purpose of
+           :meth:`_orm.Query.populate_existing` is to force all the data read
+           from the SELECT to be populated into the ORM objects returned,
+           even if these objects are already in the :term:`identity map`.
+
+        .. seealso::
+
+            :meth:`_expression.GenerativeSelect.with_for_update`
+            - Core level method with
+            full argument and behavioral description.
+
+            :meth:`_orm.Query.populate_existing` - overwrites attributes of
+            objects already loaded in the identity map.
+
+        """  # noqa: E501
+
+        self._for_update_arg = ForUpdateArg(
+            read=read,
+            nowait=nowait,
+            of=of,
+            skip_locked=skip_locked,
+            key_share=key_share,
+        )
+        return self
+
+    @_generative
+    def params(
+        self, __params: Optional[Dict[str, Any]] = None, **kw: Any
+    ) -> Self:
+        r"""Add values for bind parameters which may have been
+        specified in filter().
+
+        Parameters may be specified using \**kwargs, or optionally a single
+        dictionary as the first positional argument. The reason for both is
+        that \**kwargs is convenient, however some parameter dictionaries
+        contain unicode keys, in which case \**kwargs cannot be used.
+
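+        E.g., a minimal sketch, assuming ``text`` imported from
+        ``sqlalchemy``::
+
+            q = (
+                session.query(User)
+                .filter(text("users.name = :name"))
+                .params(name="ed")
+            )
+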
+        """
+        if __params:
+            kw.update(__params)
+        self._params = self._params.union(kw)
+        return self
+
+    def where(self, *criterion: _ColumnExpressionArgument[bool]) -> Self:
+        """A synonym for :meth:`.Query.filter`.
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :meth:`_sql.Select.where` - v2 equivalent method.
+
+        """
+        return self.filter(*criterion)
+
+    @_generative
+    @_assertions(_no_statement_condition, _no_limit_offset)
+    def filter(self, *criterion: _ColumnExpressionArgument[bool]) -> Self:
+        r"""Apply the given filtering criterion to a copy
+        of this :class:`_query.Query`, using SQL expressions.
+
+        e.g.::
+
+            session.query(MyClass).filter(MyClass.name == "some name")
+
+        Multiple criteria may be specified, separated by commas; the effect
+        is that they will be joined together using the :func:`.and_`
+        function::
+
+            session.query(MyClass).filter(MyClass.name == "some name", MyClass.id > 5)
+
+        The criterion is any SQL expression object applicable to the
+        WHERE clause of a select.   String expressions are coerced
+        into SQL expression constructs via the :func:`_expression.text`
+        construct.
+
+        .. seealso::
+
+            :meth:`_query.Query.filter_by` - filter on keyword expressions.
+
+            :meth:`_sql.Select.where` - v2 equivalent method.
+
+        """  # noqa: E501
+        for crit in list(criterion):
+            crit = coercions.expect(
+                roles.WhereHavingRole, crit, apply_propagate_attrs=self
+            )
+
+            self._where_criteria += (crit,)
+        return self
+
+    @util.memoized_property
+    def _last_joined_entity(
+        self,
+    ) -> Optional[Union[_InternalEntityType[Any], _JoinTargetElement]]:
+        if self._setup_joins:
+            return _determine_last_joined_entity(
+                self._setup_joins,
+            )
+        else:
+            return None
+
+    def _filter_by_zero(self) -> Any:
+        """for the filter_by() method, return the target entity for which
+        we will attempt to derive an expression from based on string name.
+
+        """
+
+        if self._setup_joins:
+            _last_joined_entity = self._last_joined_entity
+            if _last_joined_entity is not None:
+                return _last_joined_entity
+
+        # discussion related to #7239
+        # special check determines if we should try to derive attributes
+        # for filter_by() from the "from object", i.e., if the user
+        # called query.select_from(some selectable).filter_by(some_attr=value).
+        # We don't want to do that in the case that methods like
+        # from_self(), select_entity_from(), or a set op like union() were
+        # called; while these methods also place a
+        # selectable in the _from_obj collection, they also set up
+        # the _set_base_alias boolean which turns on the whole "adapt the
+        # entity to this selectable" thing, meaning the query still continues
+        # to construct itself in terms of the lead entity that was passed
+        # to query(), e.g. query(User).from_self() is still in terms of User,
+        # and not the subquery that from_self() created.   This feature of
+        # "implicitly adapt all occurrences of entity X to some arbitrary
+        # subquery" is the main thing I am trying to do away with in 2.0 as
+        # users should now use aliased() for that, but I can't entirely get
+        # rid of it due to query.union() and other set ops relying upon it.
+        #
+        # compare this to the base Select()._filter_by_zero() which can
+        # just return self._from_obj[0] if present, because there is no
+        # "_set_base_alias" feature.
+        #
+        # IOW, this conditional essentially detects if
+        # "select_from(some_selectable)" has been called, as opposed to
+        # "select_entity_from()", "from_self()"
+        # or "union() / some_set_op()".
+        if self._from_obj and not self._compile_options._set_base_alias:
+            return self._from_obj[0]
+
+        return self._raw_columns[0]
+
+    def filter_by(self, **kwargs: Any) -> Self:
+        r"""Apply the given filtering criterion to a copy
+        of this :class:`_query.Query`, using keyword expressions.
+
+        e.g.::
+
+            session.query(MyClass).filter_by(name="some name")
+
+        Multiple criteria may be specified, separated by commas; the effect
+        is that they will be joined together using the :func:`.and_`
+        function::
+
+            session.query(MyClass).filter_by(name="some name", id=5)
+
+        The keyword expressions are extracted from the primary
+        entity of the query, or the last entity that was the
+        target of a call to :meth:`_query.Query.join`.
+
+        .. seealso::
+
+            :meth:`_query.Query.filter` - filter on SQL expressions.
+
+            :meth:`_sql.Select.filter_by` - v2 comparable method.
+
+        """
+        from_entity = self._filter_by_zero()
+
+        clauses = [
+            _entity_namespace_key(from_entity, key) == value
+            for key, value in kwargs.items()
+        ]
+        return self.filter(*clauses)
+
+    @_generative
+    def order_by(
+        self,
+        __first: Union[
+            Literal[None, False, _NoArg.NO_ARG],
+            _ColumnExpressionOrStrLabelArgument[Any],
+        ] = _NoArg.NO_ARG,
+        *clauses: _ColumnExpressionOrStrLabelArgument[Any],
+    ) -> Self:
+        """Apply one or more ORDER BY criteria to the query and return
+        the newly resulting :class:`_query.Query`.
+
+        e.g.::
+
+            q = session.query(Entity).order_by(Entity.id, Entity.name)
+
+        Calling this method multiple times is equivalent to calling it once
+        with all the clauses concatenated. All existing ORDER BY criteria may
+        be cancelled by passing ``None`` by itself.  New ORDER BY criteria may
+        then be added by invoking :meth:`_orm.Query.order_by` again, e.g.::
+
+            # erases all existing ORDER BY, then orders by new_col alone
+            q = q.order_by(None).order_by(new_col)
+
+        .. seealso::
+
+            These sections describe ORDER BY in terms of :term:`2.0 style`
+            invocation but apply to :class:`_orm.Query` as well:
+
+            :ref:`tutorial_order_by` - in the :ref:`unified_tutorial`
+
+            :ref:`tutorial_order_by_label` - in the :ref:`unified_tutorial`
+
+            :meth:`_sql.Select.order_by` - v2 equivalent method.
+
+        """
+
+        for assertion in (self._no_statement_condition, self._no_limit_offset):
+            assertion("order_by")
+
+        if not clauses and (__first is None or __first is False):
+            self._order_by_clauses = ()
+        elif __first is not _NoArg.NO_ARG:
+            criterion = tuple(
+                coercions.expect(roles.OrderByRole, clause)
+                for clause in (__first,) + clauses
+            )
+            self._order_by_clauses += criterion
+
+        return self
+
+    @_generative
+    def group_by(
+        self,
+        __first: Union[
+            Literal[None, False, _NoArg.NO_ARG],
+            _ColumnExpressionOrStrLabelArgument[Any],
+        ] = _NoArg.NO_ARG,
+        *clauses: _ColumnExpressionOrStrLabelArgument[Any],
+    ) -> Self:
+        """Apply one or more GROUP BY criterion to the query and return
+        the newly resulting :class:`_query.Query`.
+
+        All existing GROUP BY settings can be suppressed by
+        passing ``None`` - this will suppress any GROUP BY configured
+        on mappers as well.
+
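+        E.g., a minimal sketch, assuming ``User`` / ``Address`` mapped
+        classes and ``func`` imported from ``sqlalchemy``::
+
+            q = (
+                session.query(User.name, func.count(Address.id))
+                .join(User.addresses)
+                .group_by(User.name)
+            )
+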
+        .. seealso::
+
+            These sections describe GROUP BY in terms of :term:`2.0 style`
+            invocation but apply to :class:`_orm.Query` as well:
+
+            :ref:`tutorial_group_by_w_aggregates` - in the
+            :ref:`unified_tutorial`
+
+            :ref:`tutorial_order_by_label` - in the :ref:`unified_tutorial`
+
+            :meth:`_sql.Select.group_by` - v2 equivalent method.
+
+        """
+
+        for assertion in (self._no_statement_condition, self._no_limit_offset):
+            assertion("group_by")
+
+        if not clauses and (__first is None or __first is False):
+            self._group_by_clauses = ()
+        elif __first is not _NoArg.NO_ARG:
+            criterion = tuple(
+                coercions.expect(roles.GroupByRole, clause)
+                for clause in (__first,) + clauses
+            )
+            self._group_by_clauses += criterion
+        return self
+
+    @_generative
+    @_assertions(_no_statement_condition, _no_limit_offset)
+    def having(self, *having: _ColumnExpressionArgument[bool]) -> Self:
+        r"""Apply a HAVING criterion to the query and return the
+        newly resulting :class:`_query.Query`.
+
+        :meth:`_query.Query.having` is used in conjunction with
+        :meth:`_query.Query.group_by`.
+
+        HAVING criterion makes it possible to use filters on aggregate
+        functions like COUNT, SUM, AVG, MAX, and MIN, e.g.::
+
+            q = (
+                session.query(User.id)
+                .join(User.addresses)
+                .group_by(User.id)
+                .having(func.count(Address.id) > 2)
+            )
+
+        .. seealso::
+
+            :meth:`_sql.Select.having` - v2 equivalent method.
+
+        """
+
+        for criterion in having:
+            having_criteria = coercions.expect(
+                roles.WhereHavingRole, criterion
+            )
+            self._having_criteria += (having_criteria,)
+        return self
+
+    def _set_op(self, expr_fn: Any, *q: Query[Any]) -> Self:
+        list_of_queries = (self,) + q
+        return self._from_selectable(expr_fn(*(list_of_queries)).subquery())
+
+    def union(self, *q: Query[Any]) -> Self:
+        """Produce a UNION of this Query against one or more queries.
+
+        e.g.::
+
+            q1 = sess.query(SomeClass).filter(SomeClass.foo == "bar")
+            q2 = sess.query(SomeClass).filter(SomeClass.bar == "foo")
+
+            q3 = q1.union(q2)
+
+        The method accepts multiple Query objects so as to control
+        the level of nesting.  A series of ``union()`` calls such as::
+
+            x.union(y).union(z).all()
+
+        will nest on each ``union()``, and produces:
+
+        .. sourcecode:: sql
+
+            SELECT * FROM (SELECT * FROM (SELECT * FROM x UNION
+                            SELECT * FROM y) UNION SELECT * FROM z)
+
+        Whereas::
+
+            x.union(y, z).all()
+
+        produces:
+
+        .. sourcecode:: sql
+
+            SELECT * FROM (SELECT * FROM x UNION SELECT * FROM y UNION
+                            SELECT * FROM z)
+
+        Note that many database backends do not allow ORDER BY to
+        be rendered on a query called within UNION, EXCEPT, etc.
+        To disable all ORDER BY clauses including those configured
+        on mappers, issue ``query.order_by(None)`` - the resulting
+        :class:`_query.Query` object will not render ORDER BY within
+        its SELECT statement.
+
+        .. seealso::
+
+            :meth:`_sql.Select.union` - v2 equivalent method.
+
+        """
+        return self._set_op(expression.union, *q)
+
+    def union_all(self, *q: Query[Any]) -> Self:
+        """Produce a UNION ALL of this Query against one or more queries.
+
+        Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See
+        that method for usage examples.
+
+        .. seealso::
+
+            :meth:`_sql.Select.union_all` - v2 equivalent method.
+
+        """
+        return self._set_op(expression.union_all, *q)
+
+    def intersect(self, *q: Query[Any]) -> Self:
+        """Produce an INTERSECT of this Query against one or more queries.
+
+        Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See
+        that method for usage examples.
+
+        .. seealso::
+
+            :meth:`_sql.Select.intersect` - v2 equivalent method.
+
+        """
+        return self._set_op(expression.intersect, *q)
+
+    def intersect_all(self, *q: Query[Any]) -> Self:
+        """Produce an INTERSECT ALL of this Query against one or more queries.
+
+        Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See
+        that method for usage examples.
+
+        .. seealso::
+
+            :meth:`_sql.Select.intersect_all` - v2 equivalent method.
+
+        """
+        return self._set_op(expression.intersect_all, *q)
+
+    def except_(self, *q: Query[Any]) -> Self:
+        """Produce an EXCEPT of this Query against one or more queries.
+
+        Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See
+        that method for usage examples.
+
+        .. seealso::
+
+            :meth:`_sql.Select.except_` - v2 equivalent method.
+
+        """
+        return self._set_op(expression.except_, *q)
+
+    def except_all(self, *q: Query[Any]) -> Self:
+        """Produce an EXCEPT ALL of this Query against one or more queries.
+
+        Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See
+        that method for usage examples.
+
+        .. seealso::
+
+            :meth:`_sql.Select.except_all` - v2 equivalent method.
+
+        """
+        return self._set_op(expression.except_all, *q)
+
+    @_generative
+    @_assertions(_no_statement_condition, _no_limit_offset)
+    def join(
+        self,
+        target: _JoinTargetArgument,
+        onclause: Optional[_OnClauseArgument] = None,
+        *,
+        isouter: bool = False,
+        full: bool = False,
+    ) -> Self:
+        r"""Create a SQL JOIN against this :class:`_query.Query`
+        object's criterion
+        and apply generatively, returning the newly resulting
+        :class:`_query.Query`.
+
+        **Simple Relationship Joins**
+
+        Consider a mapping between two classes ``User`` and ``Address``,
+        with a relationship ``User.addresses`` representing a collection
+        of ``Address`` objects associated with each ``User``.   The most
+        common usage of :meth:`_query.Query.join`
+        is to create a JOIN along this
+        relationship, using the ``User.addresses`` attribute as an indicator
+        for how this should occur::
+
+            q = session.query(User).join(User.addresses)
+
+        Where above, the call to :meth:`_query.Query.join` along
+        ``User.addresses`` will result in SQL approximately equivalent to:
+
+        .. sourcecode:: sql
+
+            SELECT user.id, user.name
+            FROM user JOIN address ON user.id = address.user_id
+
+        In the above example we refer to ``User.addresses`` as passed to
+        :meth:`_query.Query.join` as the "on clause", that is, it indicates
+        how the "ON" portion of the JOIN should be constructed.
+
+        To construct a chain of joins, multiple :meth:`_query.Query.join`
+        calls may be used.  The relationship-bound attribute implies both
+        the left and right side of the join at once::
+
+            q = (
+                session.query(User)
+                .join(User.orders)
+                .join(Order.items)
+                .join(Item.keywords)
+            )
+
+        .. note:: as seen in the above example, **the order in which each
+           call to the join() method occurs is important**.    Query would not,
+           for example, know how to join correctly if we were to specify
+           ``User``, then ``Item``, then ``Order``, in our chain of joins; in
+           such a case, depending on the arguments passed, it may raise an
+           error that it doesn't know how to join, or it may produce invalid
+           SQL in which case the database will raise an error. In correct
+           practice, the
+           :meth:`_query.Query.join` method is invoked in such a way that lines
+           up with how we would want the JOIN clauses in SQL to be
+           rendered, and each call should represent a clear link from what
+           precedes it.
+
+        **Joins to a Target Entity or Selectable**
+
+        A second form of :meth:`_query.Query.join` allows any mapped entity or
+        core selectable construct as a target.   In this usage,
+        :meth:`_query.Query.join` will attempt to create a JOIN along the
+        natural foreign key relationship between two entities::
+
+            q = session.query(User).join(Address)
+
+        In the above calling form, :meth:`_query.Query.join` is called upon to
+        create the "on clause" automatically for us.  This calling form will
+        ultimately raise an error if either there are no foreign keys between
+        the two entities, or if there are multiple foreign key linkages between
+        the target entity and the entity or entities already present on the
+        left side such that creating a join requires more information.  Note
+        that when indicating a join to a target without any ON clause, ORM
+        configured relationships are not taken into account.
+
+        **Joins to a Target with an ON Clause**
+
+        The third calling form allows both the target entity and
+        the ON clause to be passed explicitly.    An example that includes
+        a SQL expression as the ON clause is as follows::
+
+            q = session.query(User).join(Address, User.id == Address.user_id)
+
+        The above form may also use a relationship-bound attribute as the
+        ON clause as well::
+
+            q = session.query(User).join(Address, User.addresses)
+
+        The above syntax can be useful for the case where we wish
+        to join to an alias of a particular target entity.  If we wanted
+        to join to ``Address`` twice, it could be achieved using two
+        aliases set up using the :func:`~sqlalchemy.orm.aliased` function::
+
+            a1 = aliased(Address)
+            a2 = aliased(Address)
+
+            q = (
+                session.query(User)
+                .join(a1, User.addresses)
+                .join(a2, User.addresses)
+                .filter(a1.email_address == "ed@foo.com")
+                .filter(a2.email_address == "ed@bar.com")
+            )
+
+        The relationship-bound calling form can also specify a target entity
+        using the :meth:`_orm.PropComparator.of_type` method; a query
+        equivalent to the one above would be::
+
+            a1 = aliased(Address)
+            a2 = aliased(Address)
+
+            q = (
+                session.query(User)
+                .join(User.addresses.of_type(a1))
+                .join(User.addresses.of_type(a2))
+                .filter(a1.email_address == "ed@foo.com")
+                .filter(a2.email_address == "ed@bar.com")
+            )
+
+        **Augmenting Built-in ON Clauses**
+
+        As a substitute for providing a full custom ON condition for an
+        existing relationship, the :meth:`_orm.PropComparator.and_` function
+        may be applied to a relationship attribute to augment additional
+        criteria into the ON clause; the additional criteria will be combined
+        with the default criteria using AND::
+
+            q = session.query(User).join(
+                User.addresses.and_(Address.email_address != "foo@bar.com")
+            )
+
+        .. versionadded:: 1.4
+
+        **Joining to Tables and Subqueries**
+
+
+        The target of a join may also be any table or SELECT statement,
+        which may be related to a target entity or not.   Use the
+        appropriate ``.subquery()`` method in order to make a subquery
+        out of a query::
+
+            subq = (
+                session.query(Address)
+                .filter(Address.email_address == "ed@foo.com")
+                .subquery()
+            )
+
+
+            q = session.query(User).join(subq, User.id == subq.c.user_id)
+
+        Joining to a subquery in terms of a specific relationship and/or
+        target entity may be achieved by linking the subquery to the
+        entity using :func:`_orm.aliased`::
+
+            subq = (
+                session.query(Address)
+                .filter(Address.email_address == "ed@foo.com")
+                .subquery()
+            )
+
+            address_subq = aliased(Address, subq)
+
+            q = session.query(User).join(User.addresses.of_type(address_subq))
+
+        **Controlling what to Join From**
+
+        In cases where the left side of the current state of
+        :class:`_query.Query` is not in line with what we want to join from,
+        the :meth:`_query.Query.select_from` method may be used::
+
+            q = (
+                session.query(Address)
+                .select_from(User)
+                .join(User.addresses)
+                .filter(User.name == "ed")
+            )
+
+        Which will produce SQL similar to:
+
+        .. sourcecode:: sql
+
+            SELECT address.* FROM user
+                JOIN address ON user.id=address.user_id
+                WHERE user.name = :name_1
+
+        .. seealso::
+
+            :meth:`_sql.Select.join` - v2 equivalent method.
+
+        :param \*props: Incoming arguments for :meth:`_query.Query.join`.
+         In modern use, the props collection should be considered to be a
+         one or two argument form: either a single "target" entity or ORM
+         attribute-bound relationship, or a target entity plus an "on
+         clause", which may be a SQL expression or an ORM attribute-bound
+         relationship.
+
+        :param isouter=False: If True, the join used will be a left outer join,
+         just as if the :meth:`_query.Query.outerjoin` method were called.
+
+        :param full=False: render FULL OUTER JOIN; implies ``isouter``.
+
+        """
+
+        join_target = coercions.expect(
+            roles.JoinTargetRole,
+            target,
+            apply_propagate_attrs=self,
+            legacy=True,
+        )
+        if onclause is not None:
+            onclause_element = coercions.expect(
+                roles.OnClauseRole, onclause, legacy=True
+            )
+        else:
+            onclause_element = None
+
+        self._setup_joins += (
+            (
+                join_target,
+                onclause_element,
+                None,
+                {
+                    "isouter": isouter,
+                    "full": full,
+                },
+            ),
+        )
+
+        self.__dict__.pop("_last_joined_entity", None)
+        return self
+
+    def outerjoin(
+        self,
+        target: _JoinTargetArgument,
+        onclause: Optional[_OnClauseArgument] = None,
+        *,
+        full: bool = False,
+    ) -> Self:
+        """Create a left outer join against this ``Query`` object's criterion
+        and apply generatively, returning the newly resulting ``Query``.
+
+        Usage is the same as the ``join()`` method.
+
+        .. seealso::
+
+            :meth:`_sql.Select.outerjoin` - v2 equivalent method.
+
+        """
+        return self.join(target, onclause=onclause, isouter=True, full=full)
+
+    @_generative
+    @_assertions(_no_statement_condition)
+    def reset_joinpoint(self) -> Self:
+        """Return a new :class:`.Query`, where the "join point" has
+        been reset back to the base FROM entities of the query.
+
+        This method is usually used in conjunction with the
+        ``aliased=True`` feature of the :meth:`~.Query.join`
+        method.  See the example in :meth:`~.Query.join` for how
+        this is used.
+
+        """
+        self._last_joined_entity = None
+
+        return self
+
+    @_generative
+    @_assertions(_no_clauseelement_condition)
+    def select_from(self, *from_obj: _FromClauseArgument) -> Self:
+        r"""Set the FROM clause of this :class:`.Query` explicitly.
+
+        :meth:`.Query.select_from` is often used in conjunction with
+        :meth:`.Query.join` in order to control which entity is selected
+        from on the "left" side of the join.
+
+        The entity or selectable object here effectively replaces the
+        "left edge" of any calls to :meth:`~.Query.join`, when no
+        joinpoint is otherwise established - usually, the default "join
+        point" is the leftmost entity in the :class:`~.Query` object's
+        list of entities to be selected.
+
+        A typical example::
+
+            q = (
+                session.query(Address)
+                .select_from(User)
+                .join(User.addresses)
+                .filter(User.name == "ed")
+            )
+
+        Which produces SQL equivalent to:
+
+        .. sourcecode:: sql
+
+            SELECT address.* FROM user
+            JOIN address ON user.id=address.user_id
+            WHERE user.name = :name_1
+
+        :param \*from_obj: collection of one or more entities to apply
+         to the FROM clause.  Entities can be mapped classes,
+         :class:`.AliasedClass` objects, :class:`.Mapper` objects
+         as well as core :class:`.FromClause` elements like subqueries.
+
+        .. seealso::
+
+            :meth:`~.Query.join`
+
+            :meth:`.Query.select_entity_from`
+
+            :meth:`_sql.Select.select_from` - v2 equivalent method.
+
+        """
+
+        self._set_select_from(from_obj, False)
+        return self
+
+    def __getitem__(self, item: Any) -> Any:
+        return orm_util._getitem(
+            self,
+            item,
+        )
+
+    @_generative
+    @_assertions(_no_statement_condition)
+    def slice(
+        self,
+        start: int,
+        stop: int,
+    ) -> Self:
+        """Computes the "slice" of the :class:`_query.Query` represented by
+        the given indices and returns the resulting :class:`_query.Query`.
+
+        The start and stop indices behave like the argument to Python's
+        built-in :func:`range` function. This method provides an
+        alternative to using ``LIMIT``/``OFFSET`` to get a slice of the
+        query.
+
+        For example, ::
+
+            session.query(User).order_by(User.id).slice(1, 3)
+
+        renders as
+
+        .. sourcecode:: sql
+
+           SELECT users.id AS users_id,
+                  users.name AS users_name
+           FROM users ORDER BY users.id
+           LIMIT ? OFFSET ?
+           (2, 1)
+
+        .. seealso::
+
+           :meth:`_query.Query.limit`
+
+           :meth:`_query.Query.offset`
+
+           :meth:`_sql.Select.slice` - v2 equivalent method.
+
+        """
+
+        self._limit_clause, self._offset_clause = sql_util._make_slice(
+            self._limit_clause, self._offset_clause, start, stop
+        )
+        return self
+
+    @_generative
+    @_assertions(_no_statement_condition)
+    def limit(self, limit: _LimitOffsetType) -> Self:
+        """Apply a ``LIMIT`` to the query and return the newly resulting
+        ``Query``.
+
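+        E.g., a minimal sketch::
+
+            # the ten User rows with the lowest ids
+            q = session.query(User).order_by(User.id).limit(10)
+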
+        .. seealso::
+
+            :meth:`_sql.Select.limit` - v2 equivalent method.
+
+        """
+        self._limit_clause = sql_util._offset_or_limit_clause(limit)
+        return self
+
+    @_generative
+    @_assertions(_no_statement_condition)
+    def offset(self, offset: _LimitOffsetType) -> Self:
+        """Apply an ``OFFSET`` to the query and return the newly resulting
+        ``Query``.
+
+        .. seealso::
+
+            :meth:`_sql.Select.offset` - v2 equivalent method.
+        """
+        self._offset_clause = sql_util._offset_or_limit_clause(offset)
+        return self
+
+    @_generative
+    @_assertions(_no_statement_condition)
+    def distinct(self, *expr: _ColumnExpressionArgument[Any]) -> Self:
+        r"""Apply a ``DISTINCT`` to the query and return the newly resulting
+        ``Query``.
+
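+        E.g., a minimal sketch::
+
+            # de-duplicated user names
+            names = session.query(User.name).distinct().all()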
+
+        .. note::
+
+            The ORM-level :meth:`.distinct` call includes logic that will
+            automatically add columns from the ORDER BY of the query to the
+            columns clause of the SELECT statement, to satisfy the common need
+            of the database backend that ORDER BY columns be part of the SELECT
+            list when DISTINCT is used.   These columns *are not* added to the
+            list of columns actually fetched by the :class:`_query.Query`,
+            however,
+            so would not affect results. The columns are passed through when
+            using the :attr:`_query.Query.statement` accessor, however.
+
+            .. deprecated:: 2.0  This logic is deprecated and will be removed
+               in SQLAlchemy 2.0.     See :ref:`migration_20_query_distinct`
+               for a description of this use case in 2.0.
+
+        .. seealso::
+
+            :meth:`_sql.Select.distinct` - v2 equivalent method.
+
+        :param \*expr: optional column expressions.  When present,
+         the PostgreSQL dialect will render a ``DISTINCT ON (<expressions>)``
+         construct.
+
+         .. deprecated:: 1.4 Using \*expr in other dialects is deprecated
+            and will raise :class:`_exc.CompileError` in a future version.
+
+        """
+        if expr:
+            self._distinct = True
+            self._distinct_on = self._distinct_on + tuple(
+                coercions.expect(roles.ByOfRole, e) for e in expr
+            )
+        else:
+            self._distinct = True
+        return self
+
+    def all(self) -> List[_T]:
+        """Return the results represented by this :class:`_query.Query`
+        as a list.
+
+        This results in an execution of the underlying SQL statement.
+
+        .. warning::  The :class:`_query.Query` object,
+           when asked to return either
+           a sequence or iterator that consists of full ORM-mapped entities,
+           will **deduplicate entries based on primary key**.  See the FAQ for
+           more details.
+
+            .. seealso::
+
+                :ref:`faq_query_deduplicating`
+
+        .. seealso::
+
+            :meth:`_engine.Result.all` - v2 comparable method.
+
+            :meth:`_engine.Result.scalars` - v2 comparable method.
+        """
+        return self._iter().all()  # type: ignore
+
+    @_generative
+    @_assertions(_no_clauseelement_condition)
+    def from_statement(self, statement: ExecutableReturnsRows) -> Self:
+        """Execute the given SELECT statement and return results.
+
+        This method bypasses all internal statement compilation, and the
+        statement is executed without modification.
+
+        The statement is typically either a :func:`_expression.text`
+        or :func:`_expression.select` construct, and should return the set
+        of columns
+        appropriate to the entity class represented by this
+        :class:`_query.Query`.
+
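+        E.g., a minimal sketch, assuming ``text`` imported from
+        ``sqlalchemy`` and a ``users`` table mapped by ``User``::
+
+            stmt = text("SELECT * FROM users WHERE users.name = :name")
+            q = session.query(User).from_statement(stmt).params(name="ed")
+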
+        .. seealso::
+
+            :meth:`_sql.Select.from_statement` - v2 comparable method.
+
+        """
+        statement = coercions.expect(
+            roles.SelectStatementRole, statement, apply_propagate_attrs=self
+        )
+        self._statement = statement
+        return self
+
+    def first(self) -> Optional[_T]:
+        """Return the first result of this ``Query`` or
+        ``None`` if the result doesn't contain any rows.
+
+        first() applies a limit of one within the generated SQL, so that
+        only one primary entity row is generated on the server side
+        (note this may consist of multiple result rows if join-loaded
+        collections are present).
+
+        Calling :meth:`_query.Query.first`
+        results in an execution of the underlying
+        query.
+
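+        E.g., a sketch (``User`` is an assumed mapped class)::
+
+            # User instance, or None if the query returned no rows
+            user = session.query(User).order_by(User.id).first()
+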
+        .. seealso::
+
+            :meth:`_query.Query.one`
+
+            :meth:`_query.Query.one_or_none`
+
+            :meth:`_engine.Result.first` - v2 comparable method.
+
+            :meth:`_engine.Result.scalars` - v2 comparable method.
+
+        """
+        # replicates limit(1) behavior
+        if self._statement is not None:
+            return self._iter().first()  # type: ignore
+        else:
+            return self.limit(1)._iter().first()  # type: ignore
+
+    def one_or_none(self) -> Optional[_T]:
+        """Return at most one result or raise an exception.
+
+        Returns ``None`` if the query selects
+        no rows.  Raises ``sqlalchemy.orm.exc.MultipleResultsFound``
+        if multiple object identities are returned, or if multiple
+        rows are returned for a query that returns only scalar values
+        as opposed to full identity-mapped entities.
+
+        Calling :meth:`_query.Query.one_or_none`
+        results in an execution of the
+        underlying query.
+
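+        E.g., a sketch (``User`` is an assumed mapped class)::
+
+            # User instance, or None if no row matched
+            user = session.query(User).filter(User.id == 5).one_or_none()
+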
+        .. seealso::
+
+            :meth:`_query.Query.first`
+
+            :meth:`_query.Query.one`
+
+            :meth:`_engine.Result.one_or_none` - v2 comparable method.
+
+            :meth:`_engine.Result.scalar_one_or_none` - v2 comparable method.
+
+        """
+        return self._iter().one_or_none()  # type: ignore
+
+    def one(self) -> _T:
+        """Return exactly one result or raise an exception.
+
+        Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query selects
+        no rows.  Raises ``sqlalchemy.orm.exc.MultipleResultsFound``
+        if multiple object identities are returned, or if multiple
+        rows are returned for a query that returns only scalar values
+        as opposed to full identity-mapped entities.
+
+        Calling :meth:`.one` results in an execution of the underlying query.
+
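+        E.g., a sketch (``User`` is an assumed mapped class)::
+
+            from sqlalchemy.orm.exc import NoResultFound
+
+            try:
+                user = session.query(User).filter(User.id == 5).one()
+            except NoResultFound:
+                ...  # zero rows were returned
+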
+        .. seealso::
+
+            :meth:`_query.Query.first`
+
+            :meth:`_query.Query.one_or_none`
+
+            :meth:`_engine.Result.one` - v2 comparable method.
+
+            :meth:`_engine.Result.scalar_one` - v2 comparable method.
+
+        """
+        return self._iter().one()  # type: ignore
+
+    def scalar(self) -> Any:
+        """Return the first element of the first result or None
+        if no rows present.  If multiple rows are returned,
+        raises MultipleResultsFound.
+
+          >>> session.query(Item).scalar()
+          <Item>
+          >>> session.query(Item.id).scalar()
+          1
+          >>> session.query(Item.id).filter(Item.id < 0).scalar()
+          None
+          >>> session.query(Item.id, Item.name).scalar()
+          1
+          >>> session.query(func.count(Parent.id)).scalar()
+          20
+
+        This results in an execution of the underlying query.
+
+        .. seealso::
+
+            :meth:`_engine.Result.scalar` - v2 comparable method.
+
+        """
+        # TODO: not sure why we can't use result.scalar() here
+        try:
+            ret = self.one()
+            if not isinstance(ret, collections_abc.Sequence):
+                return ret
+            return ret[0]
+        except sa_exc.NoResultFound:
+            return None
+
+    def __iter__(self) -> Iterator[_T]:
+        result = self._iter()
+        try:
+            yield from result  # type: ignore
+        except GeneratorExit:
+            # issue #8710 - direct iteration is not re-usable after
+            # an iterable block is broken, so close the result
+            result._soft_close()
+            raise
+
+    def _iter(self) -> Union[ScalarResult[_T], Result[_T]]:
+        # new style execution.
+        params = self._params
+
+        statement = self._statement_20()
+        result: Union[ScalarResult[_T], Result[_T]] = self.session.execute(
+            statement,
+            params,
+            execution_options={"_sa_orm_load_options": self.load_options},
+        )
+
+        # legacy: automatically set scalars, unique
+        if result._attributes.get("is_single_entity", False):
+            result = cast("Result[_T]", result).scalars()
+
+        if (
+            result._attributes.get("filtered", False)
+            and not self.load_options._yield_per
+        ):
+            result = result.unique()
+
+        return result
+
+    def __str__(self) -> str:
+        statement = self._statement_20()
+
+        try:
+            bind = (
+                self._get_bind_args(statement, self.session.get_bind)
+                if self.session
+                else None
+            )
+        except sa_exc.UnboundExecutionError:
+            bind = None
+
+        return str(statement.compile(bind))
+
+    def _get_bind_args(self, statement: Any, fn: Any, **kw: Any) -> Any:
+        return fn(clause=statement, **kw)
+
+    @property
+    def column_descriptions(self) -> List[ORMColumnDescription]:
+        """Return metadata about the columns which would be
+        returned by this :class:`_query.Query`.
+
+        Format is a list of dictionaries::
+
+            user_alias = aliased(User, name="user2")
+            q = sess.query(User, User.id, user_alias)
+
+            # this expression:
+            q.column_descriptions
+
+            # would return:
+            [
+                {
+                    "name": "User",
+                    "type": User,
+                    "aliased": False,
+                    "expr": User,
+                    "entity": User,
+                },
+                {
+                    "name": "id",
+                    "type": Integer(),
+                    "aliased": False,
+                    "expr": User.id,
+                    "entity": User,
+                },
+                {
+                    "name": "user2",
+                    "type": User,
+                    "aliased": True,
+                    "expr": user_alias,
+                    "entity": user_alias,
+                },
+            ]
+
+        .. seealso::
+
+            This API is available using :term:`2.0 style` queries as well,
+            documented at:
+
+            * :ref:`queryguide_inspection`
+
+            * :attr:`.Select.column_descriptions`
+
+        """
+
+        return _column_descriptions(self, legacy=True)
+
+    @util.deprecated(
+        "2.0",
+        "The :meth:`_orm.Query.instances` method is deprecated and will "
+        "be removed in a future release. "
+        "Use the Select.from_statement() method or aliased() construct in "
+        "conjunction with Session.execute() instead.",
+    )
+    def instances(
+        self,
+        result_proxy: CursorResult[Any],
+        context: Optional[QueryContext] = None,
+    ) -> Any:
+        """Return an ORM result given a :class:`_engine.CursorResult` and
+        :class:`.QueryContext`.
+
+        """
+        if context is None:
+            util.warn_deprecated(
+                "Using the Query.instances() method without a context "
+                "is deprecated and will be disallowed in a future release.  "
+                "Please make use of :meth:`_query.Query.from_statement` "
+                "for linking ORM results to arbitrary select constructs.",
+                version="1.4",
+            )
+            compile_state = self._compile_state(for_statement=False)
+
+            context = QueryContext(
+                compile_state,
+                compile_state.statement,
+                compile_state.statement,
+                self._params,
+                self.session,
+                self.load_options,
+            )
+
+        result = loading.instances(result_proxy, context)
+
+        # legacy: automatically set scalars, unique
+        if result._attributes.get("is_single_entity", False):
+            result = result.scalars()  # type: ignore
+
+        if result._attributes.get("filtered", False):
+            result = result.unique()
+
+        # TODO: isn't this supposed to be a list?
+        return result
+
+    @util.became_legacy_20(
+        ":meth:`_orm.Query.merge_result`",
+        alternative="The method is superseded by the "
+        ":func:`_orm.merge_frozen_result` function.",
+        enable_warnings=False,  # warnings occur via loading.merge_result
+    )
+    def merge_result(
+        self,
+        iterator: Union[
+            FrozenResult[Any], Iterable[Sequence[Any]], Iterable[object]
+        ],
+        load: bool = True,
+    ) -> Union[FrozenResult[Any], Iterable[Any]]:
+        """Merge a result into this :class:`_query.Query` object's Session.
+
+        Given an iterator returned by a :class:`_query.Query`
+        of the same structure
+        as this one, return an identical iterator of results, with all mapped
+        instances merged into the session using :meth:`.Session.merge`. This
+        is an optimized method which will merge all mapped instances,
+        preserving the structure of the result rows and unmapped columns with
+        less method overhead than that of calling :meth:`.Session.merge`
+        explicitly for each value.
+
+        The structure of the results is determined based on the column list of
+        this :class:`_query.Query` - if these do not correspond,
+        unchecked errors
+        will occur.
+
+        The 'load' argument is the same as that of :meth:`.Session.merge`.
+
+        For an example of how :meth:`_query.Query.merge_result` is used, see
+        the source code for the example :ref:`examples_caching`, where
+        :meth:`_query.Query.merge_result` is used to efficiently restore state
+        from a cache back into a target :class:`.Session`.
+
+        """
+
+        return loading.merge_result(self, iterator, load)
+
+    def exists(self) -> Exists:
+        """A convenience method that turns a query into an EXISTS subquery
+        of the form EXISTS (SELECT 1 FROM ... WHERE ...).
+
+        e.g.::
+
+            q = session.query(User).filter(User.name == "fred")
+            session.query(q.exists())
+
+        Producing SQL similar to:
+
+        .. sourcecode:: sql
+
+            SELECT EXISTS (
+                SELECT 1 FROM users WHERE users.name = :name_1
+            ) AS anon_1
+
+        The EXISTS construct is usually used in the WHERE clause::
+
+            session.query(User.id).filter(q.exists()).scalar()
+
+        Note that some databases such as SQL Server don't allow an
+        EXISTS expression to be present in the columns clause of a
+        SELECT.    To select a simple boolean value based on the EXISTS
+        as the WHERE criteria, use :func:`.literal`::
+
+            from sqlalchemy import literal
+
+            session.query(literal(True)).filter(q.exists()).scalar()
+
+        .. seealso::
+
+            :meth:`_sql.Select.exists` - v2 comparable method.
+
+        """
+
+        # .add_columns() for the case that we are a query().select_from(X),
+        # so that ".statement" can be produced (#2995) but also without
+        # omitting the FROM clause from a query(X) (#2818);
+        # .with_only_columns() after we have a core select() so that
+        # we get just "SELECT 1" without any entities.
+
+        inner = (
+            self.enable_eagerloads(False)
+            .add_columns(sql.literal_column("1"))
+            .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
+            ._get_select_statement_only()
+            .with_only_columns(1)
+        )
+
+        ezero = self._entity_from_pre_ent_zero()
+        if ezero is not None:
+            inner = inner.select_from(ezero)
+
+        return sql.exists(inner)
+
+    def count(self) -> int:
+        r"""Return a count of rows this the SQL formed by this :class:`Query`
+        would return.
+
+        This generates the SQL for this Query as follows:
+
+        .. sourcecode:: sql
+
+            SELECT count(1) AS count_1 FROM (
+                SELECT <rest of query follows...>
+            ) AS anon_1
+
+        The above SQL returns a single row, which is the aggregate value
+        of the count function; the :meth:`_query.Query.count`
+        method then returns
+        that single integer value.
+
+        .. warning::
+
+            It is important to note that the value returned by
+            count() is **not the same as the number of ORM objects that this
+            Query would return from a method such as the .all() method**.
+            The :class:`_query.Query` object,
+            when asked to return full entities,
+            will **deduplicate entries based on primary key**, meaning if the
+            same primary key value would appear in the results more than once,
+            only one object of that primary key would be present.  This does
+            not apply to a query that is against individual columns.
+
+            .. seealso::
+
+                :ref:`faq_query_deduplicating`
+
+        For fine grained control over specific columns to count, to skip the
+        usage of a subquery or to otherwise control the FROM clause, or to use
+        other aggregate functions, use :attr:`~sqlalchemy.sql.expression.func`
+        expressions in conjunction with :meth:`~.Session.query`, i.e.::
+
+            from sqlalchemy import func
+
+            # count User records, without
+            # using a subquery.
+            session.query(func.count(User.id))
+
+            # return count of user "id" grouped
+            # by "name"
+            session.query(func.count(User.id)).group_by(User.name)
+
+            from sqlalchemy import distinct
+
+            # count distinct "name" values
+            session.query(func.count(distinct(User.name)))
+
+        .. seealso::
+
+            :ref:`migration_20_query_usage`
+
+        """
+        col = sql.func.count(sql.literal_column("*"))
+        return (  # type: ignore
+            self._legacy_from_self(col).enable_eagerloads(False).scalar()
+        )
+
+    def delete(
+        self,
+        synchronize_session: SynchronizeSessionArgument = "auto",
+        delete_args: Optional[Dict[Any, Any]] = None,
+    ) -> int:
+        r"""Perform a DELETE with an arbitrary WHERE clause.
+
+        Deletes rows matched by this query from the database.
+
+        E.g.::
+
+            sess.query(User).filter(User.age == 25).delete(synchronize_session=False)
+
+            sess.query(User).filter(User.age == 25).delete(
+                synchronize_session="evaluate"
+            )
+
+        .. warning::
+
+            See the section :ref:`orm_expression_update_delete` for important
+            caveats and warnings, including limitations when using bulk UPDATE
+            and DELETE with mapper inheritance configurations.
+
+        :param synchronize_session: chooses the strategy to update the
+         attributes on objects in the session.   See the section
+         :ref:`orm_expression_update_delete` for a discussion of these
+         strategies.
+
+        :param delete_args: Optional dictionary, if present will be passed
+         to the underlying :func:`_expression.delete` construct as the ``**kw``
+         for the object.  May be used to pass dialect-specific arguments such
+         as ``mysql_limit``.
+
+         .. versionadded:: 2.0.37
+
+        :return: the count of rows matched as returned by the database's
+          "row count" feature.
+
+        .. seealso::
+
+            :ref:`orm_expression_update_delete`
+
+        """  # noqa: E501
+
+        bulk_del = BulkDelete(self, delete_args)
+        if self.dispatch.before_compile_delete:
+            for fn in self.dispatch.before_compile_delete:
+                new_query = fn(bulk_del.query, bulk_del)
+                if new_query is not None:
+                    bulk_del.query = new_query
+
+            self = bulk_del.query
+
+        delete_ = sql.delete(*self._raw_columns)  # type: ignore
+
+        if delete_args:
+            delete_ = delete_.with_dialect_options(**delete_args)
+
+        delete_._where_criteria = self._where_criteria
+        result: CursorResult[Any] = self.session.execute(
+            delete_,
+            self._params,
+            execution_options=self._execution_options.union(
+                {"synchronize_session": synchronize_session}
+            ),
+        )
+        bulk_del.result = result  # type: ignore
+        self.session.dispatch.after_bulk_delete(bulk_del)
+        result.close()
+
+        return result.rowcount
+
+    def update(
+        self,
+        values: Dict[_DMLColumnArgument, Any],
+        synchronize_session: SynchronizeSessionArgument = "auto",
+        update_args: Optional[Dict[Any, Any]] = None,
+    ) -> int:
+        r"""Perform an UPDATE with an arbitrary WHERE clause.
+
+        Updates rows matched by this query in the database.
+
+        E.g.::
+
+            sess.query(User).filter(User.age == 25).update(
+                {User.age: User.age - 10}, synchronize_session=False
+            )
+
+            sess.query(User).filter(User.age == 25).update(
+                {"age": User.age - 10}, synchronize_session="evaluate"
+            )
+
+        .. warning::
+
+            See the section :ref:`orm_expression_update_delete` for important
+            caveats and warnings, including limitations when using arbitrary
+            UPDATE and DELETE with mapper inheritance configurations.
+
+        :param values: a dictionary with attribute names, or alternatively
+         mapped attributes or SQL expressions, as keys, and literal
+         values or SQL expressions as values.   If :ref:`parameter-ordered
+         mode <tutorial_parameter_ordered_updates>` is desired, the values can
+         be passed as a list of 2-tuples; this requires that the
+         :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order`
+         flag is passed to the :paramref:`.Query.update.update_args` dictionary
+         as well.
+
+        :param synchronize_session: chooses the strategy to update the
+         attributes on objects in the session.   See the section
+         :ref:`orm_expression_update_delete` for a discussion of these
+         strategies.
+
+        :param update_args: Optional dictionary, if present will be passed
+         to the underlying :func:`_expression.update` construct as the ``**kw``
+         for the object.  May be used to pass dialect-specific arguments such
+         as ``mysql_limit``, as well as other special arguments such as
+         :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order`.
+
+        :return: the count of rows matched as returned by the database's
+         "row count" feature.
+
+
+        .. seealso::
+
+            :ref:`orm_expression_update_delete`
+
+        """
+
+        update_args = update_args or {}
+
+        bulk_ud = BulkUpdate(self, values, update_args)
+
+        if self.dispatch.before_compile_update:
+            for fn in self.dispatch.before_compile_update:
+                new_query = fn(bulk_ud.query, bulk_ud)
+                if new_query is not None:
+                    bulk_ud.query = new_query
+            self = bulk_ud.query
+
+        upd = sql.update(*self._raw_columns)  # type: ignore
+
+        ppo = update_args.pop("preserve_parameter_order", False)
+        if ppo:
+            upd = upd.ordered_values(*values)  # type: ignore
+        else:
+            upd = upd.values(values)
+        if update_args:
+            upd = upd.with_dialect_options(**update_args)
+
+        upd._where_criteria = self._where_criteria
+        result: CursorResult[Any] = self.session.execute(
+            upd,
+            self._params,
+            execution_options=self._execution_options.union(
+                {"synchronize_session": synchronize_session}
+            ),
+        )
+        bulk_ud.result = result  # type: ignore
+        self.session.dispatch.after_bulk_update(bulk_ud)
+        result.close()
+        return result.rowcount
+
+    def _compile_state(
+        self, for_statement: bool = False, **kw: Any
+    ) -> ORMCompileState:
+        """Create an out-of-compiler ORMCompileState object.
+
+        The ORMCompileState object is normally created directly as a result
+        of the SQLCompiler.process() method being handed a Select()
+        or FromStatement() object that uses the "orm" plugin.   This method
+        provides a means of creating this ORMCompileState object directly
+        without using the compiler.
+
+        This method is used only for deprecated cases, which include
+        the .from_self() method for a Query that has multiple levels
+        of .from_self() in use, as well as the instances() method.  It is
+        also used within the test suite to generate ORMCompileState objects
+        for test purposes.
+
+        """
+
+        stmt = self._statement_20(for_statement=for_statement, **kw)
+        assert for_statement == stmt._compile_options._for_statement
+
+        # this chooses between ORMFromStatementCompileState and
+        # ORMSelectCompileState.  We could also base this on
+        # query._statement is not None as we have the ORM Query here
+        # however this is the more general path.
+        compile_state_cls = cast(
+            ORMCompileState,
+            ORMCompileState._get_plugin_class_for_plugin(stmt, "orm"),
+        )
+
+        return compile_state_cls._create_orm_context(
+            stmt, toplevel=True, compiler=None
+        )
+
+    def _compile_context(self, for_statement: bool = False) -> QueryContext:
+        compile_state = self._compile_state(for_statement=for_statement)
+        context = QueryContext(
+            compile_state,
+            compile_state.statement,
+            compile_state.statement,
+            self._params,
+            self.session,
+            self.load_options,
+        )
+
+        return context
+
+
+class AliasOption(interfaces.LoaderOption):
+    inherit_cache = False
+
+    @util.deprecated(
+        "1.4",
+        "The :class:`.AliasOption` object is not necessary "
+        "for entities to be matched up to a query that is established "
+        "via :meth:`.Query.from_statement` and now does nothing.",
+    )
+    def __init__(self, alias: Union[Alias, Subquery]):
+        r"""Return a :class:`.MapperOption` that will indicate to the
+        :class:`_query.Query`
+        that the main table has been aliased.
+
+        """
+
+    def process_compile_state(self, compile_state: ORMCompileState) -> None:
+        pass
+
+
+class BulkUD:
+    """State used for the orm.Query version of update() / delete().
+
+    This object is now specific to Query only.
+
+    """
+
+    def __init__(self, query: Query[Any]):
+        self.query = query.enable_eagerloads(False)
+        self._validate_query_state()
+        self.mapper = self.query._entity_from_pre_ent_zero()
+
+    def _validate_query_state(self) -> None:
+        for attr, methname, notset, op in (
+            ("_limit_clause", "limit()", None, operator.is_),
+            ("_offset_clause", "offset()", None, operator.is_),
+            ("_order_by_clauses", "order_by()", (), operator.eq),
+            ("_group_by_clauses", "group_by()", (), operator.eq),
+            ("_distinct", "distinct()", False, operator.is_),
+            (
+                "_from_obj",
+                "join(), outerjoin(), select_from(), or from_self()",
+                (),
+                operator.eq,
+            ),
+            (
+                "_setup_joins",
+                "join(), outerjoin(), select_from(), or from_self()",
+                (),
+                operator.eq,
+            ),
+        ):
+            if not op(getattr(self.query, attr), notset):
+                raise sa_exc.InvalidRequestError(
+                    "Can't call Query.update() or Query.delete() "
+                    "when %s has been called" % (methname,)
+                )
+
+    @property
+    def session(self) -> Session:
+        return self.query.session
+
+
+class BulkUpdate(BulkUD):
+    """BulkUD which handles UPDATEs."""
+
+    def __init__(
+        self,
+        query: Query[Any],
+        values: Dict[_DMLColumnArgument, Any],
+        update_kwargs: Optional[Dict[Any, Any]],
+    ):
+        super().__init__(query)
+        self.values = values
+        self.update_kwargs = update_kwargs
+
+
+class BulkDelete(BulkUD):
+    """BulkUD which handles DELETEs."""
+
+    def __init__(
+        self,
+        query: Query[Any],
+        delete_kwargs: Optional[Dict[Any, Any]],
+    ):
+        super().__init__(query)
+        self.delete_kwargs = delete_kwargs
+
+
+class RowReturningQuery(Query[Row[_TP]]):
+    if TYPE_CHECKING:
+
+        def tuples(self) -> Query[_TP]:  # type: ignore
+            ...
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/relationships.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/relationships.py
new file mode 100644
index 00000000..0d0bc708
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/relationships.py
@@ -0,0 +1,3514 @@
+# orm/relationships.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Heuristics related to join conditions as used in
+:func:`_orm.relationship`.
+
+Provides the :class:`.JoinCondition` object, which encapsulates
+SQL annotation and aliasing behavior focused on the ``primaryjoin``
+and ``secondaryjoin`` aspects of :func:`_orm.relationship`.
+
+"""
+from __future__ import annotations
+
+import collections
+from collections import abc
+import dataclasses
+import inspect as _py_inspect
+import itertools
+import re
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Collection
+from typing import Dict
+from typing import FrozenSet
+from typing import Generic
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import NamedTuple
+from typing import NoReturn
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TypeVar
+from typing import Union
+import weakref
+
+from . import attributes
+from . import strategy_options
+from ._typing import insp_is_aliased_class
+from ._typing import is_has_collection_adapter
+from .base import _DeclarativeMapped
+from .base import _is_mapped_class
+from .base import class_mapper
+from .base import DynamicMapped
+from .base import LoaderCallableStatus
+from .base import PassiveFlag
+from .base import state_str
+from .base import WriteOnlyMapped
+from .interfaces import _AttributeOptions
+from .interfaces import _IntrospectsAnnotations
+from .interfaces import MANYTOMANY
+from .interfaces import MANYTOONE
+from .interfaces import ONETOMANY
+from .interfaces import PropComparator
+from .interfaces import RelationshipDirection
+from .interfaces import StrategizedProperty
+from .util import _orm_annotate
+from .util import _orm_deannotate
+from .util import CascadeOptions
+from .. import exc as sa_exc
+from .. import Exists
+from .. import log
+from .. import schema
+from .. import sql
+from .. import util
+from ..inspection import inspect
+from ..sql import coercions
+from ..sql import expression
+from ..sql import operators
+from ..sql import roles
+from ..sql import visitors
+from ..sql._typing import _ColumnExpressionArgument
+from ..sql._typing import _HasClauseElement
+from ..sql.annotation import _safe_annotate
+from ..sql.elements import ColumnClause
+from ..sql.elements import ColumnElement
+from ..sql.util import _deep_annotate
+from ..sql.util import _deep_deannotate
+from ..sql.util import _shallow_annotate
+from ..sql.util import adapt_criterion_to_null
+from ..sql.util import ClauseAdapter
+from ..sql.util import join_condition
+from ..sql.util import selectables_overlap
+from ..sql.util import visit_binary_product
+from ..util.typing import de_optionalize_union_types
+from ..util.typing import Literal
+from ..util.typing import resolve_name_to_real_class_name
+
+if typing.TYPE_CHECKING:
+    from ._typing import _EntityType
+    from ._typing import _ExternalEntityType
+    from ._typing import _IdentityKeyType
+    from ._typing import _InstanceDict
+    from ._typing import _InternalEntityType
+    from ._typing import _O
+    from ._typing import _RegistryType
+    from .base import Mapped
+    from .clsregistry import _class_resolver
+    from .clsregistry import _ModNS
+    from .decl_base import _ClassScanMapperConfig
+    from .dependency import DependencyProcessor
+    from .mapper import Mapper
+    from .query import Query
+    from .session import Session
+    from .state import InstanceState
+    from .strategies import LazyLoader
+    from .util import AliasedClass
+    from .util import AliasedInsp
+    from ..sql._typing import _CoreAdapterProto
+    from ..sql._typing import _EquivalentColumnMap
+    from ..sql._typing import _InfoType
+    from ..sql.annotation import _AnnotationDict
+    from ..sql.annotation import SupportsAnnotations
+    from ..sql.elements import BinaryExpression
+    from ..sql.elements import BindParameter
+    from ..sql.elements import ClauseElement
+    from ..sql.schema import Table
+    from ..sql.selectable import FromClause
+    from ..util.typing import _AnnotationScanType
+    from ..util.typing import RODescriptorReference
+
+_T = TypeVar("_T", bound=Any)
+_T1 = TypeVar("_T1", bound=Any)
+_T2 = TypeVar("_T2", bound=Any)
+
+_PT = TypeVar("_PT", bound=Any)
+
+_PT2 = TypeVar("_PT2", bound=Any)
+
+
+_RelationshipArgumentType = Union[
+    str,
+    Type[_T],
+    Callable[[], Type[_T]],
+    "Mapper[_T]",
+    "AliasedClass[_T]",
+    Callable[[], "Mapper[_T]"],
+    Callable[[], "AliasedClass[_T]"],
+]
+
+_LazyLoadArgumentType = Literal[
+    "select",
+    "joined",
+    "selectin",
+    "subquery",
+    "raise",
+    "raise_on_sql",
+    "noload",
+    "immediate",
+    "write_only",
+    "dynamic",
+    True,
+    False,
+    None,
+]
+
+
+_RelationshipJoinConditionArgument = Union[
+    str, _ColumnExpressionArgument[bool]
+]
+_RelationshipSecondaryArgument = Union[
+    "FromClause", str, Callable[[], "FromClause"]
+]
+_ORMOrderByArgument = Union[
+    Literal[False],
+    str,
+    _ColumnExpressionArgument[Any],
+    Callable[[], _ColumnExpressionArgument[Any]],
+    Callable[[], Iterable[_ColumnExpressionArgument[Any]]],
+    Iterable[Union[str, _ColumnExpressionArgument[Any]]],
+]
+ORMBackrefArgument = Union[str, Tuple[str, Dict[str, Any]]]
+
+_ORMColCollectionElement = Union[
+    ColumnClause[Any],
+    _HasClauseElement[Any],
+    roles.DMLColumnRole,
+    "Mapped[Any]",
+]
+_ORMColCollectionArgument = Union[
+    str,
+    Sequence[_ORMColCollectionElement],
+    Callable[[], Sequence[_ORMColCollectionElement]],
+    Callable[[], _ORMColCollectionElement],
+    _ORMColCollectionElement,
+]
+
+
+_CEA = TypeVar("_CEA", bound=_ColumnExpressionArgument[Any])
+
+_CE = TypeVar("_CE", bound="ColumnElement[Any]")
+
+
+_ColumnPairIterable = Iterable[Tuple[ColumnElement[Any], ColumnElement[Any]]]
+
+_ColumnPairs = Sequence[Tuple[ColumnElement[Any], ColumnElement[Any]]]
+
+_MutableColumnPairs = List[Tuple[ColumnElement[Any], ColumnElement[Any]]]
+
+
+def remote(expr: _CEA) -> _CEA:
+    """Annotate a portion of a primaryjoin expression
+    with a 'remote' annotation.
+
+    See the section :ref:`relationship_custom_foreign` for a
+    description of use.
+
+    .. seealso::
+
+        :ref:`relationship_custom_foreign`
+
+        :func:`.foreign`
+
+    """
+    return _annotate_columns(  # type: ignore
+        coercions.expect(roles.ColumnArgumentRole, expr), {"remote": True}
+    )
+
+
+def foreign(expr: _CEA) -> _CEA:
+    """Annotate a portion of a primaryjoin expression
+    with a 'foreign' annotation.
+
+    See the section :ref:`relationship_custom_foreign` for a
+    description of use.
+
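+    E.g., a minimal self-referential sketch; the ``Node`` class and its
+    columns are assumptions for illustration.  ``parent_id`` is marked as
+    the "foreign" side and ``id`` as the "remote" side of the join::
+
+        class Node(Base):
+            __tablename__ = "node"
+
+            id = mapped_column(Integer, primary_key=True)
+            parent_id = mapped_column(ForeignKey("node.id"))
+
+            # many-to-one towards the parent row; equivalent to
+            # specifying remote_side=[id]
+            parent = relationship(
+                "Node",
+                primaryjoin="remote(Node.id) == foreign(Node.parent_id)",
+            )
+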
+    .. seealso::
+
+        :ref:`relationship_custom_foreign`
+
+        :func:`.remote`
+
+    """
+
+    return _annotate_columns(  # type: ignore
+        coercions.expect(roles.ColumnArgumentRole, expr), {"foreign": True}
+    )
+
+
+@dataclasses.dataclass
+class _RelationshipArg(Generic[_T1, _T2]):
+    """stores a user-defined parameter value that must be resolved and
+    parsed later at mapper configuration time.
+
+    """
+
+    __slots__ = "name", "argument", "resolved"
+    name: str
+    argument: _T1
+    resolved: Optional[_T2]
+
+    def _is_populated(self) -> bool:
+        return self.argument is not None
+
+    def _resolve_against_registry(
+        self, clsregistry_resolver: Callable[[str, bool], _class_resolver]
+    ) -> None:
+        attr_value = self.argument
+
+        if isinstance(attr_value, str):
+            self.resolved = clsregistry_resolver(
+                attr_value, self.name == "secondary"
+            )()
+        elif callable(attr_value) and not _is_mapped_class(attr_value):
+            self.resolved = attr_value()
+        else:
+            self.resolved = attr_value
+
+
+_RelationshipOrderByArg = Union[Literal[False], Tuple[ColumnElement[Any], ...]]
+
+
+class _RelationshipArgs(NamedTuple):
+    """stores user-passed parameters that are resolved at mapper configuration
+    time.
+
+    """
+
+    secondary: _RelationshipArg[
+        Optional[_RelationshipSecondaryArgument],
+        Optional[FromClause],
+    ]
+    primaryjoin: _RelationshipArg[
+        Optional[_RelationshipJoinConditionArgument],
+        Optional[ColumnElement[Any]],
+    ]
+    secondaryjoin: _RelationshipArg[
+        Optional[_RelationshipJoinConditionArgument],
+        Optional[ColumnElement[Any]],
+    ]
+    order_by: _RelationshipArg[_ORMOrderByArgument, _RelationshipOrderByArg]
+    foreign_keys: _RelationshipArg[
+        Optional[_ORMColCollectionArgument], Set[ColumnElement[Any]]
+    ]
+    remote_side: _RelationshipArg[
+        Optional[_ORMColCollectionArgument], Set[ColumnElement[Any]]
+    ]
+
+
+@log.class_logger
+class RelationshipProperty(
+    _IntrospectsAnnotations, StrategizedProperty[_T], log.Identified
+):
+    """Describes an object property that holds a single item or list
+    of items that correspond to a related database table.
+
+    Public constructor is the :func:`_orm.relationship` function.
+
+    .. seealso::
+
+        :ref:`relationship_config_toplevel`
+
+    """
+
+    strategy_wildcard_key = strategy_options._RELATIONSHIP_TOKEN
+    inherit_cache = True
+    """:meta private:"""
+
+    _links_to_entity = True
+    _is_relationship = True
+
+    _overlaps: Sequence[str]
+
+    _lazy_strategy: LazyLoader
+
+    _persistence_only = dict(
+        passive_deletes=False,
+        passive_updates=True,
+        enable_typechecks=True,
+        active_history=False,
+        cascade_backrefs=False,
+    )
+
+    _dependency_processor: Optional[DependencyProcessor] = None
+
+    primaryjoin: ColumnElement[bool]
+    secondaryjoin: Optional[ColumnElement[bool]]
+    secondary: Optional[FromClause]
+    _join_condition: JoinCondition
+    order_by: _RelationshipOrderByArg
+
+    _user_defined_foreign_keys: Set[ColumnElement[Any]]
+    _calculated_foreign_keys: Set[ColumnElement[Any]]
+
+    remote_side: Set[ColumnElement[Any]]
+    local_columns: Set[ColumnElement[Any]]
+
+    synchronize_pairs: _ColumnPairs
+    secondary_synchronize_pairs: Optional[_ColumnPairs]
+
+    local_remote_pairs: Optional[_ColumnPairs]
+
+    direction: RelationshipDirection
+
+    _init_args: _RelationshipArgs
+
+    def __init__(
+        self,
+        argument: Optional[_RelationshipArgumentType[_T]] = None,
+        secondary: Optional[_RelationshipSecondaryArgument] = None,
+        *,
+        uselist: Optional[bool] = None,
+        collection_class: Optional[
+            Union[Type[Collection[Any]], Callable[[], Collection[Any]]]
+        ] = None,
+        primaryjoin: Optional[_RelationshipJoinConditionArgument] = None,
+        secondaryjoin: Optional[_RelationshipJoinConditionArgument] = None,
+        back_populates: Optional[str] = None,
+        order_by: _ORMOrderByArgument = False,
+        backref: Optional[ORMBackrefArgument] = None,
+        overlaps: Optional[str] = None,
+        post_update: bool = False,
+        cascade: str = "save-update, merge",
+        viewonly: bool = False,
+        attribute_options: Optional[_AttributeOptions] = None,
+        lazy: _LazyLoadArgumentType = "select",
+        passive_deletes: Union[Literal["all"], bool] = False,
+        passive_updates: bool = True,
+        active_history: bool = False,
+        enable_typechecks: bool = True,
+        foreign_keys: Optional[_ORMColCollectionArgument] = None,
+        remote_side: Optional[_ORMColCollectionArgument] = None,
+        join_depth: Optional[int] = None,
+        comparator_factory: Optional[
+            Type[RelationshipProperty.Comparator[Any]]
+        ] = None,
+        single_parent: bool = False,
+        innerjoin: bool = False,
+        distinct_target_key: Optional[bool] = None,
+        load_on_pending: bool = False,
+        query_class: Optional[Type[Query[Any]]] = None,
+        info: Optional[_InfoType] = None,
+        omit_join: Literal[None, False] = None,
+        sync_backref: Optional[bool] = None,
+        doc: Optional[str] = None,
+        bake_queries: Literal[True] = True,
+        cascade_backrefs: Literal[False] = False,
+        _local_remote_pairs: Optional[_ColumnPairs] = None,
+        _legacy_inactive_history_style: bool = False,
+    ):
+        super().__init__(attribute_options=attribute_options)
+
+        self.uselist = uselist
+        self.argument = argument
+
+        self._init_args = _RelationshipArgs(
+            _RelationshipArg("secondary", secondary, None),
+            _RelationshipArg("primaryjoin", primaryjoin, None),
+            _RelationshipArg("secondaryjoin", secondaryjoin, None),
+            _RelationshipArg("order_by", order_by, None),
+            _RelationshipArg("foreign_keys", foreign_keys, None),
+            _RelationshipArg("remote_side", remote_side, None),
+        )
+
+        self.post_update = post_update
+        self.viewonly = viewonly
+        if viewonly:
+            self._warn_for_persistence_only_flags(
+                passive_deletes=passive_deletes,
+                passive_updates=passive_updates,
+                enable_typechecks=enable_typechecks,
+                active_history=active_history,
+                cascade_backrefs=cascade_backrefs,
+            )
+        if viewonly and sync_backref:
+            raise sa_exc.ArgumentError(
+                "sync_backref and viewonly cannot both be True"
+            )
+        self.sync_backref = sync_backref
+        self.lazy = lazy
+        self.single_parent = single_parent
+        self.collection_class = collection_class
+        self.passive_deletes = passive_deletes
+
+        if cascade_backrefs:
+            raise sa_exc.ArgumentError(
+                "The 'cascade_backrefs' parameter passed to "
+                "relationship() may only be set to False."
+            )
+
+        self.passive_updates = passive_updates
+        self.enable_typechecks = enable_typechecks
+        self.query_class = query_class
+        self.innerjoin = innerjoin
+        self.distinct_target_key = distinct_target_key
+        self.doc = doc
+        self.active_history = active_history
+        self._legacy_inactive_history_style = _legacy_inactive_history_style
+
+        self.join_depth = join_depth
+        if omit_join:
+            util.warn(
+                "setting omit_join to True is not supported; selectin "
+                "loading of this relationship may not work correctly if this "
+                "flag is set explicitly.  omit_join optimization is "
+                "automatically detected for conditions under which it is "
+                "supported."
+            )
+
+        self.omit_join = omit_join
+        self.local_remote_pairs = _local_remote_pairs
+        self.load_on_pending = load_on_pending
+        self.comparator_factory = (
+            comparator_factory or RelationshipProperty.Comparator
+        )
+        util.set_creation_order(self)
+
+        if info is not None:
+            self.info.update(info)
+
+        self.strategy_key = (("lazy", self.lazy),)
+
+        self._reverse_property: Set[RelationshipProperty[Any]] = set()
+
+        if overlaps:
+            self._overlaps = set(re.split(r"\s*,\s*", overlaps))  # type: ignore  # noqa: E501
+        else:
+            self._overlaps = ()
+
+        # mypy ignoring the @property setter
+        self.cascade = cascade  # type: ignore
+
+        self.back_populates = back_populates
+
+        if self.back_populates:
+            if backref:
+                raise sa_exc.ArgumentError(
+                    "backref and back_populates keyword arguments "
+                    "are mutually exclusive"
+                )
+            self.backref = None
+        else:
+            self.backref = backref
+
+    def _warn_for_persistence_only_flags(self, **kw: Any) -> None:
+        for k, v in kw.items():
+            if v != self._persistence_only[k]:
+                # we are warning here rather than warn deprecated as this is a
+                # configuration mistake, and Python shows regular warnings more
+                # aggressively than deprecation warnings by default. Unlike the
+                # case of setting viewonly with cascade, the settings being
+                # warned about here are not actively doing the wrong thing
+                # against viewonly=True, so it is not as urgent to have these
+                # raise an error.
+                util.warn(
+                    "Setting %s on relationship() while also "
+                    "setting viewonly=True does not make sense, as a "
+                    "viewonly=True relationship does not perform persistence "
+                    "operations. This configuration may raise an error "
+                    "in a future release." % (k,)
+                )
+
+    def instrument_class(self, mapper: Mapper[Any]) -> None:
+        attributes.register_descriptor(
+            mapper.class_,
+            self.key,
+            comparator=self.comparator_factory(self, mapper),
+            parententity=mapper,
+            doc=self.doc,
+        )
+
+    class Comparator(util.MemoizedSlots, PropComparator[_PT]):
+        """Produce boolean, comparison, and other operators for
+        :class:`.RelationshipProperty` attributes.
+
+        See the documentation for :class:`.PropComparator` for a brief
+        overview of ORM level operator definition.
+
+        .. seealso::
+
+            :class:`.PropComparator`
+
+            :class:`.ColumnProperty.Comparator`
+
+            :class:`.ColumnOperators`
+
+            :ref:`types_operators`
+
+            :attr:`.TypeEngine.comparator_factory`
+
+        """
+
+        __slots__ = (
+            "entity",
+            "mapper",
+            "property",
+            "_of_type",
+            "_extra_criteria",
+        )
+
+        prop: RODescriptorReference[RelationshipProperty[_PT]]
+        _of_type: Optional[_EntityType[_PT]]
+
+        def __init__(
+            self,
+            prop: RelationshipProperty[_PT],
+            parentmapper: _InternalEntityType[Any],
+            adapt_to_entity: Optional[AliasedInsp[Any]] = None,
+            of_type: Optional[_EntityType[_PT]] = None,
+            extra_criteria: Tuple[ColumnElement[bool], ...] = (),
+        ):
+            """Construction of :class:`.RelationshipProperty.Comparator`
+            is internal to the ORM's attribute mechanics.
+
+            """
+            self.prop = prop
+            self._parententity = parentmapper
+            self._adapt_to_entity = adapt_to_entity
+            if of_type:
+                self._of_type = of_type
+            else:
+                self._of_type = None
+            self._extra_criteria = extra_criteria
+
+        def adapt_to_entity(
+            self, adapt_to_entity: AliasedInsp[Any]
+        ) -> RelationshipProperty.Comparator[Any]:
+            return self.__class__(
+                self.prop,
+                self._parententity,
+                adapt_to_entity=adapt_to_entity,
+                of_type=self._of_type,
+            )
+
+        entity: _InternalEntityType[_PT]
+        """The target entity referred to by this
+        :class:`.RelationshipProperty.Comparator`.
+
+        This is either a :class:`_orm.Mapper` or :class:`.AliasedInsp`
+        object.
+
+        This is the "target" or "remote" side of the
+        :func:`_orm.relationship`.
+
+        """
+
+        mapper: Mapper[_PT]
+        """The target :class:`_orm.Mapper` referred to by this
+        :class:`.RelationshipProperty.Comparator`.
+
+        This is the "target" or "remote" side of the
+        :func:`_orm.relationship`.
+
+        """
+
+        def _memoized_attr_entity(self) -> _InternalEntityType[_PT]:
+            if self._of_type:
+                return inspect(self._of_type)  # type: ignore
+            else:
+                return self.prop.entity
+
+        def _memoized_attr_mapper(self) -> Mapper[_PT]:
+            return self.entity.mapper
+
+        def _source_selectable(self) -> FromClause:
+            if self._adapt_to_entity:
+                return self._adapt_to_entity.selectable
+            else:
+                return self.property.parent._with_polymorphic_selectable
+
+        def __clause_element__(self) -> ColumnElement[bool]:
+            adapt_from = self._source_selectable()
+            if self._of_type:
+                of_type_entity = inspect(self._of_type)
+            else:
+                of_type_entity = None
+
+            (
+                pj,
+                sj,
+                source,
+                dest,
+                secondary,
+                target_adapter,
+            ) = self.prop._create_joins(
+                source_selectable=adapt_from,
+                source_polymorphic=True,
+                of_type_entity=of_type_entity,
+                alias_secondary=True,
+                extra_criteria=self._extra_criteria,
+            )
+            if sj is not None:
+                return pj & sj
+            else:
+                return pj
+
+        def of_type(self, class_: _EntityType[Any]) -> PropComparator[_PT]:
+            r"""Redefine this object in terms of a polymorphic subclass.
+
+            See :meth:`.PropComparator.of_type` for an example.
+
+
+            """
+            return RelationshipProperty.Comparator(
+                self.prop,
+                self._parententity,
+                adapt_to_entity=self._adapt_to_entity,
+                of_type=class_,
+                extra_criteria=self._extra_criteria,
+            )
+
+        def and_(
+            self, *criteria: _ColumnExpressionArgument[bool]
+        ) -> PropComparator[Any]:
+            """Add AND criteria.
+
+            See :meth:`.PropComparator.and_` for an example.
+
+            .. versionadded:: 1.4
+
+            """
+            exprs = tuple(
+                coercions.expect(roles.WhereHavingRole, clause)
+                for clause in util.coerce_generator_arg(criteria)
+            )
+
+            return RelationshipProperty.Comparator(
+                self.prop,
+                self._parententity,
+                adapt_to_entity=self._adapt_to_entity,
+                of_type=self._of_type,
+                extra_criteria=self._extra_criteria + exprs,
+            )
+
+        def in_(self, other: Any) -> NoReturn:
+            """Produce an IN clause - this is not implemented
+            for :func:`_orm.relationship`-based attributes at this time.
+
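+            For a simple many-to-one, a comparable effect may be had by
+            filtering against the foreign key column directly (a sketch;
+            ``Address.user_id`` is an assumed mapped column)::
+
+                session.query(Address).filter(Address.user_id.in_([1, 2, 3]))
+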
+            """
+            raise NotImplementedError(
+                "in_() not yet supported for "
+                "relationships.  For a simple "
+                "many-to-one, use in_() against "
+                "the set of foreign key values."
+            )
+
+        # https://github.com/python/mypy/issues/4266
+        __hash__ = None  # type: ignore
+
+        def __eq__(self, other: Any) -> ColumnElement[bool]:  # type: ignore[override]  # noqa: E501
+            """Implement the ``==`` operator.
+
+            In a many-to-one context, such as:
+
+            .. sourcecode:: text
+
+              MyClass.some_prop == <some object>
+
+            this will typically produce a
+            clause such as:
+
+            .. sourcecode:: text
+
+              mytable.related_id == <some id>
+
+            Where ``<some id>`` is the primary key of the given
+            object.
+
+            The ``==`` operator provides partial functionality for non-
+            many-to-one comparisons:
+
+            * Comparisons against collections are not supported.
+              Use :meth:`~.Relationship.Comparator.contains`.
+            * Compared to a scalar one-to-many, will produce a
+              clause that compares the target columns in the parent to
+              the given target.
+            * Compared to a scalar many-to-many, an alias
+              of the association table will be rendered as
+              well, forming a natural join that is part of the
+              main body of the query. This will not work for
+              queries that go beyond simple AND conjunctions of
+              comparisons, such as those which use OR. Use
+              explicit joins, outerjoins, or
+              :meth:`~.Relationship.Comparator.has` for
+              more comprehensive non-many-to-one scalar
+              membership tests.
+            * Comparisons against ``None`` given in a one-to-many
+              or many-to-many context produce a NOT EXISTS clause.
+
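+            E.g., a many-to-one sketch, where ``Address.user`` is an
+            assumed relationship to ``User``::
+
+                # compares the foreign key column(s) on Address to the
+                # primary key identity of the given object
+                session.query(Address).filter(Address.user == some_user)
+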
+            """
+            if other is None or isinstance(other, expression.Null):
+                if self.property.direction in [ONETOMANY, MANYTOMANY]:
+                    return ~self._criterion_exists()
+                else:
+                    return _orm_annotate(
+                        self.property._optimized_compare(
+                            None, adapt_source=self.adapter
+                        )
+                    )
+            elif self.property.uselist:
+                raise sa_exc.InvalidRequestError(
+                    "Can't compare a collection to an object or collection; "
+                    "use contains() to test for membership."
+                )
+            else:
+                return _orm_annotate(
+                    self.property._optimized_compare(
+                        other, adapt_source=self.adapter
+                    )
+                )
+
+        def _criterion_exists(
+            self,
+            criterion: Optional[_ColumnExpressionArgument[bool]] = None,
+            **kwargs: Any,
+        ) -> Exists:
+            where_criteria = (
+                coercions.expect(roles.WhereHavingRole, criterion)
+                if criterion is not None
+                else None
+            )
+
+            if getattr(self, "_of_type", None):
+                info: Optional[_InternalEntityType[Any]] = inspect(
+                    self._of_type
+                )
+                assert info is not None
+                target_mapper, to_selectable, is_aliased_class = (
+                    info.mapper,
+                    info.selectable,
+                    info.is_aliased_class,
+                )
+                if self.property._is_self_referential and not is_aliased_class:
+                    to_selectable = to_selectable._anonymous_fromclause()
+
+                single_crit = target_mapper._single_table_criterion
+                if single_crit is not None:
+                    if where_criteria is not None:
+                        where_criteria = single_crit & where_criteria
+                    else:
+                        where_criteria = single_crit
+            else:
+                is_aliased_class = False
+                to_selectable = None
+
+            if self.adapter:
+                source_selectable = self._source_selectable()
+            else:
+                source_selectable = None
+
+            (
+                pj,
+                sj,
+                source,
+                dest,
+                secondary,
+                target_adapter,
+            ) = self.property._create_joins(
+                dest_selectable=to_selectable,
+                source_selectable=source_selectable,
+            )
+
+            for k in kwargs:
+                crit = getattr(self.property.mapper.class_, k) == kwargs[k]
+                if where_criteria is None:
+                    where_criteria = crit
+                else:
+                    where_criteria = where_criteria & crit
+
+            # annotate the *local* side of the join condition, in the case
+            # of pj + sj this is the full primaryjoin, in the case of just
+            # pj it's the local side of the primaryjoin.
+            if sj is not None:
+                j = _orm_annotate(pj) & sj
+            else:
+                j = _orm_annotate(pj, exclude=self.property.remote_side)
+
+            if (
+                where_criteria is not None
+                and target_adapter
+                and not is_aliased_class
+            ):
+                # limit this adapter to annotated only?
+                where_criteria = target_adapter.traverse(where_criteria)
+
+            # only have the "joined left side" of what we
+            # return be subject to Query adaption.  The right
+            # side of it is used for an exists() subquery and
+            # should not correlate or otherwise reach out
+            # to anything in the enclosing query.
+            if where_criteria is not None:
+                where_criteria = where_criteria._annotate(
+                    {"no_replacement_traverse": True}
+                )
+
+            crit = j & sql.True_._ifnone(where_criteria)
+
+            if secondary is not None:
+                ex = (
+                    sql.exists(1)
+                    .where(crit)
+                    .select_from(dest, secondary)
+                    .correlate_except(dest, secondary)
+                )
+            else:
+                ex = (
+                    sql.exists(1)
+                    .where(crit)
+                    .select_from(dest)
+                    .correlate_except(dest)
+                )
+            return ex
+
+        def any(
+            self,
+            criterion: Optional[_ColumnExpressionArgument[bool]] = None,
+            **kwargs: Any,
+        ) -> ColumnElement[bool]:
+            """Produce an expression that tests a collection against
+            particular criterion, using EXISTS.
+
+            An expression like::
+
+                session.query(MyClass).filter(
+                    MyClass.somereference.any(SomeRelated.x == 2)
+                )
+
+            Will produce a query like:
+
+            .. sourcecode:: sql
+
+                SELECT * FROM my_table WHERE
+                EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id
+                AND related.x=2)
+
+            Because :meth:`~.Relationship.Comparator.any` uses
+            a correlated subquery, its performance is not nearly as
+            good when compared against large target tables as that of
+            using a join.
+
+            :meth:`~.Relationship.Comparator.any` is particularly
+            useful for testing for empty collections::
+
+                session.query(MyClass).filter(~MyClass.somereference.any())
+
+            will produce:
+
+            .. sourcecode:: sql
+
+                SELECT * FROM my_table WHERE
+                NOT (EXISTS (SELECT 1 FROM related WHERE
+                related.my_id=my_table.id))
+
+            :meth:`~.Relationship.Comparator.any` is only
+            valid for collections, i.e. a :func:`_orm.relationship`
+            that has ``uselist=True``.  For scalar references,
+            use :meth:`~.Relationship.Comparator.has`.
+
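+            Keyword arguments are interpreted as equality comparisons
+            against attributes of the related class, so the first example
+            above may also be written as::
+
+                session.query(MyClass).filter(MyClass.somereference.any(x=2))
+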
+            """
+            if not self.property.uselist:
+                raise sa_exc.InvalidRequestError(
+                    "'any()' not implemented for scalar "
+                    "attributes. Use has()."
+                )
+
+            return self._criterion_exists(criterion, **kwargs)
+
+        def has(
+            self,
+            criterion: Optional[_ColumnExpressionArgument[bool]] = None,
+            **kwargs: Any,
+        ) -> ColumnElement[bool]:
+            """Produce an expression that tests a scalar reference against
+            particular criterion, using EXISTS.
+
+            An expression like::
+
+                session.query(MyClass).filter(
+                    MyClass.somereference.has(SomeRelated.x == 2)
+                )
+
+            Will produce a query like:
+
+            .. sourcecode:: sql
+
+                SELECT * FROM my_table WHERE
+                EXISTS (SELECT 1 FROM related WHERE
+                related.id=my_table.related_id AND related.x=2)
+
+            Because :meth:`~.Relationship.Comparator.has` uses
+            a correlated subquery, its performance against large target
+            tables is not nearly as good as that of a join.
+
+            :meth:`~.Relationship.Comparator.has` is only
+            valid for scalar references, i.e. a :func:`_orm.relationship`
+            that has ``uselist=False``.  For collection references,
+            use :meth:`~.Relationship.Comparator.any`.
+
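+            As with :meth:`~.Relationship.Comparator.any`, keyword
+            arguments are interpreted as equality comparisons against
+            attributes of the related class::
+
+                session.query(MyClass).filter(MyClass.somereference.has(x=2))
+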
+            """
+            if self.property.uselist:
+                raise sa_exc.InvalidRequestError(
+                    "'has()' not implemented for collections. Use any()."
+                )
+            return self._criterion_exists(criterion, **kwargs)
+
+        def contains(
+            self, other: _ColumnExpressionArgument[Any], **kwargs: Any
+        ) -> ColumnElement[bool]:
+            """Return a simple expression that tests a collection for
+            containment of a particular item.
+
+            :meth:`~.Relationship.Comparator.contains` is
+            only valid for a collection, i.e. a
+            :func:`_orm.relationship` that implements
+            one-to-many or many-to-many with ``uselist=True``.
+
+            When used in a simple one-to-many context, an
+            expression like::
+
+                MyClass.contains(other)
+
+            Produces a clause like:
+
+            .. sourcecode:: sql
+
+                mytable.id == <some id>
+
+            Where ``<some id>`` is the value of the foreign key
+            attribute on ``other`` which refers to the primary
+            key of its parent object. From this it follows that
+            :meth:`~.Relationship.Comparator.contains` is
+            very useful when used with simple one-to-many
+            operations.
+
+            For many-to-many operations, the behavior of
+            :meth:`~.Relationship.Comparator.contains`
+            has more caveats. The association table will be
+            rendered in the statement, producing an "implicit"
+            join, that is, it includes multiple tables in the FROM
+            clause which are equated in the WHERE clause::
+
+                query(MyClass).filter(MyClass.contains(other))
+
+            Produces a query like:
+
+            .. sourcecode:: sql
+
+                SELECT * FROM my_table, my_association_table AS
+                my_association_table_1 WHERE
+                my_table.id = my_association_table_1.parent_id
+                AND my_association_table_1.child_id = <some id>
+
+            Where ``<some id>`` would be the primary key of
+            ``other``. From the above, it is clear that
+            :meth:`~.Relationship.Comparator.contains`
+            will **not** work with many-to-many collections when
+            used in queries that move beyond simple AND
+            conjunctions, such as multiple
+            :meth:`~.Relationship.Comparator.contains`
+            expressions joined by OR. In such cases subqueries or
+            explicit "outer joins" will need to be used instead.
+            See :meth:`~.Relationship.Comparator.any` for
+            a less-performant alternative using EXISTS, or refer
+            to :meth:`_query.Query.outerjoin`
+            as well as :ref:`orm_queryguide_joins`
+            for more details on constructing outer joins.
+
+            kwargs may be ignored by this operator but are required for API
+            conformance.
+            """
+            if not self.prop.uselist:
+                raise sa_exc.InvalidRequestError(
+                    "'contains' not implemented for scalar "
+                    "attributes.  Use =="
+                )
+
+            clause = self.prop._optimized_compare(
+                other, adapt_source=self.adapter
+            )
+
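+            # many-to-many only: a plain boolean inversion of the
+            # implicit-join clause above would not be correct, so attach
+            # an EXISTS-based negation to be used when this expression
+            # is inverted via ~ / not_()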
+            if self.prop.secondaryjoin is not None:
+                clause.negation_clause = self.__negated_contains_or_equals(
+                    other
+                )
+
+            return clause
+
+        def __negated_contains_or_equals(
+            self, other: Any
+        ) -> ColumnElement[bool]:
+            if self.prop.direction == MANYTOONE:
+                state = attributes.instance_state(other)
+
+                def state_bindparam(
+                    local_col: ColumnElement[Any],
+                    state: InstanceState[Any],
+                    remote_col: ColumnElement[Any],
+                ) -> BindParameter[Any]:
+                    dict_ = state.dict
+                    return sql.bindparam(
+                        local_col.key,
+                        type_=local_col.type,
+                        unique=True,
+                        callable_=self.prop._get_attr_w_warn_on_none(
+                            self.prop.mapper, state, dict_, remote_col
+                        ),
+                    )
+
+                def adapt(col: _CE) -> _CE:
+                    if self.adapter:
+                        return self.adapter(col)
+                    else:
+                        return col
+
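+                # when the lazy loader can use a simple "get" (a plain
+                # FK -> PK join), negate by comparing each local column
+                # to the related object's corresponding value, OR-ed with
+                # an IS NULL test so that rows with NULL foreign keys
+                # also count as "not contained"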
+                if self.property._use_get:
+                    return sql.and_(
+                        *[
+                            sql.or_(
+                                adapt(x)
+                                != state_bindparam(adapt(x), state, y),
+                                adapt(x) == None,
+                            )
+                            for (x, y) in self.property.local_remote_pairs
+                        ]
+                    )
+
+            criterion = sql.and_(
+                *[
+                    x == y
+                    for (x, y) in zip(
+                        self.property.mapper.primary_key,
+                        self.property.mapper.primary_key_from_instance(other),
+                    )
+                ]
+            )
+
+            return ~self._criterion_exists(criterion)
+
+        def __ne__(self, other: Any) -> ColumnElement[bool]:  # type: ignore[override]  # noqa: E501
+            """Implement the ``!=`` operator.
+
+            In a many-to-one context, such as:
+
+            .. sourcecode:: text
+
+              MyClass.some_prop != <some object>
+
+            This will typically produce a clause such as:
+
+            .. sourcecode:: sql
+
+              mytable.related_id != <some id>
+
+            Where ``<some id>`` is the primary key of the
+            given object.
+
+            The ``!=`` operator provides partial functionality for non-
+            many-to-one comparisons:
+
+            * Comparisons against collections are not supported.
+              Use
+              :meth:`~.Relationship.Comparator.contains`
+              in conjunction with :func:`_expression.not_`.
+            * Compared to a scalar one-to-many, a clause is produced
+              that compares the target columns in the parent to the
+              given target.
+            * Compared to a scalar many-to-many, an alias
+              of the association table will be rendered as
+              well, forming a natural join that is part of the
+              main body of the query. This will not work for
+              queries that go beyond simple AND conjunctions of
+              comparisons, such as those which use OR. Use
+              explicit joins, outerjoins, or
+              :meth:`~.Relationship.Comparator.has` in
+              conjunction with :func:`_expression.not_` for
+              more comprehensive non-many-to-one scalar
+              membership tests.
+            * Comparisons against ``None`` given in a one-to-many
+              or many-to-many context produce an EXISTS clause.
+
+            """
+            if other is None or isinstance(other, expression.Null):
+                if self.property.direction == MANYTOONE:
+                    return _orm_annotate(
+                        ~self.property._optimized_compare(
+                            None, adapt_source=self.adapter
+                        )
+                    )
+
+                else:
+                    return self._criterion_exists()
+            elif self.property.uselist:
+                raise sa_exc.InvalidRequestError(
+                    "Can't compare a collection"
+                    " to an object or collection; use "
+                    "contains() to test for membership."
+                )
+            else:
+                return _orm_annotate(self.__negated_contains_or_equals(other))
+
+        def _memoized_attr_property(self) -> RelationshipProperty[_PT]:
+            self.prop.parent._check_configure()
+            return self.prop
+
+    def _with_parent(
+        self,
+        instance: object,
+        alias_secondary: bool = True,
+        from_entity: Optional[_EntityType[Any]] = None,
+    ) -> ColumnElement[bool]:
+        assert instance is not None
+        adapt_source: Optional[_CoreAdapterProto] = None
+        if from_entity is not None:
+            insp: Optional[_InternalEntityType[Any]] = inspect(from_entity)
+            assert insp is not None
+            if insp_is_aliased_class(insp):
+                adapt_source = insp._adapter.adapt_clause
+        return self._optimized_compare(
+            instance,
+            value_is_parent=True,
+            adapt_source=adapt_source,
+            alias_secondary=alias_secondary,
+        )
+
+    def _optimized_compare(
+        self,
+        state: Any,
+        value_is_parent: bool = False,
+        adapt_source: Optional[_CoreAdapterProto] = None,
+        alias_secondary: bool = True,
+    ) -> ColumnElement[bool]:
+        if state is not None:
+            try:
+                state = inspect(state)
+            except sa_exc.NoInspectionAvailable:
+                state = None
+
+            if state is None or not getattr(state, "is_instance", False):
+                raise sa_exc.ArgumentError(
+                    "Mapped instance expected for relationship "
+                    "comparison to object.   Classes, queries and other "
+                    "SQL elements are not accepted in this context; for "
+                    "comparison with a subquery, "
+                    "use %s.has(**criteria)." % self
+                )
+        reverse_direction = not value_is_parent
+
+        if state is None:
+            return self._lazy_none_clause(
+                reverse_direction, adapt_source=adapt_source
+            )
+
+        if not reverse_direction:
+            criterion, bind_to_col = (
+                self._lazy_strategy._lazywhere,
+                self._lazy_strategy._bind_to_col,
+            )
+        else:
+            criterion, bind_to_col = (
+                self._lazy_strategy._rev_lazywhere,
+                self._lazy_strategy._rev_bind_to_col,
+            )
+
+        if reverse_direction:
+            mapper = self.mapper
+        else:
+            mapper = self.parent
+
+        dict_ = attributes.instance_dict(state.obj())
+
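+        # rewrite the bind parameters within the lazy loader's criterion
+        # so that each one draws its value from the given instance's
+        # attributes at statement execution time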
+        def visit_bindparam(bindparam: BindParameter[Any]) -> None:
+            if bindparam._identifying_key in bind_to_col:
+                bindparam.callable = self._get_attr_w_warn_on_none(
+                    mapper,
+                    state,
+                    dict_,
+                    bind_to_col[bindparam._identifying_key],
+                )
+
+        if self.secondary is not None and alias_secondary:
+            criterion = ClauseAdapter(
+                self.secondary._anonymous_fromclause()
+            ).traverse(criterion)
+
+        criterion = visitors.cloned_traverse(
+            criterion, {}, {"bindparam": visit_bindparam}
+        )
+
+        if adapt_source:
+            criterion = adapt_source(criterion)
+        return criterion
+
+    def _get_attr_w_warn_on_none(
+        self,
+        mapper: Mapper[Any],
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        column: ColumnElement[Any],
+    ) -> Callable[[], Any]:
+        """Create the callable that is used in a many-to-one expression.
+
+        E.g.::
+
+            u1 = s.query(User).get(5)
+
+            expr = Address.user == u1
+
+        Above, the SQL should be "address.user_id = 5". The callable
+        returned by this method produces the value "5" based on the identity
+        of ``u1``.
+
+        """
+
+        # in this callable, we're trying to thread the needle through
+        # a wide variety of scenarios, including:
+        #
+        # * the object hasn't been flushed yet and there's no value for
+        #   the attribute as of yet
+        #
+        # * the object hasn't been flushed yet but it has a user-defined
+        #   value
+        #
+        # * the object has a value but it's expired and not locally present
+        #
+        # * the object has a value but it's expired and not locally present,
+        #   and the object is also detached
+        #
+        # * the object hadn't been flushed yet and there was no value,
+        #   but later the object was expired and detached, and *now*
+        #   it's being evaluated
+        #
+        # * the object had a value, but it was changed to a new value, and
+        #   then expired
+        #
+        # * the object had a value, but it was changed to a new value, and
+        #   then expired, then the object was detached
+        #
+        # * the object has a user-set value, but it's None and we don't
+        #   handle that comparison correctly, so warn
+        #
+
+        prop = mapper.get_property_by_column(column)
+
+        # by invoking this method, InstanceState will track the last known
+        # value for this key each time the attribute is to be expired.
+        # this feature was added explicitly for use in this method.
+        state._track_last_known_value(prop.key)
+
+        lkv_fixed = state._last_known_values
+
+        def _go() -> Any:
+            assert lkv_fixed is not None
+            last_known = to_return = lkv_fixed[prop.key]
+            existing_is_available = (
+                last_known is not LoaderCallableStatus.NO_VALUE
+            )
+
+            # we support that the value may have changed.  so here we
+            # try to get the most recent value including re-fetching.
+            # only if we can't get a value now due to detachment do we return
+            # the last known value
+            current_value = mapper._get_state_attr_by_column(
+                state,
+                dict_,
+                column,
+                passive=(
+                    PassiveFlag.PASSIVE_OFF
+                    if state.persistent
+                    else PassiveFlag.PASSIVE_NO_FETCH ^ PassiveFlag.INIT_OK
+                ),
+            )
+
+            if current_value is LoaderCallableStatus.NEVER_SET:
+                if not existing_is_available:
+                    raise sa_exc.InvalidRequestError(
+                        "Can't resolve value for column %s on object "
+                        "%s; no value has been set for this column"
+                        % (column, state_str(state))
+                    )
+            elif current_value is LoaderCallableStatus.PASSIVE_NO_RESULT:
+                if not existing_is_available:
+                    raise sa_exc.InvalidRequestError(
+                        "Can't resolve value for column %s on object "
+                        "%s; the object is detached and the value was "
+                        "expired" % (column, state_str(state))
+                    )
+            else:
+                to_return = current_value
+            if to_return is None:
+                util.warn(
+                    "Got None for value of column %s; this is unsupported "
+                    "for a relationship comparison and will not "
+                    "currently produce an IS comparison "
+                    "(but may in a future release)" % column
+                )
+            return to_return
+
+        return _go
+
+    def _lazy_none_clause(
+        self,
+        reverse_direction: bool = False,
+        adapt_source: Optional[_CoreAdapterProto] = None,
+    ) -> ColumnElement[bool]:
+        if not reverse_direction:
+            criterion, bind_to_col = (
+                self._lazy_strategy._lazywhere,
+                self._lazy_strategy._bind_to_col,
+            )
+        else:
+            criterion, bind_to_col = (
+                self._lazy_strategy._rev_lazywhere,
+                self._lazy_strategy._rev_bind_to_col,
+            )
+
+        criterion = adapt_criterion_to_null(criterion, bind_to_col)
+
+        if adapt_source:
+            criterion = adapt_source(criterion)
+        return criterion
+
+    def __str__(self) -> str:
+        return str(self.parent.class_.__name__) + "." + self.key
+
+    def merge(
+        self,
+        session: Session,
+        source_state: InstanceState[Any],
+        source_dict: _InstanceDict,
+        dest_state: InstanceState[Any],
+        dest_dict: _InstanceDict,
+        load: bool,
+        _recursive: Dict[Any, object],
+        _resolve_conflict_map: Dict[_IdentityKeyType[Any], object],
+    ) -> None:
+        if load:
+            for r in self._reverse_property:
+                if (source_state, r) in _recursive:
+                    return
+
+        if "merge" not in self._cascade:
+            return
+
+        if self.key not in source_dict:
+            return
+
+        if self.uselist:
+            impl = source_state.get_impl(self.key)
+
+            assert is_has_collection_adapter(impl)
+            instances_iterable = impl.get_collection(source_state, source_dict)
+
+            # if this is a CollectionAttributeImpl, then empty should
+            # be False, otherwise "self.key in source_dict" should not be
+            # True
+            assert not instances_iterable.empty if impl.collection else True
+
+            if load:
+                # for a full merge, pre-load the destination collection,
+                # so that the individual _merge of each item pulls from the
+                # identity map for those already present.  also assumes
+                # CollectionAttributeImpl behavior of loading the "old"
+                # list in any case
+                dest_state.get_impl(self.key).get(
+                    dest_state, dest_dict, passive=PassiveFlag.PASSIVE_MERGE
+                )
+
+            dest_list = []
+            for current in instances_iterable:
+                current_state = attributes.instance_state(current)
+                current_dict = attributes.instance_dict(current)
+                _recursive[(current_state, self)] = True
+                obj = session._merge(
+                    current_state,
+                    current_dict,
+                    load=load,
+                    _recursive=_recursive,
+                    _resolve_conflict_map=_resolve_conflict_map,
+                )
+                if obj is not None:
+                    dest_list.append(obj)
+
+            if not load:
+                coll = attributes.init_state_collection(
+                    dest_state, dest_dict, self.key
+                )
+                for c in dest_list:
+                    coll.append_without_event(c)
+            else:
+                dest_impl = dest_state.get_impl(self.key)
+                assert is_has_collection_adapter(dest_impl)
+                dest_impl.set(
+                    dest_state,
+                    dest_dict,
+                    dest_list,
+                    _adapt=False,
+                    passive=PassiveFlag.PASSIVE_MERGE,
+                )
+        else:
+            current = source_dict[self.key]
+            if current is not None:
+                current_state = attributes.instance_state(current)
+                current_dict = attributes.instance_dict(current)
+                _recursive[(current_state, self)] = True
+                obj = session._merge(
+                    current_state,
+                    current_dict,
+                    load=load,
+                    _recursive=_recursive,
+                    _resolve_conflict_map=_resolve_conflict_map,
+                )
+            else:
+                obj = None
+
+            if not load:
+                dest_dict[self.key] = obj
+            else:
+                dest_state.get_impl(self.key).set(
+                    dest_state, dest_dict, obj, None
+                )
+
+    def _value_as_iterable(
+        self,
+        state: InstanceState[_O],
+        dict_: _InstanceDict,
+        key: str,
+        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
+    ) -> Sequence[Tuple[InstanceState[_O], _O]]:
+        """Return a list of tuples (state, obj) for the given
+        key.
+
+        Returns an empty list if the value is None/empty/PASSIVE_NO_RESULT.
+        """
+
+        impl = state.manager[key].impl
+        x = impl.get(state, dict_, passive=passive)
+        if x is LoaderCallableStatus.PASSIVE_NO_RESULT or x is None:
+            return []
+        elif is_has_collection_adapter(impl):
+            return [
+                (attributes.instance_state(o), o)
+                for o in impl.get_collection(state, dict_, x, passive=passive)
+            ]
+        else:
+            return [(attributes.instance_state(x), x)]
+
+    def cascade_iterator(
+        self,
+        type_: str,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        visited_states: Set[InstanceState[Any]],
+        halt_on: Optional[Callable[[InstanceState[Any]], bool]] = None,
+    ) -> Iterator[Tuple[Any, Mapper[Any], InstanceState[Any], _InstanceDict]]:
+        # assert type_ in self._cascade
+
+        # only actively lazy load on the 'delete' cascade
+        if type_ != "delete" or self.passive_deletes:
+            passive = PassiveFlag.PASSIVE_NO_INITIALIZE
+        else:
+            passive = PassiveFlag.PASSIVE_OFF | PassiveFlag.NO_RAISE
+
+        if type_ == "save-update":
+            tuples = state.manager[self.key].impl.get_all_pending(state, dict_)
+        else:
+            tuples = self._value_as_iterable(
+                state, dict_, self.key, passive=passive
+            )
+
+        skip_pending = (
+            type_ == "refresh-expire" and "delete-orphan" not in self._cascade
+        )
+
+        for instance_state, c in tuples:
+            if instance_state in visited_states:
+                continue
+
+            if c is None:
+                # would like to emit a warning here, but that
+                # would not be consistent with collection.append(None)'s
+                # current behavior of silently skipping.
+                # see [ticket:2229]
+                continue
+
+            assert instance_state is not None
+            instance_dict = attributes.instance_dict(c)
+
+            if halt_on and halt_on(instance_state):
+                continue
+
+            if skip_pending and not instance_state.key:
+                continue
+
+            instance_mapper = instance_state.manager.mapper
+
+            if not instance_mapper.isa(self.mapper.class_manager.mapper):
+                raise AssertionError(
+                    "Attribute '%s' on class '%s' "
+                    "doesn't handle objects "
+                    "of type '%s'"
+                    % (self.key, self.parent.class_, c.__class__)
+                )
+
+            visited_states.add(instance_state)
+
+            yield c, instance_mapper, instance_state, instance_dict
+
+    @property
+    def _effective_sync_backref(self) -> bool:
+        if self.viewonly:
+            return False
+        else:
+            return self.sync_backref is not False
+
+    @staticmethod
+    def _check_sync_backref(
+        rel_a: RelationshipProperty[Any], rel_b: RelationshipProperty[Any]
+    ) -> None:
+        if rel_a.viewonly and rel_b.sync_backref:
+            raise sa_exc.InvalidRequestError(
+                "Relationship %s cannot specify sync_backref=True since %s "
+                "includes viewonly=True." % (rel_b, rel_a)
+            )
+        if (
+            rel_a.viewonly
+            and not rel_b.viewonly
+            and rel_b.sync_backref is not False
+        ):
+            rel_b.sync_backref = False
+
+    def _add_reverse_property(self, key: str) -> None:
+        other = self.mapper.get_property(key, _configure_mappers=False)
+        if not isinstance(other, RelationshipProperty):
+            raise sa_exc.InvalidRequestError(
+                "back_populates on relationship '%s' refers to attribute '%s' "
+                "that is not a relationship.  The back_populates parameter "
+                "should refer to the name of a relationship on the target "
+                "class." % (self, other)
+            )
+        # viewonly and sync_backref cases
+        # 1. self.viewonly==True and other.sync_backref==True -> error
+        # 2. self.viewonly==True and other.viewonly==False and
+        #    other.sync_backref==None -> set other.sync_backref to False
+        self._check_sync_backref(self, other)
+        # 3. other.viewonly==True and self.sync_backref==True -> error
+        # 4. other.viewonly==True and self.viewonly==False and
+        #    self.sync_backref==None -> set self.sync_backref to False
+        self._check_sync_backref(other, self)
+
+        self._reverse_property.add(other)
+        other._reverse_property.add(self)
+
+        other._setup_entity()
+
+        if not other.mapper.common_parent(self.parent):
+            raise sa_exc.ArgumentError(
+                "reverse_property %r on "
+                "relationship %s references relationship %s, which "
+                "does not reference mapper %s"
+                % (key, self, other, self.parent)
+            )
+
+        if (
+            other._configure_started
+            and self.direction in (ONETOMANY, MANYTOONE)
+            and self.direction == other.direction
+        ):
+            raise sa_exc.ArgumentError(
+                "%s and back-reference %s are "
+                "both of the same direction %r.  Did you mean to "
+                "set remote_side on the many-to-one side ?"
+                % (other, self, self.direction)
+            )
+
+    @util.memoized_property
+    def entity(self) -> _InternalEntityType[_T]:
+        """Return the target mapped entity, which is an inspect() of the
+        class or aliased class that is referenced by this
+        :class:`.RelationshipProperty`.
+
+        """
+        self.parent._check_configure()
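+        # _check_configure() triggers mapper configuration, during which
+        # _setup_entity() assigns self.entity directly into the instance
+        # __dict__, shadowing this memoized property; the attribute
+        # access below therefore returns the configured value rather
+        # than recursing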
+        return self.entity
+
+    @util.memoized_property
+    def mapper(self) -> Mapper[_T]:
+        """Return the targeted :class:`_orm.Mapper` for this
+        :class:`.RelationshipProperty`.
+
+        """
+        return self.entity.mapper
+
+    def do_init(self) -> None:
+        self._check_conflicts()
+        self._process_dependent_arguments()
+        self._setup_entity()
+        self._setup_registry_dependencies()
+        self._setup_join_conditions()
+        self._check_cascade_settings(self._cascade)
+        self._post_init()
+        self._generate_backref()
+        self._join_condition._warn_for_conflicting_sync_targets()
+        super().do_init()
+        self._lazy_strategy = cast(
+            "LazyLoader", self._get_strategy((("lazy", "select"),))
+        )
+
+    def _setup_registry_dependencies(self) -> None:
+        self.parent.mapper.registry._set_depends_on(
+            self.entity.mapper.registry
+        )
+
+    def _process_dependent_arguments(self) -> None:
+        """Convert incoming configuration arguments to their
+        proper form.
+
+        Callables are resolved, ORM annotations removed.
+
+        """
+
+        # accept callables for other attributes which may require
+        # deferred initialization.  This technique is used
+        # by declarative "string configs" and some recipes.
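+        # e.g. (illustrative sketch):
+        #
+        #   relationship("Child", primaryjoin="Parent.id == Child.parent_id")
+        #
+        # where the primaryjoin string is resolved against the class
+        # registry below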
+        init_args = self._init_args
+
+        for attr in (
+            "order_by",
+            "primaryjoin",
+            "secondaryjoin",
+            "secondary",
+            "foreign_keys",
+            "remote_side",
+        ):
+            rel_arg = getattr(init_args, attr)
+
+            rel_arg._resolve_against_registry(self._clsregistry_resolvers[1])
+
+        # remove "annotations" which are present if mapped class
+        # descriptors are used to create the join expression.
+        for attr in "primaryjoin", "secondaryjoin":
+            rel_arg = getattr(init_args, attr)
+            val = rel_arg.resolved
+            if val is not None:
+                rel_arg.resolved = _orm_deannotate(
+                    coercions.expect(
+                        roles.ColumnArgumentRole, val, argname=attr
+                    )
+                )
+
+        secondary = init_args.secondary.resolved
+        if secondary is not None and _is_mapped_class(secondary):
+            raise sa_exc.ArgumentError(
+                "secondary argument %s passed to to relationship() %s must "
+                "be a Table object or other FROM clause; can't send a mapped "
+                "class directly as rows in 'secondary' are persisted "
+                "independently of a class that is mapped "
+                "to that same table." % (secondary, self)
+            )
+
+        # ensure expressions in self.order_by, foreign_keys,
+        # remote_side are all columns, not strings.
+        if (
+            init_args.order_by.resolved is not False
+            and init_args.order_by.resolved is not None
+        ):
+            self.order_by = tuple(
+                coercions.expect(
+                    roles.ColumnArgumentRole, x, argname="order_by"
+                )
+                for x in util.to_list(init_args.order_by.resolved)
+            )
+        else:
+            self.order_by = False
+
+        self._user_defined_foreign_keys = util.column_set(
+            coercions.expect(
+                roles.ColumnArgumentRole, x, argname="foreign_keys"
+            )
+            for x in util.to_column_set(init_args.foreign_keys.resolved)
+        )
+
+        self.remote_side = util.column_set(
+            coercions.expect(
+                roles.ColumnArgumentRole, x, argname="remote_side"
+            )
+            for x in util.to_column_set(init_args.remote_side.resolved)
+        )
+
+    def declarative_scan(
+        self,
+        decl_scan: _ClassScanMapperConfig,
+        registry: _RegistryType,
+        cls: Type[Any],
+        originating_module: Optional[str],
+        key: str,
+        mapped_container: Optional[Type[Mapped[Any]]],
+        annotation: Optional[_AnnotationScanType],
+        extracted_mapped_annotation: Optional[_AnnotationScanType],
+        is_dataclass_field: bool,
+    ) -> None:
+        argument = extracted_mapped_annotation
+
+        if extracted_mapped_annotation is None:
+            if self.argument is None:
+                self._raise_for_required(key, cls)
+            else:
+                return
+
+        assert originating_module is not None
+
+        if mapped_container is not None:
+            is_write_only = issubclass(mapped_container, WriteOnlyMapped)
+            is_dynamic = issubclass(mapped_container, DynamicMapped)
+            if is_write_only:
+                self.lazy = "write_only"
+                self.strategy_key = (("lazy", self.lazy),)
+            elif is_dynamic:
+                self.lazy = "dynamic"
+                self.strategy_key = (("lazy", self.lazy),)
+        else:
+            is_write_only = is_dynamic = False
+
+        argument = de_optionalize_union_types(argument)
+
+        if hasattr(argument, "__origin__"):
+            arg_origin = argument.__origin__
+            if isinstance(arg_origin, type) and issubclass(
+                arg_origin, abc.Collection
+            ):
+                if self.collection_class is None:
+                    if _py_inspect.isabstract(arg_origin):
+                        raise sa_exc.ArgumentError(
+                            f"Collection annotation type {arg_origin} cannot "
+                            "be instantiated; please provide an explicit "
+                            "'collection_class' parameter "
+                            "(e.g. list, set, etc.) to the "
+                            "relationship() function to accompany this "
+                            "annotation"
+                        )
+
+                    self.collection_class = arg_origin
+
+            elif not is_write_only and not is_dynamic:
+                self.uselist = False
+
+            if argument.__args__:  # type: ignore
+                if isinstance(arg_origin, type) and issubclass(
+                    arg_origin, typing.Mapping
+                ):
+                    type_arg = argument.__args__[-1]  # type: ignore
+                else:
+                    type_arg = argument.__args__[0]  # type: ignore
+                if hasattr(type_arg, "__forward_arg__"):
+                    str_argument = type_arg.__forward_arg__
+
+                    argument = resolve_name_to_real_class_name(
+                        str_argument, originating_module
+                    )
+                else:
+                    argument = type_arg
+            else:
+                raise sa_exc.ArgumentError(
+                    f"Generic alias {argument} requires an argument"
+                )
+        elif hasattr(argument, "__forward_arg__"):
+            argument = argument.__forward_arg__
+
+            argument = resolve_name_to_real_class_name(
+                argument, originating_module
+            )
+
+        if (
+            self.collection_class is None
+            and not is_write_only
+            and not is_dynamic
+        ):
+            self.uselist = False
+
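+        # summary of the annotation interpretation above (sketch):
+        #
+        #   Mapped["B"]          -> uselist=False
+        #   Mapped[List["B"]]    -> collection_class=list
+        #   WriteOnlyMapped["B"] -> lazy="write_only"
+        #   DynamicMapped["B"]   -> lazy="dynamic"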
+        # ticket #8759
+        # if a lead argument was given to relationship(), like
+        # `relationship("B")`, use that, don't replace it with class we
+        # found in the annotation.  The declarative_scan() method call here is
+        # still useful, as we continue to derive collection type and do
+        # checking of the annotation in any case.
+        if self.argument is None:
+            self.argument = cast("_RelationshipArgumentType[_T]", argument)
+
+    @util.preload_module("sqlalchemy.orm.mapper")
+    def _setup_entity(self, __argument: Any = None) -> None:
+        if "entity" in self.__dict__:
+            return
+
+        mapperlib = util.preloaded.orm_mapper
+
+        if __argument:
+            argument = __argument
+        else:
+            argument = self.argument
+
+        resolved_argument: _ExternalEntityType[Any]
+
+        if isinstance(argument, str):
+            # we might want to cleanup clsregistry API to make this
+            # more straightforward
+            resolved_argument = cast(
+                "_ExternalEntityType[Any]",
+                self._clsregistry_resolve_name(argument)(),
+            )
+        elif callable(argument) and not isinstance(
+            argument, (type, mapperlib.Mapper)
+        ):
+            resolved_argument = argument()
+        else:
+            resolved_argument = argument
+
+        entity: _InternalEntityType[Any]
+
+        if isinstance(resolved_argument, type):
+            entity = class_mapper(resolved_argument, configure=False)
+        else:
+            try:
+                entity = inspect(resolved_argument)
+            except sa_exc.NoInspectionAvailable:
+                entity = None  # type: ignore
+
+            if not hasattr(entity, "mapper"):
+                raise sa_exc.ArgumentError(
+                    "relationship '%s' expects "
+                    "a class or a mapper argument (received: %s)"
+                    % (self.key, type(resolved_argument))
+                )
+
+        self.entity = entity
+        self.target = self.entity.persist_selectable
+
+    def _setup_join_conditions(self) -> None:
+        self._join_condition = jc = JoinCondition(
+            parent_persist_selectable=self.parent.persist_selectable,
+            child_persist_selectable=self.entity.persist_selectable,
+            parent_local_selectable=self.parent.local_table,
+            child_local_selectable=self.entity.local_table,
+            primaryjoin=self._init_args.primaryjoin.resolved,
+            secondary=self._init_args.secondary.resolved,
+            secondaryjoin=self._init_args.secondaryjoin.resolved,
+            parent_equivalents=self.parent._equivalent_columns,
+            child_equivalents=self.mapper._equivalent_columns,
+            consider_as_foreign_keys=self._user_defined_foreign_keys,
+            local_remote_pairs=self.local_remote_pairs,
+            remote_side=self.remote_side,
+            self_referential=self._is_self_referential,
+            prop=self,
+            support_sync=not self.viewonly,
+            can_be_synced_fn=self._columns_are_mapped,
+        )
+        self.primaryjoin = jc.primaryjoin
+        self.secondaryjoin = jc.secondaryjoin
+        self.secondary = jc.secondary
+        self.direction = jc.direction
+        self.local_remote_pairs = jc.local_remote_pairs
+        self.remote_side = jc.remote_columns
+        self.local_columns = jc.local_columns
+        self.synchronize_pairs = jc.synchronize_pairs
+        self._calculated_foreign_keys = jc.foreign_key_columns
+        self.secondary_synchronize_pairs = jc.secondary_synchronize_pairs
+
+    @property
+    def _clsregistry_resolve_arg(
+        self,
+    ) -> Callable[[str, bool], _class_resolver]:
+        return self._clsregistry_resolvers[1]
+
+    @property
+    def _clsregistry_resolve_name(
+        self,
+    ) -> Callable[[str], Callable[[], Union[Type[Any], Table, _ModNS]]]:
+        return self._clsregistry_resolvers[0]
+
+    @util.memoized_property
+    @util.preload_module("sqlalchemy.orm.clsregistry")
+    def _clsregistry_resolvers(
+        self,
+    ) -> Tuple[
+        Callable[[str], Callable[[], Union[Type[Any], Table, _ModNS]]],
+        Callable[[str, bool], _class_resolver],
+    ]:
+        _resolver = util.preloaded.orm_clsregistry._resolver
+
+        return _resolver(self.parent.class_, self)
+
+    def _check_conflicts(self) -> None:
+        """Test that this relationship is legal, warn about
+        inheritance conflicts."""
+        if self.parent.non_primary and not class_mapper(
+            self.parent.class_, configure=False
+        ).has_property(self.key):
+            raise sa_exc.ArgumentError(
+                "Attempting to assign a new "
+                "relationship '%s' to a non-primary mapper on "
+                "class '%s'.  New relationships can only be added "
+                "to the primary mapper, i.e. the very first mapper "
+                "created for class '%s' "
+                % (
+                    self.key,
+                    self.parent.class_.__name__,
+                    self.parent.class_.__name__,
+                )
+            )
+
+    @property
+    def cascade(self) -> CascadeOptions:
+        """Return the current cascade setting for this
+        :class:`.RelationshipProperty`.
+        """
+        return self._cascade
+
+    @cascade.setter
+    def cascade(self, cascade: Union[str, CascadeOptions]) -> None:
+        self._set_cascade(cascade)
+
+    def _set_cascade(self, cascade_arg: Union[str, CascadeOptions]) -> None:
+        cascade = CascadeOptions(cascade_arg)
+
+        if self.viewonly:
+            cascade = CascadeOptions(
+                cascade.intersection(CascadeOptions._viewonly_cascades)
+            )
+
+        if "mapper" in self.__dict__:
+            self._check_cascade_settings(cascade)
+        self._cascade = cascade
+
+        if self._dependency_processor:
+            self._dependency_processor.cascade = cascade
+
+    def _check_cascade_settings(self, cascade: CascadeOptions) -> None:
+        if (
+            cascade.delete_orphan
+            and not self.single_parent
+            and (self.direction is MANYTOMANY or self.direction is MANYTOONE)
+        ):
+            raise sa_exc.ArgumentError(
+                "For %(direction)s relationship %(rel)s, delete-orphan "
+                "cascade is normally "
+                'configured only on the "one" side of a one-to-many '
+                "relationship, "
+                'and not on the "many" side of a many-to-one or many-to-many '
+                "relationship.  "
+                "To force this relationship to allow a particular "
+                '"%(relatedcls)s" object to be referenced by only '
+                'a single "%(clsname)s" object at a time via the '
+                "%(rel)s relationship, which "
+                "would allow "
+                "delete-orphan cascade to take place in this direction, set "
+                "the single_parent=True flag."
+                % {
+                    "rel": self,
+                    "direction": (
+                        "many-to-one"
+                        if self.direction is MANYTOONE
+                        else "many-to-many"
+                    ),
+                    "clsname": self.parent.class_.__name__,
+                    "relatedcls": self.mapper.class_.__name__,
+                },
+                code="bbf0",
+            )
+
+        if self.passive_deletes == "all" and (
+            "delete" in cascade or "delete-orphan" in cascade
+        ):
+            raise sa_exc.ArgumentError(
+                "On %s, can't set passive_deletes='all' in conjunction "
+                "with 'delete' or 'delete-orphan' cascade" % self
+            )
+
+        if cascade.delete_orphan:
+            self.mapper.primary_mapper()._delete_orphans.append(
+                (self.key, self.parent.class_)
+            )
+
+    def _persists_for(self, mapper: Mapper[Any]) -> bool:
+        """Return True if this property will persist values on behalf
+        of the given mapper.
+
+        """
+
+        return (
+            self.key in mapper.relationships
+            and mapper.relationships[self.key] is self
+        )
+
+    def _columns_are_mapped(self, *cols: ColumnElement[Any]) -> bool:
+        """Return True if all columns in the given collection are
+        mapped by the tables referenced by this :class:`.RelationshipProperty`.
+
+        """
+
+        secondary = self._init_args.secondary.resolved
+        for c in cols:
+            if secondary is not None and secondary.c.contains_column(c):
+                continue
+            if not self.parent.persist_selectable.c.contains_column(
+                c
+            ) and not self.target.c.contains_column(c):
+                return False
+        return True
+
+    def _generate_backref(self) -> None:
+        """Interpret the 'backref' instruction to create a
+        :func:`_orm.relationship` complementary to this one."""
+
+        if self.parent.non_primary:
+            return
+        if self.backref is not None and not self.back_populates:
+            kwargs: Dict[str, Any]
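+            # self.backref is either a plain string name, or a
+            # (name, kwargs) tuple as returned by the backref() function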
+            if isinstance(self.backref, str):
+                backref_key, kwargs = self.backref, {}
+            else:
+                backref_key, kwargs = self.backref
+            mapper = self.mapper.primary_mapper()
+
+            if not mapper.concrete:
+                check = set(mapper.iterate_to_root()).union(
+                    mapper.self_and_descendants
+                )
+                for m in check:
+                    if m.has_property(backref_key) and not m.concrete:
+                        raise sa_exc.ArgumentError(
+                            "Error creating backref "
+                            "'%s' on relationship '%s': property of that "
+                            "name exists on mapper '%s'"
+                            % (backref_key, self, m)
+                        )
+
+            # determine primaryjoin/secondaryjoin for the
+            # backref.  Use the one we had, so that
+            # a custom join doesn't have to be specified in
+            # both directions.
+            if self.secondary is not None:
+                # for many to many, just switch primaryjoin/
+                # secondaryjoin.   use the annotated
+                # pj/sj on the _join_condition.
+                pj = kwargs.pop(
+                    "primaryjoin",
+                    self._join_condition.secondaryjoin_minus_local,
+                )
+                sj = kwargs.pop(
+                    "secondaryjoin",
+                    self._join_condition.primaryjoin_minus_local,
+                )
+            else:
+                pj = kwargs.pop(
+                    "primaryjoin",
+                    self._join_condition.primaryjoin_reverse_remote,
+                )
+                sj = kwargs.pop("secondaryjoin", None)
+                if sj:
+                    raise sa_exc.InvalidRequestError(
+                        "Can't assign 'secondaryjoin' on a backref "
+                        "against a non-secondary relationship."
+                    )
+
+            foreign_keys = kwargs.pop(
+                "foreign_keys", self._user_defined_foreign_keys
+            )
+            parent = self.parent.primary_mapper()
+            kwargs.setdefault("viewonly", self.viewonly)
+            kwargs.setdefault("post_update", self.post_update)
+            kwargs.setdefault("passive_updates", self.passive_updates)
+            kwargs.setdefault("sync_backref", self.sync_backref)
+            self.back_populates = backref_key
+            relationship = RelationshipProperty(
+                parent,
+                self.secondary,
+                primaryjoin=pj,
+                secondaryjoin=sj,
+                foreign_keys=foreign_keys,
+                back_populates=self.key,
+                **kwargs,
+            )
+            mapper._configure_property(
+                backref_key, relationship, warn_for_existing=True
+            )
+
+        if self.back_populates:
+            self._add_reverse_property(self.back_populates)
+
+    @util.preload_module("sqlalchemy.orm.dependency")
+    def _post_init(self) -> None:
+        dependency = util.preloaded.orm_dependency
+
+        if self.uselist is None:
+            self.uselist = self.direction is not MANYTOONE
+        if not self.viewonly:
+            self._dependency_processor = (  # type: ignore
+                dependency.DependencyProcessor.from_relationship
+            )(self)
+
+    @util.memoized_property
+    def _use_get(self) -> bool:
+        """memoize the 'use_get' attribute of this RelationshipLoader's
+        lazyloader."""
+
+        strategy = self._lazy_strategy
+        return strategy.use_get
+
+    @util.memoized_property
+    def _is_self_referential(self) -> bool:
+        return self.mapper.common_parent(self.parent)
+
+    def _create_joins(
+        self,
+        source_polymorphic: bool = False,
+        source_selectable: Optional[FromClause] = None,
+        dest_selectable: Optional[FromClause] = None,
+        of_type_entity: Optional[_InternalEntityType[Any]] = None,
+        alias_secondary: bool = False,
+        extra_criteria: Tuple[ColumnElement[bool], ...] = (),
+    ) -> Tuple[
+        ColumnElement[bool],
+        Optional[ColumnElement[bool]],
+        FromClause,
+        FromClause,
+        Optional[FromClause],
+        Optional[ClauseAdapter],
+    ]:
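+        """Produce the join elements for this relationship: a tuple of
+        (primaryjoin, secondaryjoin, source_selectable, dest_selectable,
+        secondary, target_adapter).
+
+        """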
+        aliased = False
+
+        if alias_secondary and self.secondary is not None:
+            aliased = True
+
+        if source_selectable is None:
+            if source_polymorphic and self.parent.with_polymorphic:
+                source_selectable = self.parent._with_polymorphic_selectable
+
+        if of_type_entity:
+            dest_mapper = of_type_entity.mapper
+            if dest_selectable is None:
+                dest_selectable = of_type_entity.selectable
+                aliased = True
+        else:
+            dest_mapper = self.mapper
+
+        if dest_selectable is None:
+            dest_selectable = self.entity.selectable
+            if self.mapper.with_polymorphic:
+                aliased = True
+
+            if self._is_self_referential and source_selectable is None:
+                dest_selectable = dest_selectable._anonymous_fromclause()
+                aliased = True
+        elif (
+            dest_selectable is not self.mapper._with_polymorphic_selectable
+            or self.mapper.with_polymorphic
+        ):
+            aliased = True
+
+        single_crit = dest_mapper._single_table_criterion
+        aliased = aliased or (
+            source_selectable is not None
+            and (
+                source_selectable
+                is not self.parent._with_polymorphic_selectable
+                or source_selectable._is_subquery
+            )
+        )
+
+        (
+            primaryjoin,
+            secondaryjoin,
+            secondary,
+            target_adapter,
+            dest_selectable,
+        ) = self._join_condition.join_targets(
+            source_selectable,
+            dest_selectable,
+            aliased,
+            single_crit,
+            extra_criteria,
+        )
+        if source_selectable is None:
+            source_selectable = self.parent.local_table
+        if dest_selectable is None:
+            dest_selectable = self.entity.local_table
+        return (
+            primaryjoin,
+            secondaryjoin,
+            source_selectable,
+            dest_selectable,
+            secondary,
+            target_adapter,
+        )
+
+
+def _annotate_columns(element: _CE, annotations: _AnnotationDict) -> _CE:
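+    """Deep-copy the given element, annotating each ColumnClause with a
+    copy of the given annotations dictionary."""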
+    def clone(elem: _CE) -> _CE:
+        if isinstance(elem, expression.ColumnClause):
+            elem = elem._annotate(annotations.copy())  # type: ignore
+        elem._copy_internals(clone=clone)
+        return elem
+
+    if element is not None:
+        element = clone(element)
+    clone = None  # type: ignore # remove gc cycles
+    return element
+
+
+class JoinCondition:
+    primaryjoin_initial: Optional[ColumnElement[bool]]
+    primaryjoin: ColumnElement[bool]
+    secondaryjoin: Optional[ColumnElement[bool]]
+    secondary: Optional[FromClause]
+    prop: RelationshipProperty[Any]
+
+    synchronize_pairs: _ColumnPairs
+    secondary_synchronize_pairs: _ColumnPairs
+    direction: RelationshipDirection
+
+    parent_persist_selectable: FromClause
+    child_persist_selectable: FromClause
+    parent_local_selectable: FromClause
+    child_local_selectable: FromClause
+
+    _local_remote_pairs: Optional[_ColumnPairs]
+
+    def __init__(
+        self,
+        parent_persist_selectable: FromClause,
+        child_persist_selectable: FromClause,
+        parent_local_selectable: FromClause,
+        child_local_selectable: FromClause,
+        *,
+        primaryjoin: Optional[ColumnElement[bool]] = None,
+        secondary: Optional[FromClause] = None,
+        secondaryjoin: Optional[ColumnElement[bool]] = None,
+        parent_equivalents: Optional[_EquivalentColumnMap] = None,
+        child_equivalents: Optional[_EquivalentColumnMap] = None,
+        consider_as_foreign_keys: Any = None,
+        local_remote_pairs: Optional[_ColumnPairs] = None,
+        remote_side: Any = None,
+        self_referential: Any = False,
+        prop: RelationshipProperty[Any],
+        support_sync: bool = True,
+        can_be_synced_fn: Callable[..., bool] = lambda *c: True,
+    ):
+        self.parent_persist_selectable = parent_persist_selectable
+        self.parent_local_selectable = parent_local_selectable
+        self.child_persist_selectable = child_persist_selectable
+        self.child_local_selectable = child_local_selectable
+        self.parent_equivalents = parent_equivalents
+        self.child_equivalents = child_equivalents
+        self.primaryjoin_initial = primaryjoin
+        self.secondaryjoin = secondaryjoin
+        self.secondary = secondary
+        self.consider_as_foreign_keys = consider_as_foreign_keys
+        self._local_remote_pairs = local_remote_pairs
+        self._remote_side = remote_side
+        self.prop = prop
+        self.self_referential = self_referential
+        self.support_sync = support_sync
+        self.can_be_synced_fn = can_be_synced_fn
+
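+        # analysis pipeline: determine the join expressions, strip
+        # leaked annotations, annotate foreign/remote/local columns,
+        # derive the synchronize and local/remote pairs, then validate
+        # the configuration and log the results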
+        self._determine_joins()
+        assert self.primaryjoin is not None
+
+        self._sanitize_joins()
+        self._annotate_fks()
+        self._annotate_remote()
+        self._annotate_local()
+        self._annotate_parentmapper()
+        self._setup_pairs()
+        self._check_foreign_cols(self.primaryjoin, True)
+        if self.secondaryjoin is not None:
+            self._check_foreign_cols(self.secondaryjoin, False)
+        self._determine_direction()
+        self._check_remote_side()
+        self._log_joins()
+
+    def _log_joins(self) -> None:
+        log = self.prop.logger
+        log.info("%s setup primary join %s", self.prop, self.primaryjoin)
+        log.info("%s setup secondary join %s", self.prop, self.secondaryjoin)
+        log.info(
+            "%s synchronize pairs [%s]",
+            self.prop,
+            ",".join(
+                "(%s => %s)" % (l, r) for (l, r) in self.synchronize_pairs
+            ),
+        )
+        log.info(
+            "%s secondary synchronize pairs [%s]",
+            self.prop,
+            ",".join(
+                "(%s => %s)" % (l, r)
+                for (l, r) in self.secondary_synchronize_pairs or []
+            ),
+        )
+        log.info(
+            "%s local/remote pairs [%s]",
+            self.prop,
+            ",".join(
+                "(%s / %s)" % (l, r) for (l, r) in self.local_remote_pairs
+            ),
+        )
+        log.info(
+            "%s remote columns [%s]",
+            self.prop,
+            ",".join("%s" % col for col in self.remote_columns),
+        )
+        log.info(
+            "%s local columns [%s]",
+            self.prop,
+            ",".join("%s" % col for col in self.local_columns),
+        )
+        log.info("%s relationship direction %s", self.prop, self.direction)
+
+    def _sanitize_joins(self) -> None:
+        """remove the parententity annotation from our join conditions which
+        can leak in here based on some declarative patterns and maybe others.
+
+        "parentmapper" is relied upon both by the ORM evaluator as well as
+        the use case in _join_fixture_inh_selfref_w_entity
+        that relies upon it being present, see :ticket:`3364`.
+
+        """
+
+        self.primaryjoin = _deep_deannotate(
+            self.primaryjoin, values=("parententity", "proxy_key")
+        )
+        if self.secondaryjoin is not None:
+            self.secondaryjoin = _deep_deannotate(
+                self.secondaryjoin, values=("parententity", "proxy_key")
+            )
+
+    def _determine_joins(self) -> None:
+        """Determine the 'primaryjoin' and 'secondaryjoin' attributes,
+        if not passed to the constructor already.
+
+        This is based on analysis of the foreign key relationships
+        between the parent and target mapped selectables.
+
+        """
+        if self.secondaryjoin is not None and self.secondary is None:
+            raise sa_exc.ArgumentError(
+                "Property %s specified with secondary "
+                "join condition but "
+                "no secondary argument" % self.prop
+            )
+
+        # find a join between the given mapper's mapped table and
+        # the given table.  the mapper's local table is tried first
+        # for more specificity; if no join is found there, the more
+        # general mapped table, which in the case of inheritance is
+        # a join, is tried next.
+        try:
+            consider_as_foreign_keys = self.consider_as_foreign_keys or None
+            if self.secondary is not None:
+                if self.secondaryjoin is None:
+                    self.secondaryjoin = join_condition(
+                        self.child_persist_selectable,
+                        self.secondary,
+                        a_subset=self.child_local_selectable,
+                        consider_as_foreign_keys=consider_as_foreign_keys,
+                    )
+                if self.primaryjoin_initial is None:
+                    self.primaryjoin = join_condition(
+                        self.parent_persist_selectable,
+                        self.secondary,
+                        a_subset=self.parent_local_selectable,
+                        consider_as_foreign_keys=consider_as_foreign_keys,
+                    )
+                else:
+                    self.primaryjoin = self.primaryjoin_initial
+            else:
+                if self.primaryjoin_initial is None:
+                    self.primaryjoin = join_condition(
+                        self.parent_persist_selectable,
+                        self.child_persist_selectable,
+                        a_subset=self.parent_local_selectable,
+                        consider_as_foreign_keys=consider_as_foreign_keys,
+                    )
+                else:
+                    self.primaryjoin = self.primaryjoin_initial
+        except sa_exc.NoForeignKeysError as nfe:
+            if self.secondary is not None:
+                raise sa_exc.NoForeignKeysError(
+                    "Could not determine join "
+                    "condition between parent/child tables on "
+                    "relationship %s - there are no foreign keys "
+                    "linking these tables via secondary table '%s'.  "
+                    "Ensure that referencing columns are associated "
+                    "with a ForeignKey or ForeignKeyConstraint, or "
+                    "specify 'primaryjoin' and 'secondaryjoin' "
+                    "expressions." % (self.prop, self.secondary)
+                ) from nfe
+            else:
+                raise sa_exc.NoForeignKeysError(
+                    "Could not determine join "
+                    "condition between parent/child tables on "
+                    "relationship %s - there are no foreign keys "
+                    "linking these tables.  "
+                    "Ensure that referencing columns are associated "
+                    "with a ForeignKey or ForeignKeyConstraint, or "
+                    "specify a 'primaryjoin' expression." % self.prop
+                ) from nfe
+        except sa_exc.AmbiguousForeignKeysError as afe:
+            if self.secondary is not None:
+                raise sa_exc.AmbiguousForeignKeysError(
+                    "Could not determine join "
+                    "condition between parent/child tables on "
+                    "relationship %s - there are multiple foreign key "
+                    "paths linking the tables via secondary table '%s'.  "
+                    "Specify the 'foreign_keys' "
+                    "argument, providing a list of those columns which "
+                    "should be counted as containing a foreign key "
+                    "reference from the secondary table to each of the "
+                    "parent and child tables." % (self.prop, self.secondary)
+                ) from afe
+            else:
+                raise sa_exc.AmbiguousForeignKeysError(
+                    "Could not determine join "
+                    "condition between parent/child tables on "
+                    "relationship %s - there are multiple foreign key "
+                    "paths linking the tables.  Specify the "
+                    "'foreign_keys' argument, providing a list of those "
+                    "columns which should be counted as containing a "
+                    "foreign key reference to the parent table." % self.prop
+                ) from afe
+
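+    # A minimal sketch of the inference above, with illustrative table
+    # names that are assumptions of this comment, not of the module: for
+    # a plain one-to-many schema, join_condition() arrives at
+    # ``parent.c.id == child.c.parent_id``::
+    #
+    #     from sqlalchemy import (
+    #         Column, ForeignKey, Integer, MetaData, Table
+    #     )
+    #     from sqlalchemy.sql.util import join_condition
+    #
+    #     m = MetaData()
+    #     parent = Table(
+    #         "parent", m, Column("id", Integer, primary_key=True)
+    #     )
+    #     child = Table(
+    #         "child",
+    #         m,
+    #         Column("id", Integer, primary_key=True),
+    #         Column("parent_id", ForeignKey("parent.id")),
+    #     )
+    #     print(join_condition(parent, child))
+    #     # parent.id = child.parent_id
+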
+    @property
+    def primaryjoin_minus_local(self) -> ColumnElement[bool]:
+        return _deep_deannotate(self.primaryjoin, values=("local", "remote"))
+
+    @property
+    def secondaryjoin_minus_local(self) -> ColumnElement[bool]:
+        assert self.secondaryjoin is not None
+        return _deep_deannotate(self.secondaryjoin, values=("local", "remote"))
+
+    @util.memoized_property
+    def primaryjoin_reverse_remote(self) -> ColumnElement[bool]:
+        """Return the primaryjoin condition suitable for the
+        "reverse" direction.
+
+        If the primaryjoin was delivered here with pre-existing
+        "remote" annotations, the local/remote annotations
+        are reversed.  Otherwise, the local/remote annotations
+        are removed.
+
+        """
+        if self._has_remote_annotations:
+
+            def replace(element: _CE, **kw: Any) -> Optional[_CE]:
+                if "remote" in element._annotations:
+                    v = dict(element._annotations)
+                    del v["remote"]
+                    v["local"] = True
+                    return element._with_annotations(v)
+                elif "local" in element._annotations:
+                    v = dict(element._annotations)
+                    del v["local"]
+                    v["remote"] = True
+                    return element._with_annotations(v)
+
+                return None
+
+            return visitors.replacement_traverse(self.primaryjoin, {}, replace)
+        else:
+            if self._has_foreign_annotations:
+                # TODO: coverage
+                return _deep_deannotate(
+                    self.primaryjoin, values=("local", "remote")
+                )
+            else:
+                return _deep_deannotate(self.primaryjoin)
+
+    def _has_annotation(self, clause: ClauseElement, annotation: str) -> bool:
+        for col in visitors.iterate(clause, {}):
+            if annotation in col._annotations:
+                return True
+        return False
+
+    @util.memoized_property
+    def _has_foreign_annotations(self) -> bool:
+        return self._has_annotation(self.primaryjoin, "foreign")
+
+    @util.memoized_property
+    def _has_remote_annotations(self) -> bool:
+        return self._has_annotation(self.primaryjoin, "remote")
+
+    def _annotate_fks(self) -> None:
+        """Annotate the primaryjoin and secondaryjoin
+        structures with 'foreign' annotations marking columns
+        considered as foreign.
+
+        """
+        if self._has_foreign_annotations:
+            return
+
+        if self.consider_as_foreign_keys:
+            self._annotate_from_fk_list()
+        else:
+            self._annotate_present_fks()
+
+    def _annotate_from_fk_list(self) -> None:
+        def check_fk(element: _CE, **kw: Any) -> Optional[_CE]:
+            if element in self.consider_as_foreign_keys:
+                return element._annotate({"foreign": True})
+            return None
+
+        self.primaryjoin = visitors.replacement_traverse(
+            self.primaryjoin, {}, check_fk
+        )
+        if self.secondaryjoin is not None:
+            self.secondaryjoin = visitors.replacement_traverse(
+                self.secondaryjoin, {}, check_fk
+            )
+
+    def _annotate_present_fks(self) -> None:
+        if self.secondary is not None:
+            secondarycols = util.column_set(self.secondary.c)
+        else:
+            secondarycols = set()
+
+        def is_foreign(
+            a: ColumnElement[Any], b: ColumnElement[Any]
+        ) -> Optional[ColumnElement[Any]]:
+            if isinstance(a, schema.Column) and isinstance(b, schema.Column):
+                if a.references(b):
+                    return a
+                elif b.references(a):
+                    return b
+
+            if secondarycols:
+                if a in secondarycols and b not in secondarycols:
+                    return a
+                elif b in secondarycols and a not in secondarycols:
+                    return b
+
+            return None
+
+        def visit_binary(binary: BinaryExpression[Any]) -> None:
+            if not isinstance(
+                binary.left, sql.ColumnElement
+            ) or not isinstance(binary.right, sql.ColumnElement):
+                return
+
+            if (
+                "foreign" not in binary.left._annotations
+                and "foreign" not in binary.right._annotations
+            ):
+                col = is_foreign(binary.left, binary.right)
+                if col is not None:
+                    if col.compare(binary.left):
+                        binary.left = binary.left._annotate({"foreign": True})
+                    elif col.compare(binary.right):
+                        binary.right = binary.right._annotate(
+                            {"foreign": True}
+                        )
+
+        self.primaryjoin = visitors.cloned_traverse(
+            self.primaryjoin, {}, {"binary": visit_binary}
+        )
+        if self.secondaryjoin is not None:
+            self.secondaryjoin = visitors.cloned_traverse(
+                self.secondaryjoin, {}, {"binary": visit_binary}
+            )
+
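+    # The is_foreign() test above relies on the plain Core
+    # ``Column.references()`` check; an illustrative sketch reusing the
+    # hypothetical parent/child tables from the earlier comment:
+    #
+    #     child.c.parent_id.references(parent.c.id)   # True; parent_id
+    #                                                 # carries a ForeignKey
+    #                                                 # to parent.id
+    #     parent.c.id.references(child.c.parent_id)   # False
+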
+    def _refers_to_parent_table(self) -> bool:
+        """Return True if the join condition contains column
+        comparisons where both columns are in both tables.
+
+        """
+        pt = self.parent_persist_selectable
+        mt = self.child_persist_selectable
+        result = False
+
+        def visit_binary(binary: BinaryExpression[Any]) -> None:
+            nonlocal result
+            c, f = binary.left, binary.right
+            if (
+                isinstance(c, expression.ColumnClause)
+                and isinstance(f, expression.ColumnClause)
+                and pt.is_derived_from(c.table)
+                and pt.is_derived_from(f.table)
+                and mt.is_derived_from(c.table)
+                and mt.is_derived_from(f.table)
+            ):
+                result = True
+
+        visitors.traverse(self.primaryjoin, {}, {"binary": visit_binary})
+        return result
+
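+    # An illustrative case where this returns True: the adjacency-list
+    # pattern, where both sides of the comparison derive from the same
+    # table (names here are assumptions for the sketch):
+    #
+    #     node = Table(
+    #         "node",
+    #         m,
+    #         Column("id", Integer, primary_key=True),
+    #         Column("parent_id", ForeignKey("node.id")),
+    #     )
+    #     # a primaryjoin of ``node.c.id == node.c.parent_id`` compares
+    #     # columns that belong to both the parent and child selectables
+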
+    def _tables_overlap(self) -> bool:
+        """Return True if parent/child tables have some overlap."""
+
+        return selectables_overlap(
+            self.parent_persist_selectable, self.child_persist_selectable
+        )
+
+    def _annotate_remote(self) -> None:
+        """Annotate the primaryjoin and secondaryjoin
+        structures with 'remote' annotations marking columns
+        considered as part of the 'remote' side.
+
+        """
+        if self._has_remote_annotations:
+            return
+
+        if self.secondary is not None:
+            self._annotate_remote_secondary()
+        elif self._local_remote_pairs or self._remote_side:
+            self._annotate_remote_from_args()
+        elif self._refers_to_parent_table():
+            self._annotate_selfref(
+                lambda col: "foreign" in col._annotations, False
+            )
+        elif self._tables_overlap():
+            self._annotate_remote_with_overlap()
+        else:
+            self._annotate_remote_distinct_selectables()
+
+    def _annotate_remote_secondary(self) -> None:
+        """annotate 'remote' in primaryjoin, secondaryjoin
+        when 'secondary' is present.
+
+        """
+
+        assert self.secondary is not None
+        fixed_secondary = self.secondary
+
+        def repl(element: _CE, **kw: Any) -> Optional[_CE]:
+            if fixed_secondary.c.contains_column(element):
+                return element._annotate({"remote": True})
+            return None
+
+        self.primaryjoin = visitors.replacement_traverse(
+            self.primaryjoin, {}, repl
+        )
+
+        assert self.secondaryjoin is not None
+        self.secondaryjoin = visitors.replacement_traverse(
+            self.secondaryjoin, {}, repl
+        )
+
+    def _annotate_selfref(
+        self, fn: Callable[[ColumnElement[Any]], bool], remote_side_given: bool
+    ) -> None:
+        """annotate 'remote' in primaryjoin, secondaryjoin
+        when the relationship is detected as self-referential.
+
+        """
+
+        def visit_binary(binary: BinaryExpression[Any]) -> None:
+            equated = binary.left.compare(binary.right)
+            if isinstance(binary.left, expression.ColumnClause) and isinstance(
+                binary.right, expression.ColumnClause
+            ):
+                # assume one to many - FKs are "remote"
+                if fn(binary.left):
+                    binary.left = binary.left._annotate({"remote": True})
+                if fn(binary.right) and not equated:
+                    binary.right = binary.right._annotate({"remote": True})
+            elif not remote_side_given:
+                self._warn_non_column_elements()
+
+        self.primaryjoin = visitors.cloned_traverse(
+            self.primaryjoin, {}, {"binary": visit_binary}
+        )
+
+    def _annotate_remote_from_args(self) -> None:
+        """annotate 'remote' in primaryjoin, secondaryjoin
+        when the 'remote_side' or '_local_remote_pairs'
+        arguments are used.
+
+        """
+        if self._local_remote_pairs:
+            if self._remote_side:
+                raise sa_exc.ArgumentError(
+                    "remote_side argument is redundant "
+                    "against more detailed _local_remote_side "
+                    "argument."
+                )
+
+            remote_side = [r for (l, r) in self._local_remote_pairs]
+        else:
+            remote_side = self._remote_side
+
+        if self._refers_to_parent_table():
+            self._annotate_selfref(lambda col: col in remote_side, True)
+        else:
+
+            def repl(element: _CE, **kw: Any) -> Optional[_CE]:
+                # use set() to avoid generating ``__eq__()`` expressions
+                # against each element
+                if element in set(remote_side):
+                    return element._annotate({"remote": True})
+                return None
+
+            self.primaryjoin = visitors.replacement_traverse(
+                self.primaryjoin, {}, repl
+            )
+
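+    # The ``remote_side`` handled here is the public argument on
+    # relationship(); a minimal, illustrative self-referential mapping
+    # (class and column names are assumptions) that reaches the
+    # _annotate_selfref() branch above:
+    #
+    #     class Node(Base):
+    #         __tablename__ = "node"
+    #
+    #         id = mapped_column(Integer, primary_key=True)
+    #         parent_id = mapped_column(ForeignKey("node.id"))
+    #         parent = relationship("Node", remote_side=[id])
+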
+    def _annotate_remote_with_overlap(self) -> None:
+        """annotate 'remote' in primaryjoin, secondaryjoin
+        when the parent/child tables have some set of
+        tables in common, though is not a fully self-referential
+        relationship.
+
+        """
+
+        def visit_binary(binary: BinaryExpression[Any]) -> None:
+            binary.left, binary.right = proc_left_right(
+                binary.left, binary.right
+            )
+            binary.right, binary.left = proc_left_right(
+                binary.right, binary.left
+            )
+
+        check_entities = (
+            self.prop is not None and self.prop.mapper is not self.prop.parent
+        )
+
+        def proc_left_right(
+            left: ColumnElement[Any], right: ColumnElement[Any]
+        ) -> Tuple[ColumnElement[Any], ColumnElement[Any]]:
+            if isinstance(left, expression.ColumnClause) and isinstance(
+                right, expression.ColumnClause
+            ):
+                if self.child_persist_selectable.c.contains_column(
+                    right
+                ) and self.parent_persist_selectable.c.contains_column(left):
+                    right = right._annotate({"remote": True})
+            elif (
+                check_entities
+                and right._annotations.get("parentmapper") is self.prop.mapper
+            ):
+                right = right._annotate({"remote": True})
+            elif (
+                check_entities
+                and left._annotations.get("parentmapper") is self.prop.mapper
+            ):
+                left = left._annotate({"remote": True})
+            else:
+                self._warn_non_column_elements()
+
+            return left, right
+
+        self.primaryjoin = visitors.cloned_traverse(
+            self.primaryjoin, {}, {"binary": visit_binary}
+        )
+
+    def _annotate_remote_distinct_selectables(self) -> None:
+        """annotate 'remote' in primaryjoin, secondaryjoin
+        when the parent/child tables are entirely
+        separate.
+
+        """
+
+        def repl(element: _CE, **kw: Any) -> Optional[_CE]:
+            if self.child_persist_selectable.c.contains_column(element) and (
+                not self.parent_local_selectable.c.contains_column(element)
+                or self.child_local_selectable.c.contains_column(element)
+            ):
+                return element._annotate({"remote": True})
+            return None
+
+        self.primaryjoin = visitors.replacement_traverse(
+            self.primaryjoin, {}, repl
+        )
+
+    def _warn_non_column_elements(self) -> None:
+        util.warn(
+            "Non-simple column elements in primary "
+            "join condition for property %s - consider using "
+            "remote() annotations to mark the remote side." % self.prop
+        )
+
+    def _annotate_local(self) -> None:
+        """Annotate the primaryjoin and secondaryjoin
+        structures with 'local' annotations.
+
+        This annotates all column elements found
+        simultaneously in the parent table
+        and in the join condition that don't already have a
+        'remote' annotation, whether set up by
+        _annotate_remote() or user-defined.
+
+        """
+        if self._has_annotation(self.primaryjoin, "local"):
+            return
+
+        if self._local_remote_pairs:
+            local_side = util.column_set(
+                [l for (l, r) in self._local_remote_pairs]
+            )
+        else:
+            local_side = util.column_set(self.parent_persist_selectable.c)
+
+        def locals_(element: _CE, **kw: Any) -> Optional[_CE]:
+            if "remote" not in element._annotations and element in local_side:
+                return element._annotate({"local": True})
+            return None
+
+        self.primaryjoin = visitors.replacement_traverse(
+            self.primaryjoin, {}, locals_
+        )
+
+    def _annotate_parentmapper(self) -> None:
+        def parentmappers_(element: _CE, **kw: Any) -> Optional[_CE]:
+            if "remote" in element._annotations:
+                return element._annotate({"parentmapper": self.prop.mapper})
+            elif "local" in element._annotations:
+                return element._annotate({"parentmapper": self.prop.parent})
+            return None
+
+        self.primaryjoin = visitors.replacement_traverse(
+            self.primaryjoin, {}, parentmappers_
+        )
+
+    def _check_remote_side(self) -> None:
+        if not self.local_remote_pairs:
+            raise sa_exc.ArgumentError(
+                "Relationship %s could "
+                "not determine any unambiguous local/remote column "
+                "pairs based on join condition and remote_side "
+                "arguments.  "
+                "Consider using the remote() annotation to "
+                "accurately mark those elements of the join "
+                "condition that are on the remote side of "
+                "the relationship." % (self.prop,)
+            )
+        else:
+            not_target = util.column_set(
+                self.parent_persist_selectable.c
+            ).difference(self.child_persist_selectable.c)
+
+            for _, rmt in self.local_remote_pairs:
+                if rmt in not_target:
+                    util.warn(
+                        "Expression %s is marked as 'remote', but these "
+                        "column(s) are local to the local side.  The "
+                        "remote() annotation is needed only for a "
+                        "self-referential relationship where both sides "
+                        "of the relationship refer to the same tables."
+                        % (rmt,)
+                    )
+
+    def _check_foreign_cols(
+        self, join_condition: ColumnElement[bool], primary: bool
+    ) -> None:
+        """Check the foreign key columns collected and emit error
+        messages."""
+
+        foreign_cols = self._gather_columns_with_annotation(
+            join_condition, "foreign"
+        )
+
+        has_foreign = bool(foreign_cols)
+
+        if primary:
+            can_sync = bool(self.synchronize_pairs)
+        else:
+            can_sync = bool(self.secondary_synchronize_pairs)
+
+        if (self.support_sync and can_sync) or (
+            not self.support_sync and has_foreign
+        ):
+            return
+
+        # from here below we are just determining the best error message
+        # to report.  Check for a join condition using operators other
+        # than ==; in that case the user may need to turn on
+        # "viewonly=True".
+        if self.support_sync and has_foreign and not can_sync:
+            err = (
+                "Could not locate any simple equality expressions "
+                "involving locally mapped foreign key columns for "
+                "%s join condition "
+                "'%s' on relationship %s."
+                % (
+                    primary and "primary" or "secondary",
+                    join_condition,
+                    self.prop,
+                )
+            )
+            err += (
+                "  Ensure that referencing columns are associated "
+                "with a ForeignKey or ForeignKeyConstraint, or are "
+                "annotated in the join condition with the foreign() "
+                "annotation. To allow comparison operators other than "
+                "'==', the relationship can be marked as viewonly=True."
+            )
+
+            raise sa_exc.ArgumentError(err)
+        else:
+            err = (
+                "Could not locate any relevant foreign key columns "
+                "for %s join condition '%s' on relationship %s."
+                % (
+                    primary and "primary" or "secondary",
+                    join_condition,
+                    self.prop,
+                )
+            )
+            err += (
+                "  Ensure that referencing columns are associated "
+                "with a ForeignKey or ForeignKeyConstraint, or are "
+                "annotated in the join condition with the foreign() "
+                "annotation."
+            )
+            raise sa_exc.ArgumentError(err)
+
+    def _determine_direction(self) -> None:
+        """Determine if this relationship is one to many, many to one,
+        many to many.
+
+        """
+        if self.secondaryjoin is not None:
+            self.direction = MANYTOMANY
+        else:
+            parentcols = util.column_set(self.parent_persist_selectable.c)
+            targetcols = util.column_set(self.child_persist_selectable.c)
+
+            # fk collection which suggests ONETOMANY.
+            onetomany_fk = targetcols.intersection(self.foreign_key_columns)
+
+            # fk collection which suggests MANYTOONE.
+            manytoone_fk = parentcols.intersection(self.foreign_key_columns)
+
+            if onetomany_fk and manytoone_fk:
+                # fks on both sides.  test for overlap of local/remote
+                # with foreign key.
+                # we will gather columns directly from their annotations
+                # without deannotating, so that we can distinguish on a column
+                # that refers to itself.
+
+                # 1. columns that are both remote and FK suggest
+                # onetomany.
+                onetomany_local = self._gather_columns_with_annotation(
+                    self.primaryjoin, "remote", "foreign"
+                )
+
+                # 2. columns that are FK but are not remote (e.g. local)
+                # suggest manytoone.
+                manytoone_local = {
+                    c
+                    for c in self._gather_columns_with_annotation(
+                        self.primaryjoin, "foreign"
+                    )
+                    if "remote" not in c._annotations
+                }
+
+                # 3. if both collections are present, remove columns that
+                # refer to themselves.  This is for the case of
+                # and_(Me.id == Me.remote_id, Me.version == Me.version)
+                if onetomany_local and manytoone_local:
+                    self_equated = self.remote_columns.intersection(
+                        self.local_columns
+                    )
+                    onetomany_local = onetomany_local.difference(self_equated)
+                    manytoone_local = manytoone_local.difference(self_equated)
+
+                # at this point, if only one or the other collection is
+                # present, we know the direction, otherwise it's still
+                # ambiguous.
+
+                if onetomany_local and not manytoone_local:
+                    self.direction = ONETOMANY
+                elif manytoone_local and not onetomany_local:
+                    self.direction = MANYTOONE
+                else:
+                    raise sa_exc.ArgumentError(
+                        "Can't determine relationship"
+                        " direction for relationship '%s' - foreign "
+                        "key columns within the join condition are present "
+                        "in both the parent and the child's mapped tables.  "
+                        "Ensure that only those columns referring "
+                        "to a parent column are marked as foreign, "
+                        "either via the foreign() annotation or "
+                        "via the foreign_keys argument." % self.prop
+                    )
+            elif onetomany_fk:
+                self.direction = ONETOMANY
+            elif manytoone_fk:
+                self.direction = MANYTOONE
+            else:
+                raise sa_exc.ArgumentError(
+                    "Can't determine relationship "
+                    "direction for relationship '%s' - foreign "
+                    "key columns are present in neither the parent "
+                    "nor the child's mapped tables" % self.prop
+                )
+
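+    # Illustrative outcome of the heuristic above, assuming the usual
+    # ``child.parent_id -> parent.id`` foreign key (class names are
+    # assumptions):
+    #
+    #     class Parent(Base):
+    #         ...
+    #         children = relationship("Child")  # FK on target -> ONETOMANY
+    #
+    #     class Child(Base):
+    #         ...
+    #         parent = relationship("Parent")   # FK on local side
+    #                                           # -> MANYTOONE
+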
+    def _deannotate_pairs(
+        self, collection: _ColumnPairIterable
+    ) -> _MutableColumnPairs:
+        """provide deannotation for the various lists of
+        pairs, so that using them in hashes doesn't incur
+        high-overhead __eq__() comparisons against
+        original columns mapped.
+
+        """
+        return [(x._deannotate(), y._deannotate()) for x, y in collection]
+
+    def _setup_pairs(self) -> None:
+        sync_pairs: _MutableColumnPairs = []
+        lrp: util.OrderedSet[Tuple[ColumnElement[Any], ColumnElement[Any]]] = (
+            util.OrderedSet([])
+        )
+        secondary_sync_pairs: _MutableColumnPairs = []
+
+        def go(
+            joincond: ColumnElement[bool],
+            collection: _MutableColumnPairs,
+        ) -> None:
+            def visit_binary(
+                binary: BinaryExpression[Any],
+                left: ColumnElement[Any],
+                right: ColumnElement[Any],
+            ) -> None:
+                if (
+                    "remote" in right._annotations
+                    and "remote" not in left._annotations
+                    and self.can_be_synced_fn(left)
+                ):
+                    lrp.add((left, right))
+                elif (
+                    "remote" in left._annotations
+                    and "remote" not in right._annotations
+                    and self.can_be_synced_fn(right)
+                ):
+                    lrp.add((right, left))
+                if binary.operator is operators.eq and self.can_be_synced_fn(
+                    left, right
+                ):
+                    if "foreign" in right._annotations:
+                        collection.append((left, right))
+                    elif "foreign" in left._annotations:
+                        collection.append((right, left))
+
+            visit_binary_product(visit_binary, joincond)
+
+        for joincond, collection in [
+            (self.primaryjoin, sync_pairs),
+            (self.secondaryjoin, secondary_sync_pairs),
+        ]:
+            if joincond is None:
+                continue
+            go(joincond, collection)
+
+        self.local_remote_pairs = self._deannotate_pairs(lrp)
+        self.synchronize_pairs = self._deannotate_pairs(sync_pairs)
+        self.secondary_synchronize_pairs = self._deannotate_pairs(
+            secondary_sync_pairs
+        )
+
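+    # For the simple one-to-many sketch used in earlier comments, the
+    # collections computed above come out as (illustrative):
+    #
+    #     local_remote_pairs          = [(parent.c.id, child.c.parent_id)]
+    #     synchronize_pairs           = [(parent.c.id, child.c.parent_id)]
+    #     secondary_synchronize_pairs = []
+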
+    _track_overlapping_sync_targets: weakref.WeakKeyDictionary[
+        ColumnElement[Any],
+        weakref.WeakKeyDictionary[
+            RelationshipProperty[Any], ColumnElement[Any]
+        ],
+    ] = weakref.WeakKeyDictionary()
+
+    def _warn_for_conflicting_sync_targets(self) -> None:
+        if not self.support_sync:
+            return
+
+        # we would like to detect if we are synchronizing any column
+        # pairs in conflict with another relationship that wishes to sync
+        # an entirely different column to the same target.  This is a
+        # very rare edge case, so we will try to minimize the
+        # memory/overhead impact of this check.
+        for from_, to_ in itertools.chain(
+            self.synchronize_pairs, self.secondary_synchronize_pairs
+        ):
+            # save ourselves a ton of memory and overhead by only
+            # considering columns that are subject to overlapping
+            # FK constraints at the core level.  This condition can arise
+            # if multiple relationships overlap foreign() directly, but
+            # we're going to assume it's typically a ForeignKeyConstraint-
+            # level configuration that benefits from this warning.
+
+            if to_ not in self._track_overlapping_sync_targets:
+                self._track_overlapping_sync_targets[to_] = (
+                    weakref.WeakKeyDictionary({self.prop: from_})
+                )
+            else:
+                other_props = []
+                prop_to_from = self._track_overlapping_sync_targets[to_]
+
+                for pr, fr_ in prop_to_from.items():
+                    if (
+                        not pr.mapper._dispose_called
+                        and pr not in self.prop._reverse_property
+                        and pr.key not in self.prop._overlaps
+                        and self.prop.key not in pr._overlaps
+                        # note: the "__*" symbol is used internally by
+                        # SQLAlchemy as a general means of suppressing the
+                        # overlaps warning for some extension cases, however
+                        # this is not currently
+                        # a publicly supported symbol and may change at
+                        # any time.
+                        and "__*" not in self.prop._overlaps
+                        and "__*" not in pr._overlaps
+                        and not self.prop.parent.is_sibling(pr.parent)
+                        and not self.prop.mapper.is_sibling(pr.mapper)
+                        and not self.prop.parent.is_sibling(pr.mapper)
+                        and not self.prop.mapper.is_sibling(pr.parent)
+                        and (
+                            self.prop.key != pr.key
+                            or not self.prop.parent.common_parent(pr.parent)
+                        )
+                    ):
+                        other_props.append((pr, fr_))
+
+                if other_props:
+                    util.warn(
+                        "relationship '%s' will copy column %s to column %s, "
+                        "which conflicts with relationship(s): %s. "
+                        "If this is not the intention, consider if these "
+                        "relationships should be linked with "
+                        "back_populates, or if viewonly=True should be "
+                        "applied to one or more if they are read-only. "
+                        "For the less common case that foreign key "
+                        "constraints are partially overlapping, the "
+                        "orm.foreign() "
+                        "annotation can be used to isolate the columns that "
+                        "should be written towards.   To silence this "
+                        "warning, add the parameter 'overlaps=\"%s\"' to the "
+                        "'%s' relationship."
+                        % (
+                            self.prop,
+                            from_,
+                            to_,
+                            ", ".join(
+                                sorted(
+                                    "'%s' (copies %s to %s)" % (pr, fr_, to_)
+                                    for (pr, fr_) in other_props
+                                )
+                            ),
+                            ",".join(sorted(pr.key for pr, fr in other_props)),
+                            self.prop,
+                        ),
+                        code="qzyx",
+                    )
+                self._track_overlapping_sync_targets[to_][self.prop] = from_
+
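+    # A hedged sketch of the overlap the warning above describes, with
+    # assumed class names: two relationships both write child.parent_id,
+    # and the second is either marked viewonly=True (no sync, hence no
+    # conflict) or annotated with overlaps= to silence the warning:
+    #
+    #     class Parent(Base):
+    #         ...
+    #         children = relationship("Child")
+    #         special_children = relationship(
+    #             "Child",
+    #             primaryjoin="and_(Parent.id == Child.parent_id, "
+    #             "Child.special == True)",
+    #             viewonly=True,  # or: overlaps="children"
+    #         )
+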
+    @util.memoized_property
+    def remote_columns(self) -> Set[ColumnElement[Any]]:
+        return self._gather_join_annotations("remote")
+
+    @util.memoized_property
+    def local_columns(self) -> Set[ColumnElement[Any]]:
+        return self._gather_join_annotations("local")
+
+    @util.memoized_property
+    def foreign_key_columns(self) -> Set[ColumnElement[Any]]:
+        return self._gather_join_annotations("foreign")
+
+    def _gather_join_annotations(
+        self, annotation: str
+    ) -> Set[ColumnElement[Any]]:
+        s = set(
+            self._gather_columns_with_annotation(self.primaryjoin, annotation)
+        )
+        if self.secondaryjoin is not None:
+            s.update(
+                self._gather_columns_with_annotation(
+                    self.secondaryjoin, annotation
+                )
+            )
+        return {x._deannotate() for x in s}
+
+    def _gather_columns_with_annotation(
+        self, clause: ColumnElement[Any], *annotation: str
+    ) -> Set[ColumnElement[Any]]:
+        annotation_set = set(annotation)
+        return {
+            cast(ColumnElement[Any], col)
+            for col in visitors.iterate(clause, {})
+            if annotation_set.issubset(col._annotations)
+        }
+
+    @util.memoized_property
+    def _secondary_lineage_set(self) -> FrozenSet[ColumnElement[Any]]:
+        if self.secondary is not None:
+            return frozenset(
+                itertools.chain(*[c.proxy_set for c in self.secondary.c])
+            )
+        else:
+            return util.EMPTY_SET
+
+    def join_targets(
+        self,
+        source_selectable: Optional[FromClause],
+        dest_selectable: FromClause,
+        aliased: bool,
+        single_crit: Optional[ColumnElement[bool]] = None,
+        extra_criteria: Tuple[ColumnElement[bool], ...] = (),
+    ) -> Tuple[
+        ColumnElement[bool],
+        Optional[ColumnElement[bool]],
+        Optional[FromClause],
+        Optional[ClauseAdapter],
+        FromClause,
+    ]:
+        """Given a source and destination selectable, create a
+        join between them.
+
+        This takes into account aliasing the join clause
+        to reference the appropriate corresponding columns
+        in the target objects, as well as the extra child
+        criterion, equivalent column sets, etc.
+
+        """
+        # place a barrier on the destination such that
+        # replacement traversals won't ever dig into it.
+        # its internal structure remains fixed
+        # regardless of context.
+        dest_selectable = _shallow_annotate(
+            dest_selectable, {"no_replacement_traverse": True}
+        )
+
+        primaryjoin, secondaryjoin, secondary = (
+            self.primaryjoin,
+            self.secondaryjoin,
+            self.secondary,
+        )
+
+        # adjust the join condition for single table inheritance,
+        # in the case that the join is to a subclass
+        # this is analogous to the
+        # "_adjust_for_single_table_inheritance()" method in Query.
+
+        if single_crit is not None:
+            if secondaryjoin is not None:
+                secondaryjoin = secondaryjoin & single_crit
+            else:
+                primaryjoin = primaryjoin & single_crit
+
+        if extra_criteria:
+
+            def mark_exclude_cols(
+                elem: SupportsAnnotations, annotations: _AnnotationDict
+            ) -> SupportsAnnotations:
+                """note unrelated columns in the "extra criteria" as either
+                should be adapted or not adapted, even though they are not
+                part of our "local" or "remote" side.
+
+                see #9779 for this case, as well as #11010 for a follow up
+
+                """
+
+                parentmapper_for_element = elem._annotations.get(
+                    "parentmapper", None
+                )
+
+                if (
+                    parentmapper_for_element is not self.prop.parent
+                    and parentmapper_for_element is not self.prop.mapper
+                    and elem not in self._secondary_lineage_set
+                ):
+                    return _safe_annotate(elem, annotations)
+                else:
+                    return elem
+
+            extra_criteria = tuple(
+                _deep_annotate(
+                    elem,
+                    {"should_not_adapt": True},
+                    annotate_callable=mark_exclude_cols,
+                )
+                for elem in extra_criteria
+            )
+
+            if secondaryjoin is not None:
+                secondaryjoin = secondaryjoin & sql.and_(*extra_criteria)
+            else:
+                primaryjoin = primaryjoin & sql.and_(*extra_criteria)
+
+        if aliased:
+            if secondary is not None:
+                secondary = secondary._anonymous_fromclause(flat=True)
+                primary_aliasizer = ClauseAdapter(
+                    secondary,
+                    exclude_fn=_local_col_exclude,
+                )
+                secondary_aliasizer = ClauseAdapter(
+                    dest_selectable, equivalents=self.child_equivalents
+                ).chain(primary_aliasizer)
+                if source_selectable is not None:
+                    primary_aliasizer = ClauseAdapter(
+                        secondary,
+                        exclude_fn=_local_col_exclude,
+                    ).chain(
+                        ClauseAdapter(
+                            source_selectable,
+                            equivalents=self.parent_equivalents,
+                        )
+                    )
+
+                secondaryjoin = secondary_aliasizer.traverse(secondaryjoin)
+            else:
+                primary_aliasizer = ClauseAdapter(
+                    dest_selectable,
+                    exclude_fn=_local_col_exclude,
+                    equivalents=self.child_equivalents,
+                )
+                if source_selectable is not None:
+                    primary_aliasizer.chain(
+                        ClauseAdapter(
+                            source_selectable,
+                            exclude_fn=_remote_col_exclude,
+                            equivalents=self.parent_equivalents,
+                        )
+                    )
+                secondary_aliasizer = None
+
+            primaryjoin = primary_aliasizer.traverse(primaryjoin)
+            target_adapter = secondary_aliasizer or primary_aliasizer
+            target_adapter.exclude_fn = None
+        else:
+            target_adapter = None
+        return (
+            primaryjoin,
+            secondaryjoin,
+            secondary,
+            target_adapter,
+            dest_selectable,
+        )
+
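+    # The aliased=True path above is what ORM joins against aliases
+    # exercise; an illustrative 2.0-style query that reaches it (entity
+    # names are assumptions):
+    #
+    #     from sqlalchemy import select
+    #     from sqlalchemy.orm import aliased
+    #
+    #     child_alias = aliased(Child)
+    #     stmt = select(Parent).join(
+    #         Parent.children.of_type(child_alias)
+    #     )
+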
+    def create_lazy_clause(self, reverse_direction: bool = False) -> Tuple[
+        ColumnElement[bool],
+        Dict[str, ColumnElement[Any]],
+        Dict[ColumnElement[Any], ColumnElement[Any]],
+    ]:
+        binds: Dict[ColumnElement[Any], BindParameter[Any]] = {}
+        equated_columns: Dict[ColumnElement[Any], ColumnElement[Any]] = {}
+
+        has_secondary = self.secondaryjoin is not None
+
+        if has_secondary:
+            lookup = collections.defaultdict(list)
+            for l, r in self.local_remote_pairs:
+                lookup[l].append((l, r))
+                equated_columns[r] = l
+        elif not reverse_direction:
+            for l, r in self.local_remote_pairs:
+                equated_columns[r] = l
+        else:
+            for l, r in self.local_remote_pairs:
+                equated_columns[l] = r
+
+        def col_to_bind(
+            element: ColumnElement[Any], **kw: Any
+        ) -> Optional[BindParameter[Any]]:
+            if (
+                not reverse_direction and "local" in element._annotations
+            ) or (
+                reverse_direction
+                and (
+                    (has_secondary and element in lookup)
+                    or (not has_secondary and "remote" in element._annotations)
+                )
+            ):
+                if element not in binds:
+                    binds[element] = sql.bindparam(
+                        None, None, type_=element.type, unique=True
+                    )
+                return binds[element]
+            return None
+
+        lazywhere = self.primaryjoin
+        if self.secondaryjoin is None or not reverse_direction:
+            lazywhere = visitors.replacement_traverse(
+                lazywhere, {}, col_to_bind
+            )
+
+        if self.secondaryjoin is not None:
+            secondaryjoin = self.secondaryjoin
+            if reverse_direction:
+                secondaryjoin = visitors.replacement_traverse(
+                    secondaryjoin, {}, col_to_bind
+                )
+            lazywhere = sql.and_(lazywhere, secondaryjoin)
+
+        bind_to_col = {binds[col].key: col for col in binds}
+
+        return lazywhere, bind_to_col, equated_columns
+
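+    # Illustrative shape of the result for the simple one-to-many case in
+    # the forward direction: each "local" column is replaced with a
+    # unique, anonymous bind parameter, giving a lazy-load criterion
+    # roughly like
+    #
+    #     :param_1 = child.parent_id
+    #
+    # with bind_to_col mapping the generated parameter name back to
+    # parent.c.id, so the value can be taken from the loaded parent row
+    # at load time (the parameter name shown is an assumption; bindparam
+    # with a None name generates anonymous names).
+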
+
+class _ColInAnnotations:
+    """Serializable object that tests for names in c._annotations.
+
+    TODO: does this need to be serializable anymore?  can we find what the
+    use case was for that?
+
+    """
+
+    __slots__ = ("names",)
+
+    def __init__(self, *names: str):
+        self.names = frozenset(names)
+
+    def __call__(self, c: ClauseElement) -> bool:
+        return bool(self.names.intersection(c._annotations))
+
+
+_local_col_exclude = _ColInAnnotations("local", "should_not_adapt")
+_remote_col_exclude = _ColInAnnotations("remote", "should_not_adapt")
+
+
+class Relationship(
+    RelationshipProperty[_T],
+    _DeclarativeMapped[_T],
+):
+    """Describes an object property that holds a single item or list
+    of items that correspond to a related database table.
+
+    Public constructor is the :func:`_orm.relationship` function.
+
+    .. seealso::
+
+        :ref:`relationship_config_toplevel`
+
+    .. versionchanged:: 2.0 Added :class:`_orm.Relationship` as a Declarative
+       compatible subclass for :class:`_orm.RelationshipProperty`.
+
+    """
+
+    inherit_cache = True
+    """:meta private:"""
+
+
+class _RelationshipDeclared(  # type: ignore[misc]
+    Relationship[_T],
+    WriteOnlyMapped[_T],  # not compatible with Mapped[_T]
+    DynamicMapped[_T],  # not compatible with Mapped[_T]
+):
+    """Relationship subclass used implicitly for declarative mapping."""
+
+    inherit_cache = True
+    """:meta private:"""
+
+    @classmethod
+    def _mapper_property_name(cls) -> str:
+        return "Relationship"
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/scoping.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/scoping.py
new file mode 100644
index 00000000..a0e9f17e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/scoping.py
@@ -0,0 +1,2163 @@
+# orm/scoping.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Generic
+from typing import Iterable
+from typing import Iterator
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from .session import _S
+from .session import Session
+from .. import exc as sa_exc
+from .. import util
+from ..util import create_proxy_methods
+from ..util import ScopedRegistry
+from ..util import ThreadLocalRegistry
+from ..util import warn
+from ..util import warn_deprecated
+from ..util.typing import Protocol
+
+if TYPE_CHECKING:
+    from ._typing import _EntityType
+    from ._typing import _IdentityKeyType
+    from ._typing import OrmExecuteOptionsParameter
+    from .identity import IdentityMap
+    from .interfaces import ORMOption
+    from .mapper import Mapper
+    from .query import Query
+    from .query import RowReturningQuery
+    from .session import _BindArguments
+    from .session import _EntityBindKey
+    from .session import _PKIdentityArgument
+    from .session import _SessionBind
+    from .session import sessionmaker
+    from .session import SessionTransaction
+    from ..engine import Connection
+    from ..engine import CursorResult
+    from ..engine import Engine
+    from ..engine import Result
+    from ..engine import Row
+    from ..engine import RowMapping
+    from ..engine.interfaces import _CoreAnyExecuteParams
+    from ..engine.interfaces import _CoreSingleExecuteParams
+    from ..engine.interfaces import CoreExecuteOptionsParameter
+    from ..engine.result import ScalarResult
+    from ..sql._typing import _ColumnsClauseArgument
+    from ..sql._typing import _T0
+    from ..sql._typing import _T1
+    from ..sql._typing import _T2
+    from ..sql._typing import _T3
+    from ..sql._typing import _T4
+    from ..sql._typing import _T5
+    from ..sql._typing import _T6
+    from ..sql._typing import _T7
+    from ..sql._typing import _TypedColumnClauseArgument as _TCCA
+    from ..sql.base import Executable
+    from ..sql.dml import UpdateBase
+    from ..sql.elements import ClauseElement
+    from ..sql.roles import TypedColumnsClauseRole
+    from ..sql.selectable import ForUpdateParameter
+    from ..sql.selectable import TypedReturnsRows
+
+_T = TypeVar("_T", bound=Any)
+
+
+class QueryPropertyDescriptor(Protocol):
+    """Describes the type applied to a class-level
+    :meth:`_orm.scoped_session.query_property` attribute.
+
+    .. versionadded:: 2.0.5
+
+    """
+
+    def __get__(self, instance: Any, owner: Type[_T]) -> Query[_T]: ...
+
+
+_O = TypeVar("_O", bound=object)
+
+__all__ = ["scoped_session"]
+
+
+@create_proxy_methods(
+    Session,
+    ":class:`_orm.Session`",
+    ":class:`_orm.scoping.scoped_session`",
+    classmethods=["close_all", "object_session", "identity_key"],
+    methods=[
+        "__contains__",
+        "__iter__",
+        "add",
+        "add_all",
+        "begin",
+        "begin_nested",
+        "close",
+        "reset",
+        "commit",
+        "connection",
+        "delete",
+        "execute",
+        "expire",
+        "expire_all",
+        "expunge",
+        "expunge_all",
+        "flush",
+        "get",
+        "get_one",
+        "get_bind",
+        "is_modified",
+        "bulk_save_objects",
+        "bulk_insert_mappings",
+        "bulk_update_mappings",
+        "merge",
+        "query",
+        "refresh",
+        "rollback",
+        "scalar",
+        "scalars",
+    ],
+    attributes=[
+        "bind",
+        "dirty",
+        "deleted",
+        "new",
+        "identity_map",
+        "is_active",
+        "autoflush",
+        "no_autoflush",
+        "info",
+    ],
+)
+class scoped_session(Generic[_S]):
+    """Provides scoped management of :class:`.Session` objects.
+
+    See :ref:`unitofwork_contextual` for a tutorial.
+
+    .. note::
+
+       When using :ref:`asyncio_toplevel`, the async-compatible
+       :class:`_asyncio.async_scoped_session` class should be
+       used in place of :class:`.scoped_session`.
+
+    """
+
+    _support_async: bool = False
+
+    session_factory: sessionmaker[_S]
+    """The `session_factory` provided to `__init__` is stored in this
+    attribute and may be accessed at a later time.  This can be useful when
+    a new non-scoped :class:`.Session` is needed."""
+
+    registry: ScopedRegistry[_S]
+
+    def __init__(
+        self,
+        session_factory: sessionmaker[_S],
+        scopefunc: Optional[Callable[[], Any]] = None,
+    ):
+        """Construct a new :class:`.scoped_session`.
+
+        :param session_factory: a factory to create new :class:`.Session`
+         instances. This is usually, but not necessarily, an instance
+         of :class:`.sessionmaker`.
+        :param scopefunc: optional function which defines
+         the current scope.   If not passed, the :class:`.scoped_session`
+         object assumes "thread-local" scope, and will use
+         a Python ``threading.local()`` in order to maintain the current
+         :class:`.Session`.  If passed, the function should return
+         a hashable token; this token will be used as the key in a
+         dictionary in order to store and retrieve the current
+         :class:`.Session`.
+
+        """
+        self.session_factory = session_factory
+
+        if scopefunc:
+            self.registry = ScopedRegistry(session_factory, scopefunc)
+        else:
+            self.registry = ThreadLocalRegistry(session_factory)
+
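+    # A hedged construction sketch; ``current_request_token`` stands in
+    # for an application-supplied callable returning a hashable token
+    # per scope:
+    #
+    #     from sqlalchemy.orm import scoped_session, sessionmaker
+    #
+    #     Session = scoped_session(
+    #         sessionmaker(), scopefunc=current_request_token
+    #     )
+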
+    @property
+    def _proxied(self) -> _S:
+        return self.registry()
+
+    def __call__(self, **kw: Any) -> _S:
+        r"""Return the current :class:`.Session`, creating it
+        using the :attr:`.scoped_session.session_factory` if not present.
+
+        :param \**kw: Keyword arguments will be passed to the
+         :attr:`.scoped_session.session_factory` callable, if an existing
+         :class:`.Session` is not present.  If the :class:`.Session` is present
+         and keyword arguments have been passed,
+         :exc:`~sqlalchemy.exc.InvalidRequestError` is raised.
+
+        """
+        if kw:
+            if self.registry.has():
+                raise sa_exc.InvalidRequestError(
+                    "Scoped session is already present; "
+                    "no new arguments may be specified."
+                )
+            else:
+                sess = self.session_factory(**kw)
+                self.registry.set(sess)
+        else:
+            sess = self.registry()
+        if not self._support_async and sess._is_asyncio:
+            warn_deprecated(
+                "Using `scoped_session` with asyncio is deprecated and "
+                "will raise an error in a future version. "
+                "Please use `async_scoped_session` instead.",
+                "1.4.23",
+            )
+        return sess
+
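+    # Illustrative usage: within one scope, repeated calls return the
+    # same Session instance (``some_object`` is an assumption):
+    #
+    #     session = Session()        # creates the scope's Session
+    #     session.add(some_object)
+    #     assert Session() is session
+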
+    def configure(self, **kwargs: Any) -> None:
+        """reconfigure the :class:`.sessionmaker` used by this
+        :class:`.scoped_session`.
+
+        See :meth:`.sessionmaker.configure`.
+
+        """
+
+        if self.registry.has():
+            warn(
+                "At least one scoped session is already present. "
+                " configure() can not affect sessions that have "
+                "already been created."
+            )
+
+        self.session_factory.configure(**kwargs)
+
+    def remove(self) -> None:
+        """Dispose of the current :class:`.Session`, if present.
+
+        This will first call :meth:`.Session.close` method
+        on the current :class:`.Session`, which releases any existing
+        transactional/connection resources still being held; transactions
+        specifically are rolled back.  The :class:`.Session` is then
+        discarded.   Upon next usage within the same scope,
+        the :class:`.scoped_session` will produce a new
+        :class:`.Session` object.
+
+        """
+
+        if self.registry.has():
+            self.registry().close()
+        self.registry.clear()
+
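+    # A common, illustrative lifecycle: call remove() at the end of each
+    # request or scope so the next use starts with a fresh Session:
+    #
+    #     try:
+    #         session = Session()
+    #         ...  # work with the session
+    #         session.commit()
+    #     finally:
+    #         Session.remove()
+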
+    def query_property(
+        self, query_cls: Optional[Type[Query[_T]]] = None
+    ) -> QueryPropertyDescriptor:
+        """return a class property which produces a legacy
+        :class:`_query.Query` object against the class and the current
+        :class:`.Session` when called.
+
+        .. legacy:: The :meth:`_orm.scoped_session.query_property` accessor
+           is specific to the legacy :class:`.Query` object and is not
+           considered to be part of :term:`2.0-style` ORM use.
+
+        e.g.::
+
+            from sqlalchemy.orm import QueryPropertyDescriptor
+            from sqlalchemy.orm import scoped_session
+            from sqlalchemy.orm import sessionmaker
+
+            Session = scoped_session(sessionmaker())
+
+
+            class MyClass:
+                query: QueryPropertyDescriptor = Session.query_property()
+
+
+            # after mappers are defined
+            result = MyClass.query.filter(MyClass.name == "foo").all()
+
+        Produces instances of the session's configured query class by
+        default.  To override and use a custom implementation, provide
+        a ``query_cls`` callable.  The callable will be invoked with
+        the class's mapper as a positional argument and a session
+        keyword argument.
+
+        There is no limit to the number of query properties placed on
+        a class.
+
+        """
+
+        class query:
+            def __get__(s, instance: Any, owner: Type[_O]) -> Query[_O]:
+                if query_cls:
+                    # custom query class
+                    return query_cls(owner, session=self.registry())  # type: ignore  # noqa: E501
+                else:
+                    # session's configured query class
+                    return self.registry().query(owner)
+
+        return query()
+
+    # START PROXY METHODS scoped_session
+
+    # code within this block is **programmatically,
+    # statically generated** by tools/generate_proxy_methods.py
+
+    def __contains__(self, instance: object) -> bool:
+        r"""Return True if the instance is associated with this session.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        The instance may be pending or persistent within the Session for a
+        result of True.
+
+
+        """  # noqa: E501
+
+        return self._proxied.__contains__(instance)
+
+    def __iter__(self) -> Iterator[object]:
+        r"""Iterate over all pending or persistent instances within this
+        Session.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+
+        """  # noqa: E501
+
+        return self._proxied.__iter__()
+
+    def add(self, instance: object, _warn: bool = True) -> None:
+        r"""Place an object into this :class:`_orm.Session`.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        Objects that are in the :term:`transient` state when passed to the
+        :meth:`_orm.Session.add` method will move to the
+        :term:`pending` state, until the next flush, at which point they
+        will move to the :term:`persistent` state.
+
+        Objects that are in the :term:`detached` state when passed to the
+        :meth:`_orm.Session.add` method will move to the :term:`persistent`
+        state directly.
+
+        If the transaction used by the :class:`_orm.Session` is rolled back,
+        objects which were transient when they were passed to
+        :meth:`_orm.Session.add` will be moved back to the
+        :term:`transient` state, and will no longer be present within this
+        :class:`_orm.Session`.
+
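+        For example, a minimal sketch (``User`` stands in for any mapped
+        class)::
+
+            obj = User(name="foo")  # transient
+            session.add(obj)  # pending
+            session.flush()  # now persistent
+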
+        .. seealso::
+
+            :meth:`_orm.Session.add_all`
+
+            :ref:`session_adding` - at :ref:`session_basics`
+
+
+        """  # noqa: E501
+
+        return self._proxied.add(instance, _warn=_warn)
+
+    def add_all(self, instances: Iterable[object]) -> None:
+        r"""Add the given collection of instances to this :class:`_orm.Session`.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        See the documentation for :meth:`_orm.Session.add` for a general
+        behavioral description.
+
+        .. seealso::
+
+            :meth:`_orm.Session.add`
+
+            :ref:`session_adding` - at :ref:`session_basics`
+
+
+        """  # noqa: E501
+
+        return self._proxied.add_all(instances)
+
+    def begin(self, nested: bool = False) -> SessionTransaction:
+        r"""Begin a transaction, or nested transaction,
+        on this :class:`.Session`, if one is not already begun.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        The :class:`_orm.Session` object features **autobegin** behavior,
+        so that normally it is not necessary to call the
+        :meth:`_orm.Session.begin`
+        method explicitly. However, it may be used in order to control
+        the scope of when the transactional state is begun.
+
+        When used to begin the outermost transaction, an error is raised
+        if this :class:`.Session` is already inside of a transaction.
+
+        :param nested: if True, begins a SAVEPOINT transaction and is
+         equivalent to calling :meth:`~.Session.begin_nested`. For
+         documentation on SAVEPOINT transactions, please see
+         :ref:`session_begin_nested`.
+
+        :return: the :class:`.SessionTransaction` object.  Note that
+         :class:`.SessionTransaction`
+         acts as a Python context manager, allowing :meth:`.Session.begin`
+         to be used in a "with" block.  See :ref:`session_explicit_begin` for
+         an example.
+
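+        For example, a minimal sketch of explicit transaction scope
+        (``some_object`` is illustrative)::
+
+            with session.begin():
+                session.add(some_object)
+            # the block commits on success, rolls back on exception
+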
+        .. seealso::
+
+            :ref:`session_autobegin`
+
+            :ref:`unitofwork_transaction`
+
+            :meth:`.Session.begin_nested`
+
+
+
+        """  # noqa: E501
+
+        return self._proxied.begin(nested=nested)
+
+    def begin_nested(self) -> SessionTransaction:
+        r"""Begin a "nested" transaction on this Session, e.g. SAVEPOINT.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        The target database(s) and associated drivers must support SQL
+        SAVEPOINT for this method to function correctly.
+
+        For documentation on SAVEPOINT
+        transactions, please see :ref:`session_begin_nested`.
+
+        :return: the :class:`.SessionTransaction` object.  Note that
+         :class:`.SessionTransaction` acts as a context manager, allowing
+         :meth:`.Session.begin_nested` to be used in a "with" block.
+         See :ref:`session_begin_nested` for a usage example.
+
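+        For example, a brief sketch (the object names are illustrative)::
+
+            session.add(obj_one)
+            with session.begin_nested():  # establishes a SAVEPOINT
+                session.add(obj_two)
+            # an error inside the block rolls back to the SAVEPOINT only;
+            # obj_one remains part of the enclosing transaction
+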
+        .. seealso::
+
+            :ref:`session_begin_nested`
+
+            :ref:`pysqlite_serializable` - special workarounds required
+            with the SQLite driver in order for SAVEPOINT to work
+            correctly. For asyncio use cases, see the section
+            :ref:`aiosqlite_serializable`.
+
+
+        """  # noqa: E501
+
+        return self._proxied.begin_nested()
+
+    def close(self) -> None:
+        r"""Close out the transactional resources and ORM objects used by this
+        :class:`_orm.Session`.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        This expunges all ORM objects associated with this
+        :class:`_orm.Session`, ends any transaction in progress and
+        :term:`releases` any :class:`_engine.Connection` objects which this
+        :class:`_orm.Session` itself has checked out from associated
+        :class:`_engine.Engine` objects. The operation then leaves the
+        :class:`_orm.Session` in a state in which it may be used again.
+
+        .. tip::
+
+            In the default running mode the :meth:`_orm.Session.close`
+            method **does not prevent the Session from being used again**.
+            The :class:`_orm.Session` itself does not actually have a
+            distinct "closed" state; calling ``close()`` merely means that
+            the :class:`_orm.Session` will release all database connections
+            and ORM objects.
+
+            Setting the parameter :paramref:`_orm.Session.close_resets_only`
+            to ``False`` will instead make the ``close`` final, meaning that
+            any further action on the session will be forbidden.
+
+        .. versionchanged:: 1.4  The :meth:`.Session.close` method does not
+           immediately create a new :class:`.SessionTransaction` object;
+           instead, the new :class:`.SessionTransaction` is created only if
+           the :class:`.Session` is used again for a database operation.
+
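+        As a sketch of the non-default mode described in the tip above
+        (``engine`` is assumed to exist)::
+
+            from sqlalchemy.orm import sessionmaker
+
+            Session = sessionmaker(engine, close_resets_only=False)
+
+            session = Session()
+            session.close()
+            # any further use of this session now raises an error
+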
+        .. seealso::
+
+            :ref:`session_closing` - detail on the semantics of
+            :meth:`_orm.Session.close` and :meth:`_orm.Session.reset`.
+
+            :meth:`_orm.Session.reset` - a similar method that behaves like
+            ``close()`` with the parameter
+            :paramref:`_orm.Session.close_resets_only` set to ``True``.
+
+
+        """  # noqa: E501
+
+        return self._proxied.close()
+
+    def reset(self) -> None:
+        r"""Close out the transactional resources and ORM objects used by this
+        :class:`_orm.Session`, resetting the session to its initial state.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        This method provides for the same "reset-only" behavior that the
+        :meth:`_orm.Session.close` method has provided historically, where the
+        state of the :class:`_orm.Session` is reset as though the object were
+        brand new, and ready to be used again.
+        This method may then be useful for :class:`_orm.Session` objects
+        which set :paramref:`_orm.Session.close_resets_only` to ``False``,
+        so that "reset only" behavior is still available.
+
+        .. versionadded:: 2.0.22
+
+        .. seealso::
+
+            :ref:`session_closing` - detail on the semantics of
+            :meth:`_orm.Session.close` and :meth:`_orm.Session.reset`.
+
+            :meth:`_orm.Session.close` - a similar method will additionally
+            prevent re-use of the Session when the parameter
+            :paramref:`_orm.Session.close_resets_only` is set to ``False``.
+
+        """  # noqa: E501
+
+        return self._proxied.reset()
+
+    def commit(self) -> None:
+        r"""Flush pending changes and commit the current transaction.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        When the COMMIT operation is complete, all objects are fully
+        :term:`expired`, erasing their internal contents, which will be
+        automatically re-loaded when the objects are next accessed. In the
+        interim, these objects are in an expired state and will not function if
+        they are :term:`detached` from the :class:`.Session`. Additionally,
+        this re-load operation is not supported when using asyncio-oriented
+        APIs. The :paramref:`.Session.expire_on_commit` parameter may be used
+        to disable this behavior.
+
+        When there is no transaction in place for the :class:`.Session`,
+        indicating that no operations were invoked on this :class:`.Session`
+        since the previous call to :meth:`.Session.commit`, the method will
+        begin and commit an internal-only "logical" transaction, that does not
+        normally affect the database unless pending flush changes were
+        detected, but will still invoke event handlers and object expiration
+        rules.
+
+        The outermost database transaction is committed unconditionally,
+        automatically releasing any SAVEPOINTs in effect.
+
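+        As a sketch of disabling the expiration behavior described above
+        (``engine`` is assumed to exist)::
+
+            from sqlalchemy.orm import sessionmaker
+
+            Session = sessionmaker(engine, expire_on_commit=False)
+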
+        .. seealso::
+
+            :ref:`session_committing`
+
+            :ref:`unitofwork_transaction`
+
+            :ref:`asyncio_orm_avoid_lazyloads`
+
+
+        """  # noqa: E501
+
+        return self._proxied.commit()
+
+    def connection(
+        self,
+        bind_arguments: Optional[_BindArguments] = None,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+    ) -> Connection:
+        r"""Return a :class:`_engine.Connection` object corresponding to this
+        :class:`.Session` object's transactional state.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        Either the :class:`_engine.Connection` corresponding to the current
+        transaction is returned, or if no transaction is in progress, a new
+        one is begun and the :class:`_engine.Connection`
+        returned (note that no
+        transactional state is established with the DBAPI until the first
+        SQL statement is emitted).
+
+        Ambiguity in multi-bind or unbound :class:`.Session` objects can be
+        resolved through any of the optional keyword arguments.   This
+        ultimately makes use of the :meth:`.get_bind` method for resolution.
+
+        :param bind_arguments: dictionary of bind arguments.  May include
+         "mapper", "bind", "clause", other custom arguments that are passed
+         to :meth:`.Session.get_bind`.
+
+        :param execution_options: a dictionary of execution options that will
+         be passed to :meth:`_engine.Connection.execution_options`, **when the
+         connection is first procured only**.   If the connection is already
+         present within the :class:`.Session`, a warning is emitted and
+         the arguments are ignored.
+
+         .. seealso::
+
+            :ref:`session_transaction_isolation`
+
+
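+        For example, a brief sketch of applying an isolation level when
+        the connection is first procured (dialect support varies)::
+
+            conn = session.connection(
+                execution_options={"isolation_level": "SERIALIZABLE"}
+            )
+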
+        """  # noqa: E501
+
+        return self._proxied.connection(
+            bind_arguments=bind_arguments, execution_options=execution_options
+        )
+
+    def delete(self, instance: object) -> None:
+        r"""Mark an instance as deleted.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        The object is assumed to be either :term:`persistent` or
+        :term:`detached` when passed; after the method is called, the
+        object will remain in the :term:`persistent` state until the next
+        flush proceeds.  During this time, the object will also be a member
+        of the :attr:`_orm.Session.deleted` collection.
+
+        When the next flush proceeds, the object will move to the
+        :term:`deleted` state, indicating a ``DELETE`` statement was emitted
+        for its row within the current transaction.   When the transaction
+        is successfully committed,
+        the deleted object is moved to the :term:`detached` state and is
+        no longer present within this :class:`_orm.Session`.
+
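+        For example, a minimal sketch (``obj`` is any persistent or
+        detached object)::
+
+            session.delete(obj)
+            session.flush()  # DELETE emitted; object now "deleted"
+            session.commit()  # object becomes detached
+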
+        .. seealso::
+
+            :ref:`session_deleting` - at :ref:`session_basics`
+
+
+        """  # noqa: E501
+
+        return self._proxied.delete(instance)
+
+    @overload
+    def execute(
+        self,
+        statement: TypedReturnsRows[_T],
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        _parent_execute_state: Optional[Any] = None,
+        _add_event: Optional[Any] = None,
+    ) -> Result[_T]: ...
+
+    @overload
+    def execute(
+        self,
+        statement: UpdateBase,
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        _parent_execute_state: Optional[Any] = None,
+        _add_event: Optional[Any] = None,
+    ) -> CursorResult[Any]: ...
+
+    @overload
+    def execute(
+        self,
+        statement: Executable,
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        _parent_execute_state: Optional[Any] = None,
+        _add_event: Optional[Any] = None,
+    ) -> Result[Any]: ...
+
+    def execute(
+        self,
+        statement: Executable,
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        _parent_execute_state: Optional[Any] = None,
+        _add_event: Optional[Any] = None,
+    ) -> Result[Any]:
+        r"""Execute a SQL expression construct.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        Returns a :class:`_engine.Result` object representing
+        results of the statement execution.
+
+        E.g.::
+
+            from sqlalchemy import select
+
+            result = session.execute(select(User).where(User.id == 5))
+
+        The API contract of :meth:`_orm.Session.execute` is similar to that
+        of :meth:`_engine.Connection.execute`, the :term:`2.0 style` version
+        of :class:`_engine.Connection`.
+
+        .. versionchanged:: 1.4 the :meth:`_orm.Session.execute` method is
+           now the primary point of ORM statement execution when using
+           :term:`2.0 style` ORM usage.
+
+        :param statement:
+            An executable statement (i.e. an :class:`.Executable` expression
+            such as :func:`_expression.select`).
+
+        :param params:
+            Optional dictionary, or list of dictionaries, containing
+            bound parameter values.   If a single dictionary, single-row
+            execution occurs; if a list of dictionaries, an
+            "executemany" will be invoked.  The keys in each dictionary
+            must correspond to parameter names present in the statement.
+
+        :param execution_options: optional dictionary of execution options,
+         which will be associated with the statement execution.  This
+         dictionary can provide a subset of the options that are accepted
+         by :meth:`_engine.Connection.execution_options`, and may also
+         provide additional options understood only in an ORM context.
+
+         .. seealso::
+
+            :ref:`orm_queryguide_execution_options` - ORM-specific execution
+            options
+
+        :param bind_arguments: dictionary of additional arguments to determine
+         the bind.  May include "mapper", "bind", or other custom arguments.
+         Contents of this dictionary are passed to the
+         :meth:`.Session.get_bind` method.
+
+        :return: a :class:`_engine.Result` object.
+
+
+
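+        As the "executemany" sketch referenced in the ``params`` section
+        above (``User`` is illustrative)::
+
+            from sqlalchemy import insert
+
+            session.execute(
+                insert(User),
+                [{"name": "u1"}, {"name": "u2"}],  # list of dicts
+            )
+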
+        """  # noqa: E501
+
+        return self._proxied.execute(
+            statement,
+            params=params,
+            execution_options=execution_options,
+            bind_arguments=bind_arguments,
+            _parent_execute_state=_parent_execute_state,
+            _add_event=_add_event,
+        )
+
+    def expire(
+        self, instance: object, attribute_names: Optional[Iterable[str]] = None
+    ) -> None:
+        r"""Expire the attributes on an instance.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        Marks the attributes of an instance as out of date. When an expired
+        attribute is next accessed, a query will be issued to the
+        :class:`.Session` object's current transactional context in order to
+        load all expired attributes for the given instance.   Note that
+        a highly isolated transaction will return the same values as were
+        previously read in that same transaction, regardless of changes
+        in database state outside of that transaction.
+
+        To expire all objects in the :class:`.Session` simultaneously,
+        use :meth:`Session.expire_all`.
+
+        The :class:`.Session` object's default behavior is to
+        expire all state whenever the :meth:`Session.rollback`
+        or :meth:`Session.commit` methods are called, so that new
+        state can be loaded for the new transaction.   For this reason,
+        calling :meth:`Session.expire` only makes sense for the specific
+        case that a non-ORM SQL statement was emitted in the current
+        transaction.
+
+        :param instance: The instance to be refreshed.
+        :param attribute_names: optional list of string attribute names
+          indicating a subset of attributes to be expired.
+
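+        For example, a brief sketch (``obj`` and its ``name`` attribute
+        are illustrative)::
+
+            session.expire(obj, ["name"])  # expire a single attribute
+            session.expire(obj)  # or expire all attributes
+            obj.name  # next access emits a SELECT to reload
+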
+        .. seealso::
+
+            :ref:`session_expire` - introductory material
+
+            :meth:`.Session.expire`
+
+            :meth:`.Session.refresh`
+
+            :meth:`_orm.Query.populate_existing`
+
+
+        """  # noqa: E501
+
+        return self._proxied.expire(instance, attribute_names=attribute_names)
+
+    def expire_all(self) -> None:
+        r"""Expire all persistent instances within this Session.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        When any attribute on a persistent instance is next accessed,
+        a query will be issued using the
+        :class:`.Session` object's current transactional context in order to
+        load all expired attributes for the given instance.   Note that
+        a highly isolated transaction will return the same values as were
+        previously read in that same transaction, regardless of changes
+        in database state outside of that transaction.
+
+        To expire individual objects and individual attributes
+        on those objects, use :meth:`Session.expire`.
+
+        The :class:`.Session` object's default behavior is to
+        expire all state whenever the :meth:`Session.rollback`
+        or :meth:`Session.commit` methods are called, so that new
+        state can be loaded for the new transaction.   For this reason,
+        calling :meth:`Session.expire_all` is not usually needed,
+        assuming the transaction is isolated.
+
+        .. seealso::
+
+            :ref:`session_expire` - introductory material
+
+            :meth:`.Session.expire`
+
+            :meth:`.Session.refresh`
+
+            :meth:`_orm.Query.populate_existing`
+
+
+        """  # noqa: E501
+
+        return self._proxied.expire_all()
+
+    def expunge(self, instance: object) -> None:
+        r"""Remove the ``instance`` from this ``Session``.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        This will free all internal references to the instance.  Cascading
+        will be applied according to the *expunge* cascade rule.
+
+
+        """  # noqa: E501
+
+        return self._proxied.expunge(instance)
+
+    def expunge_all(self) -> None:
+        r"""Remove all object instances from this ``Session``.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        This is equivalent to calling ``expunge(obj)`` on all objects in this
+        ``Session``.
+
+
+        """  # noqa: E501
+
+        return self._proxied.expunge_all()
+
+    def flush(self, objects: Optional[Sequence[Any]] = None) -> None:
+        r"""Flush all the object changes to the database.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        Writes out all pending object creations, deletions and modifications
+        to the database as INSERTs, DELETEs, UPDATEs, etc.  Operations are
+        automatically ordered by the Session's unit of work dependency
+        solver.
+
+        Database operations will be issued in the current transactional
+        context and do not affect the state of the transaction, unless an
+        error occurs, in which case the entire transaction is rolled back.
+        You may flush() as often as you like within a transaction to move
+        changes from Python to the database's transaction buffer.
+
+        :param objects: Optional; restricts the flush operation to operate
+          only on elements that are in the given collection.
+
+          This feature is for an extremely narrow set of use cases where
+          particular objects may need to be operated upon before the
+          full flush() occurs.  It is not intended for general use.
+
+
+        """  # noqa: E501
+
+        return self._proxied.flush(objects=objects)
+
+    def get(
+        self,
+        entity: _EntityBindKey[_O],
+        ident: _PKIdentityArgument,
+        *,
+        options: Optional[Sequence[ORMOption]] = None,
+        populate_existing: bool = False,
+        with_for_update: ForUpdateParameter = None,
+        identity_token: Optional[Any] = None,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+    ) -> Optional[_O]:
+        r"""Return an instance based on the given primary key identifier,
+        or ``None`` if not found.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        E.g.::
+
+            my_user = session.get(User, 5)
+
+            some_object = session.get(VersionedFoo, (5, 10))
+
+            some_object = session.get(VersionedFoo, {"id": 5, "version_id": 10})
+
+        .. versionadded:: 1.4 Added :meth:`_orm.Session.get`, which is moved
+           from the now legacy :meth:`_orm.Query.get` method.
+
+        :meth:`_orm.Session.get` is special in that it provides direct
+        access to the identity map of the :class:`.Session`.
+        If the given primary key identifier is present
+        in the local identity map, the object is returned
+        directly from this collection and no SQL is emitted,
+        unless the object has been marked fully expired.
+        If not present,
+        a SELECT is performed in order to locate the object.
+
+        :meth:`_orm.Session.get` also will perform a check if
+        the object is present in the identity map and
+        marked as expired - a SELECT
+        is emitted to refresh the object as well as to
+        ensure that the row is still present.
+        If not, :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.
+
+        :param entity: a mapped class or :class:`.Mapper` indicating the
+         type of entity to be loaded.
+
+        :param ident: A scalar, tuple, or dictionary representing the
+         primary key.  For a composite (e.g. multiple column) primary key,
+         a tuple or dictionary should be passed.
+
+         For a single-column primary key, the scalar calling form is typically
+         the most expedient.  If the primary key of a row is the value "5",
+         the call looks like::
+
+            my_object = session.get(SomeClass, 5)
+
+         The tuple form contains primary key values typically in
+         the order in which they correspond to the mapped
+         :class:`_schema.Table`
+         object's primary key columns, or if the
+         :paramref:`_orm.Mapper.primary_key` configuration parameter were
+         used, in
+         the order used for that parameter. For example, if the primary key
+         of a row is represented by the integer
+         digits "5, 10" the call would look like::
+
+             my_object = session.get(SomeClass, (5, 10))
+
+         The dictionary form should include as keys the mapped attribute names
+         corresponding to each element of the primary key.  If the mapped class
+         has the attributes ``id``, ``version_id`` as the attributes which
+         store the object's primary key value, the call would look like::
+
+            my_object = session.get(SomeClass, {"id": 5, "version_id": 10})
+
+        :param options: optional sequence of loader options which will be
+         applied to the query, if one is emitted.
+
+        :param populate_existing: causes the method to unconditionally emit
+         a SQL query and refresh the object with the newly loaded data,
+         regardless of whether or not the object is already present.
+
+        :param with_for_update: optional boolean ``True`` indicating FOR UPDATE
+          should be used, or may be a dictionary containing flags to
+          indicate a more specific set of FOR UPDATE flags for the SELECT;
+          flags should match the parameters of
+          :meth:`_query.Query.with_for_update`.
+          Supersedes the :paramref:`.Session.refresh.lockmode` parameter.
+
+        :param execution_options: optional dictionary of execution options,
+         which will be associated with the query execution if one is emitted.
+         This dictionary can provide a subset of the options that are
+         accepted by :meth:`_engine.Connection.execution_options`, and may
+         also provide additional options understood only in an ORM context.
+
+         .. versionadded:: 1.4.29
+
+         .. seealso::
+
+            :ref:`orm_queryguide_execution_options` - ORM-specific execution
+            options
+
+        :param bind_arguments: dictionary of additional arguments to determine
+         the bind.  May include "mapper", "bind", or other custom arguments.
+         Contents of this dictionary are passed to the
+         :meth:`.Session.get_bind` method.
+
+         .. versionadded:: 2.0.0rc1
+
+        :return: The object instance, or ``None``.
+
+
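+        As a further sketch combining the parameters described above
+        (``User`` is illustrative)::
+
+            user = session.get(
+                User,
+                5,
+                populate_existing=True,
+                with_for_update=True,
+            )
+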
+        """  # noqa: E501
+
+        return self._proxied.get(
+            entity,
+            ident,
+            options=options,
+            populate_existing=populate_existing,
+            with_for_update=with_for_update,
+            identity_token=identity_token,
+            execution_options=execution_options,
+            bind_arguments=bind_arguments,
+        )
+
+    def get_one(
+        self,
+        entity: _EntityBindKey[_O],
+        ident: _PKIdentityArgument,
+        *,
+        options: Optional[Sequence[ORMOption]] = None,
+        populate_existing: bool = False,
+        with_for_update: ForUpdateParameter = None,
+        identity_token: Optional[Any] = None,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+    ) -> _O:
+        r"""Return exactly one instance based on the given primary key
+        identifier, or raise an exception if not found.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query
+        selects no rows.
+
+        For a detailed documentation of the arguments see the
+        method :meth:`.Session.get`.
+
+        .. versionadded:: 2.0.22
+
+        :return: The object instance.
+
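+        For example, a brief sketch (``User`` is illustrative)::
+
+            from sqlalchemy.orm.exc import NoResultFound
+
+            try:
+                user = session.get_one(User, 5)
+            except NoResultFound:
+                ...  # no row with primary key 5
+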
+        .. seealso::
+
+            :meth:`.Session.get` - equivalent method that instead
+            returns ``None`` if no row was found with the provided primary
+            key
+
+
+        """  # noqa: E501
+
+        return self._proxied.get_one(
+            entity,
+            ident,
+            options=options,
+            populate_existing=populate_existing,
+            with_for_update=with_for_update,
+            identity_token=identity_token,
+            execution_options=execution_options,
+            bind_arguments=bind_arguments,
+        )
+
+    def get_bind(
+        self,
+        mapper: Optional[_EntityBindKey[_O]] = None,
+        *,
+        clause: Optional[ClauseElement] = None,
+        bind: Optional[_SessionBind] = None,
+        _sa_skip_events: Optional[bool] = None,
+        _sa_skip_for_implicit_returning: bool = False,
+        **kw: Any,
+    ) -> Union[Engine, Connection]:
+        r"""Return a "bind" to which this :class:`.Session` is bound.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        The "bind" is usually an instance of :class:`_engine.Engine`,
+        except in the case where the :class:`.Session` has been
+        explicitly bound directly to a :class:`_engine.Connection`.
+
+        For a multiply-bound or unbound :class:`.Session`, the
+        ``mapper`` or ``clause`` arguments are used to determine the
+        appropriate bind to return.
+
+        Note that the "mapper" argument is usually present
+        when :meth:`.Session.get_bind` is called via an ORM
+        operation such as a :meth:`.Session.query` call, each
+        individual INSERT/UPDATE/DELETE operation within a
+        :meth:`.Session.flush` call, etc.
+
+        The order of resolution is:
+
+        1. if mapper given and :paramref:`.Session.binds` is present,
+           locate a bind based first on the mapper in use, then
+           on the mapped class in use, then on any base classes that are
+           present in the ``__mro__`` of the mapped class, from more specific
+           superclasses to more general.
+        2. if clause given and ``Session.binds`` is present,
+           locate a bind based on :class:`_schema.Table` objects
+           found in the given clause present in ``Session.binds``.
+        3. if ``Session.bind`` is present, return that.
+        4. if clause given, attempt to return a bind
+           linked to the :class:`_schema.MetaData` ultimately
+           associated with the clause.
+        5. if mapper given, attempt to return a bind
+           linked to the :class:`_schema.MetaData` ultimately
+           associated with the :class:`_schema.Table` or other
+           selectable to which the mapper is mapped.
+        6. No bind can be found, :exc:`~sqlalchemy.exc.UnboundExecutionError`
+           is raised.
+
+        Note that the :meth:`.Session.get_bind` method can be overridden on
+        a user-defined subclass of :class:`.Session` to provide any kind
+        of bind resolution scheme.  See the example at
+        :ref:`session_custom_partitioning`.
+
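+        For example, a minimal sketch of such an override; the routing
+        rule, ``AnalyticsBase`` and the engine names are purely
+        illustrative::
+
+            from sqlalchemy.orm import Session
+
+            class RoutingSession(Session):
+                def get_bind(self, mapper=None, clause=None, **kw):
+                    if mapper and issubclass(mapper.class_, AnalyticsBase):
+                        return analytics_engine
+                    return default_engine
+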
+        :param mapper:
+          Optional mapped class or corresponding :class:`_orm.Mapper` instance.
+          The bind can be derived from a :class:`_orm.Mapper` first by
+          consulting the "binds" map associated with this :class:`.Session`,
+          and secondly by consulting the :class:`_schema.MetaData` associated
+          with the :class:`_schema.Table` to which the :class:`_orm.Mapper` is
+          mapped for a bind.
+
+        :param clause:
+            A :class:`_expression.ClauseElement` (i.e.
+            :func:`_expression.select`,
+            :func:`_expression.text`,
+            etc.).  If the ``mapper`` argument is not present or could not
+            produce a bind, the given expression construct will be searched
+            for a bound element, typically a :class:`_schema.Table`
+            associated with
+            bound :class:`_schema.MetaData`.
+
+        .. seealso::
+
+             :ref:`session_partitioning`
+
+             :paramref:`.Session.binds`
+
+             :meth:`.Session.bind_mapper`
+
+             :meth:`.Session.bind_table`
+
+
+        """  # noqa: E501
+
+        return self._proxied.get_bind(
+            mapper=mapper,
+            clause=clause,
+            bind=bind,
+            _sa_skip_events=_sa_skip_events,
+            _sa_skip_for_implicit_returning=_sa_skip_for_implicit_returning,
+            **kw,
+        )
+
+    def is_modified(
+        self, instance: object, include_collections: bool = True
+    ) -> bool:
+        r"""Return ``True`` if the given instance has locally
+        modified attributes.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        This method retrieves the history for each instrumented
+        attribute on the instance and performs a comparison of the current
+        value to its previously flushed or committed value, if any.
+
+        It is in effect a more expensive and accurate
+        version of checking for the given instance in the
+        :attr:`.Session.dirty` collection; a full test for
+        each attribute's net "dirty" status is performed.
+
+        E.g.::
+
+            return session.is_modified(someobject)
+
+        A few caveats to this method apply:
+
+        * Instances present in the :attr:`.Session.dirty` collection may
+          report ``False`` when tested with this method.  This is because
+          the object may have received change events via attribute mutation,
+          thus placing it in :attr:`.Session.dirty`, but ultimately the state
+          is the same as that loaded from the database, resulting in no net
+          change here.
+        * Scalar attributes may not have recorded the previously set
+          value when a new value was applied, if the attribute was not loaded,
+          or was expired, at the time the new value was received - in these
+          cases, the attribute is assumed to have a change, even if there is
+          ultimately no net change against its database value. SQLAlchemy in
+          most cases does not need the "old" value when a set event occurs, so
+          it skips the expense of a SQL call if the old value isn't present,
+          based on the assumption that an UPDATE of the scalar value is
+          usually needed, and in those few cases where it isn't, is less
+          expensive on average than issuing a defensive SELECT.
+
+          The "old" value is fetched unconditionally upon set only if the
+          attribute container has the ``active_history`` flag set to ``True``.
+          This flag is set typically for primary key attributes and scalar
+          object references that are not a simple many-to-one.  To set this
+          flag for any arbitrary mapped column, use the ``active_history``
+          argument with :func:`.column_property`, as sketched below.
+
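+        As the sketch referenced above for the ``active_history`` flag
+        (``Widget`` and ``Base`` are illustrative)::
+
+            from sqlalchemy import Column, Integer, String
+            from sqlalchemy.orm import column_property
+
+            class Widget(Base):
+                __tablename__ = "widget"
+                id = Column(Integer, primary_key=True)
+                # record the prior value unconditionally upon set
+                name = column_property(Column(String(50)), active_history=True)
+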
+        :param instance: mapped instance to be tested for pending changes.
+        :param include_collections: Indicates if multivalued collections
+         should be included in the operation.  Setting this to ``False`` is a
+         way to detect only local-column based properties (i.e. scalar columns
+         or many-to-one foreign keys) that would result in an UPDATE for this
+         instance upon flush.
+
+
+        """  # noqa: E501
+
+        return self._proxied.is_modified(
+            instance, include_collections=include_collections
+        )
+
+    def bulk_save_objects(
+        self,
+        objects: Iterable[object],
+        return_defaults: bool = False,
+        update_changed_only: bool = True,
+        preserve_order: bool = True,
+    ) -> None:
+        r"""Perform a bulk save of the given list of objects.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        .. legacy::
+
+            This method is a legacy feature as of the 2.0 series of
+            SQLAlchemy.   For modern bulk INSERT and UPDATE, see
+            the sections :ref:`orm_queryguide_bulk_insert` and
+            :ref:`orm_queryguide_bulk_update`.
+
+            For general INSERT and UPDATE of existing ORM mapped objects,
+            prefer standard :term:`unit of work` data management patterns,
+            introduced in the :ref:`unified_tutorial` at
+            :ref:`tutorial_orm_data_manipulation`.  SQLAlchemy 2.0
+            now uses :ref:`engine_insertmanyvalues` with modern dialects
+            which solves previous issues of bulk INSERT slowness.
+
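+        For example, a minimal sketch (``User`` is illustrative)::
+
+            session.bulk_save_objects(
+                [User(name="u1"), User(name="u2")]
+            )
+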
+        :param objects: a sequence of mapped object instances.  The mapped
+         objects are persisted as is, and are **not** associated with the
+         :class:`.Session` afterwards.
+
+         For each object, whether the object is sent as an INSERT or an
+         UPDATE is dependent on the same rules used by the :class:`.Session`
+         in traditional operation; if the object has the
+         :attr:`.InstanceState.key`
+         attribute set, then the object is assumed to be "detached" and
+         will result in an UPDATE.  Otherwise, an INSERT is used.
+
+         In the case of an UPDATE, statements are grouped based on which
+         attributes have changed, and are thus to be the subject of each
+         SET clause.  If ``update_changed_only`` is False, then all
+         attributes present within each object are applied to the UPDATE
+         statement, which may help in allowing the statements to be grouped
+         together into a larger executemany(), and will also reduce the
+         overhead of checking history on attributes.
+
+        :param return_defaults: when True, rows that are missing values which
+         generate defaults, namely integer primary key defaults and sequences,
+         will be inserted **one at a time**, so that the primary key value
+         is available.  In particular this will allow joined-inheritance
+         and other multi-table mappings to insert correctly without the need
+         to provide primary key values ahead of time; however,
+         :paramref:`.Session.bulk_save_objects.return_defaults` **greatly
+         reduces the performance gains** of the method overall.  It is strongly
+         advised to please use the standard :meth:`_orm.Session.add_all`
+         approach.
+
+        :param update_changed_only: when True, UPDATE statements are rendered
+         based on those attributes in each state that have logged changes.
+         When False, all attributes present are rendered into the SET clause
+         with the exception of primary key attributes.
+
+        :param preserve_order: when True, the order of inserts and updates
+         matches exactly the order in which the objects are given.   When
+         False, common types of objects are grouped into inserts
+         and updates, to allow for more batching opportunities.
+
+        .. seealso::
+
+            :doc:`queryguide/dml`
+
+            :meth:`.Session.bulk_insert_mappings`
+
+            :meth:`.Session.bulk_update_mappings`
+
+
+        """  # noqa: E501
+
+        return self._proxied.bulk_save_objects(
+            objects,
+            return_defaults=return_defaults,
+            update_changed_only=update_changed_only,
+            preserve_order=preserve_order,
+        )
+
+    def bulk_insert_mappings(
+        self,
+        mapper: Mapper[Any],
+        mappings: Iterable[Dict[str, Any]],
+        return_defaults: bool = False,
+        render_nulls: bool = False,
+    ) -> None:
+        r"""Perform a bulk insert of the given list of mapping dictionaries.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        .. legacy::
+
+            This method is a legacy feature as of the 2.0 series of
+            SQLAlchemy.   For modern bulk INSERT and UPDATE, see
+            the sections :ref:`orm_queryguide_bulk_insert` and
+            :ref:`orm_queryguide_bulk_update`.  The 2.0 API shares
+            implementation details with this method and adds new features
+            as well.
+
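+        For example, a minimal sketch (``User`` is illustrative)::
+
+            session.bulk_insert_mappings(
+                User,
+                [{"name": "u1"}, {"name": "u2"}],
+            )
+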
+        :param mapper: a mapped class, or the actual :class:`_orm.Mapper`
+         object,
+         representing the single kind of object represented within the mapping
+         list.
+
+        :param mappings: a sequence of dictionaries, each one containing the
+         state of the mapped row to be inserted, in terms of the attribute
+         names on the mapped class.   If the mapping refers to multiple tables,
+         such as a joined-inheritance mapping, each dictionary must contain all
+         keys to be populated into all tables.
+
+        :param return_defaults: when True, the INSERT process will be altered
+         to ensure that newly generated primary key values will be fetched.
+         The rationale for this parameter is typically to enable
+         :ref:`Joined Table Inheritance <joined_inheritance>` mappings to
+         be bulk inserted.
+
+         .. note:: for backends that don't support RETURNING, the
+            :paramref:`_orm.Session.bulk_insert_mappings.return_defaults`
+            parameter can significantly decrease performance as INSERT
+            statements can no longer be batched.   See
+            :ref:`engine_insertmanyvalues`
+            for background on which backends are affected.
+
+        :param render_nulls: When True, a value of ``None`` will result
+         in a NULL value being included in the INSERT statement, rather
+         than the column being omitted from the INSERT.   This allows all
+         the rows being INSERTed to have the identical set of columns which
+         allows the full set of rows to be batched to the DBAPI.  Normally,
+         each column-set that contains a different combination of NULL values
+         than the previous row must omit a different series of columns from
+         the rendered INSERT statement, which means it must be emitted as a
+         separate statement.   By passing this flag, the full set of rows
+         are guaranteed to be batchable into one batch; the cost however is
+         that server-side defaults which are invoked by an omitted column will
+         be skipped, so care must be taken to ensure that these are not
+         necessary.
+
+         .. warning::
+
+            When this flag is set, **server side default SQL values will
+            not be invoked** for those columns that are inserted as NULL;
+            the NULL value will be sent explicitly.   Care must be taken
+            to ensure that no server-side default functions need to be
+            invoked for the operation as a whole.
+
+        .. seealso::
+
+            :doc:`queryguide/dml`
+
+            :meth:`.Session.bulk_save_objects`
+
+            :meth:`.Session.bulk_update_mappings`
+
+
+        """  # noqa: E501
+
+        return self._proxied.bulk_insert_mappings(
+            mapper,
+            mappings,
+            return_defaults=return_defaults,
+            render_nulls=render_nulls,
+        )
+
+    def bulk_update_mappings(
+        self, mapper: Mapper[Any], mappings: Iterable[Dict[str, Any]]
+    ) -> None:
+        r"""Perform a bulk update of the given list of mapping dictionaries.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        .. legacy::
+
+            This method is a legacy feature as of the 2.0 series of
+            SQLAlchemy.   For modern bulk INSERT and UPDATE, see
+            the sections :ref:`orm_queryguide_bulk_insert` and
+            :ref:`orm_queryguide_bulk_update`.  The 2.0 API shares
+            implementation details with this method and adds new features
+            as well.
+
+        :param mapper: a mapped class, or the actual :class:`_orm.Mapper`
+         object,
+         representing the single kind of object represented within the mapping
+         list.
+
+        :param mappings: a sequence of dictionaries, each one containing the
+         state of the mapped row to be updated, in terms of the attribute names
+         on the mapped class.   If the mapping refers to multiple tables, such
+         as a joined-inheritance mapping, each dictionary may contain keys
+         corresponding to all tables.   All those keys which are present and
+         are not part of the primary key are applied to the SET clause of the
+         UPDATE statement; the primary key values, which are required, are
+         applied to the WHERE clause.
+
+
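+        For example, a minimal sketch (``User`` is illustrative; the
+        primary key value is required in each dictionary)::
+
+            session.bulk_update_mappings(
+                User,
+                [{"id": 1, "name": "u1-new"}, {"id": 2, "name": "u2-new"}],
+            )
+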
+        .. seealso::
+
+            :doc:`queryguide/dml`
+
+            :meth:`.Session.bulk_insert_mappings`
+
+            :meth:`.Session.bulk_save_objects`
+
+
+        """  # noqa: E501
+
+        return self._proxied.bulk_update_mappings(mapper, mappings)
+
+    def merge(
+        self,
+        instance: _O,
+        *,
+        load: bool = True,
+        options: Optional[Sequence[ORMOption]] = None,
+    ) -> _O:
+        r"""Copy the state of a given instance into a corresponding instance
+        within this :class:`.Session`.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        :meth:`.Session.merge` examines the primary key attributes of the
+        source instance, and attempts to reconcile it with an instance of the
+        same primary key in the session.   If not found locally, it attempts
+        to load the object from the database based on primary key, and if
+        none can be located, creates a new instance.  The state of each
+        attribute on the source instance is then copied to the target
+        instance.  The resulting target instance is then returned by the
+        method; the original source instance is left unmodified, and
+        remains un-associated with the :class:`.Session` if it was not
+        already associated.
+
+        This operation cascades to associated instances if the association is
+        mapped with ``cascade="merge"``.
+
+        See :ref:`unitofwork_merging` for a detailed discussion of merging.
+
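+        For example, a minimal sketch (``detached_user`` stands for an
+        object deserialized from a cache or carried over from another
+        session)::
+
+            merged_user = session.merge(detached_user)
+            # merged_user is persistent within this session;
+            # detached_user itself is left unmodified
+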
+        :param instance: Instance to be merged.
+        :param load: Boolean, when False, :meth:`.merge` switches into
+         a "high performance" mode which causes it to forego emitting history
+         events as well as all database access.  This flag is used for
+         cases such as transferring graphs of objects into a :class:`.Session`
+         from a second level cache, or to transfer just-loaded objects
+         into the :class:`.Session` owned by a worker thread or process
+         without re-querying the database.
+
+         The ``load=False`` use case adds the caveat that the given
+         object has to be in a "clean" state, that is, has no pending changes
+         to be flushed - even if the incoming object is detached from any
+         :class:`.Session`.   This is so that when
+         the merge operation populates local attributes and
+         cascades to related objects and
+         collections, the values can be "stamped" onto the
+         target object as is, without generating any history or attribute
+         events, and without the need to reconcile the incoming data with
+         any existing related objects or collections that might not
+         be loaded.  The resulting objects from ``load=False`` are always
+         produced as "clean", so it is only appropriate that the given objects
+         should be "clean" as well, else this suggests a mis-use of the
+         method.
+        :param options: optional sequence of loader options which will be
+         applied to the :meth:`_orm.Session.get` method when the merge
+         operation loads the existing version of the object from the database.
+
+         .. versionadded:: 1.4.24
+
+
+        .. seealso::
+
+            :func:`.make_transient_to_detached` - provides for an alternative
+            means of "merging" a single object into the :class:`.Session`
+
+
+        """  # noqa: E501
+
+        return self._proxied.merge(instance, load=load, options=options)
+
+    @overload
+    def query(self, _entity: _EntityType[_O]) -> Query[_O]: ...
+
+    @overload
+    def query(
+        self, _colexpr: TypedColumnsClauseRole[_T]
+    ) -> RowReturningQuery[Tuple[_T]]: ...
+
+    # START OVERLOADED FUNCTIONS self.query RowReturningQuery 2-8
+
+    # code within this block is **programmatically,
+    # statically generated** by tools/generate_tuple_map_overloads.py
+
+    @overload
+    def query(
+        self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1]
+    ) -> RowReturningQuery[Tuple[_T0, _T1]]: ...
+
+    @overload
+    def query(
+        self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2]
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2]]: ...
+
+    @overload
+    def query(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3]]: ...
+
+    @overload
+    def query(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4]]: ...
+
+    @overload
+    def query(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+        __ent5: _TCCA[_T5],
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ...
+
+    @overload
+    def query(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+        __ent5: _TCCA[_T5],
+        __ent6: _TCCA[_T6],
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ...
+
+    @overload
+    def query(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+        __ent5: _TCCA[_T5],
+        __ent6: _TCCA[_T6],
+        __ent7: _TCCA[_T7],
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: ...
+
+    # END OVERLOADED FUNCTIONS self.query
+
+    @overload
+    def query(
+        self, *entities: _ColumnsClauseArgument[Any], **kwargs: Any
+    ) -> Query[Any]: ...
+
+    def query(
+        self, *entities: _ColumnsClauseArgument[Any], **kwargs: Any
+    ) -> Query[Any]:
+        r"""Return a new :class:`_query.Query` object corresponding to this
+        :class:`_orm.Session`.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        Note that the :class:`_query.Query` object is legacy as of
+        SQLAlchemy 2.0; the :func:`_sql.select` construct is now used
+        to construct ORM queries.
+
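+        For example, a brief legacy-style sketch (``User`` is
+        illustrative)::
+
+            users = session.query(User).filter_by(name="foo").all()
+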
+        .. seealso::
+
+            :ref:`unified_tutorial`
+
+            :ref:`queryguide_toplevel`
+
+            :ref:`query_api_toplevel` - legacy API doc
+
+
+        """  # noqa: E501
+
+        return self._proxied.query(*entities, **kwargs)
+
+    def refresh(
+        self,
+        instance: object,
+        attribute_names: Optional[Iterable[str]] = None,
+        with_for_update: ForUpdateParameter = None,
+    ) -> None:
+        r"""Expire and refresh attributes on the given instance.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        The selected attributes will first be expired as they would when using
+        :meth:`_orm.Session.expire`; then a SELECT statement will be issued to
+        the database to refresh column-oriented attributes with the current
+        value available in the current transaction.
+
+        :func:`_orm.relationship` oriented attributes will also be immediately
+        loaded if they were already eagerly loaded on the object, using the
+        same eager loading strategy that they were loaded with originally.
+
+        .. versionadded:: 1.4 - the :meth:`_orm.Session.refresh` method
+           can also refresh eagerly loaded attributes.
+
+        :func:`_orm.relationship` oriented attributes that would normally
+        load using the ``select`` (or "lazy") loader strategy will also
+        load **if they are named explicitly in the attribute_names
+        collection**, emitting a SELECT statement for the attribute using the
+        ``immediate`` loader strategy.  If lazy-loaded relationships are not
+        named in :paramref:`_orm.Session.refresh.attribute_names`, then
+        they remain as "lazy loaded" attributes and are not implicitly
+        refreshed.
+
+        .. versionchanged:: 2.0.4  The :meth:`_orm.Session.refresh` method
+           will now refresh lazy-loaded :func:`_orm.relationship` oriented
+           attributes for those which are named explicitly in the
+           :paramref:`_orm.Session.refresh.attribute_names` collection.
+
+        .. tip::
+
+            While the :meth:`_orm.Session.refresh` method is capable of
+            refreshing both column and relationship oriented attributes, its
+            primary focus is on refreshing of local column-oriented attributes
+            on a single instance. For more open ended "refresh" functionality,
+            including the ability to refresh the attributes on many objects at
+            once while having explicit control over relationship loader
+            strategies, use the
+            :ref:`populate existing <orm_queryguide_populate_existing>` feature
+            instead.
+
+        Note that a highly isolated transaction will return the same values as
+        were previously read in that same transaction, regardless of changes
+        in database state outside of that transaction.   Refreshing
+        attributes usually only makes sense at the start of a transaction
+        where database rows have not yet been accessed.
+
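+        For example, a brief sketch (``obj`` and its attributes are
+        illustrative)::
+
+            session.refresh(obj)  # refresh column attributes
+            # naming a lazy-loaded relationship loads it explicitly:
+            session.refresh(obj, ["name", "addresses"])
+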
+        :param attribute_names: optional.  An iterable collection of
+          string attribute names indicating a subset of attributes to
+          be refreshed.
+
+        :param with_for_update: optional boolean ``True`` indicating FOR UPDATE
+          should be used, or may be a dictionary containing flags to
+          indicate a more specific set of FOR UPDATE flags for the SELECT;
+          flags should match the parameters of
+          :meth:`_query.Query.with_for_update`.
+          Supersedes the :paramref:`.Session.refresh.lockmode` parameter.
+
+        .. seealso::
+
+            :ref:`session_expire` - introductory material
+
+            :meth:`.Session.expire`
+
+            :meth:`.Session.expire_all`
+
+            :ref:`orm_queryguide_populate_existing` - allows any ORM query
+            to refresh objects as they would be loaded normally.
+
+
+        """  # noqa: E501
+
+        return self._proxied.refresh(
+            instance,
+            attribute_names=attribute_names,
+            with_for_update=with_for_update,
+        )
+
+    def rollback(self) -> None:
+        r"""Roll back the current transaction in progress.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        If no transaction is in progress, this method is a pass-through.
+
+        The method always rolls back
+        the topmost database transaction, discarding any nested
+        transactions that may be in progress.
+
+        .. seealso::
+
+            :ref:`session_rollback`
+
+            :ref:`unitofwork_transaction`
+
+
+        """  # noqa: E501
+
+        return self._proxied.rollback()
+
+    @overload
+    def scalar(
+        self,
+        statement: TypedReturnsRows[Tuple[_T]],
+        params: Optional[_CoreSingleExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        **kw: Any,
+    ) -> Optional[_T]: ...
+
+    @overload
+    def scalar(
+        self,
+        statement: Executable,
+        params: Optional[_CoreSingleExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        **kw: Any,
+    ) -> Any: ...
+
+    def scalar(
+        self,
+        statement: Executable,
+        params: Optional[_CoreSingleExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        **kw: Any,
+    ) -> Any:
+        r"""Execute a statement and return a scalar result.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        Usage and parameters are the same as that of
+        :meth:`_orm.Session.execute`; the return result is a scalar Python
+        value.
+
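+        E.g., a minimal sketch, assuming a mapped ``User`` class::
+
+            from sqlalchemy import select
+
+            # first column of the first row, or None if there are no rows
+            user = session.scalar(select(User).where(User.id == 5))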
+
+        """  # noqa: E501
+
+        return self._proxied.scalar(
+            statement,
+            params=params,
+            execution_options=execution_options,
+            bind_arguments=bind_arguments,
+            **kw,
+        )
+
+    @overload
+    def scalars(
+        self,
+        statement: TypedReturnsRows[Tuple[_T]],
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        **kw: Any,
+    ) -> ScalarResult[_T]: ...
+
+    @overload
+    def scalars(
+        self,
+        statement: Executable,
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        **kw: Any,
+    ) -> ScalarResult[Any]: ...
+
+    def scalars(
+        self,
+        statement: Executable,
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        **kw: Any,
+    ) -> ScalarResult[Any]:
+        r"""Execute a statement and return the results as scalars.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        Usage and parameters are the same as that of
+        :meth:`_orm.Session.execute`; the return result is a
+        :class:`_result.ScalarResult` filtering object which
+        will return single elements rather than :class:`_row.Row` objects.
+
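+        E.g., a minimal sketch, assuming a mapped ``User`` class::
+
+            from sqlalchemy import select
+
+            # ScalarResult yields User instances rather than Row objects
+            for user in session.scalars(select(User).order_by(User.id)):
+                print(user)
+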
+        :return:  a :class:`_result.ScalarResult` object
+
+        .. versionadded:: 1.4.24 Added :meth:`_orm.Session.scalars`
+
+        .. versionadded:: 1.4.26 Added :meth:`_orm.scoped_session.scalars`
+
+        .. seealso::
+
+            :ref:`orm_queryguide_select_orm_entities` - contrasts the behavior
+            of :meth:`_orm.Session.execute` to :meth:`_orm.Session.scalars`
+
+
+        """  # noqa: E501
+
+        return self._proxied.scalars(
+            statement,
+            params=params,
+            execution_options=execution_options,
+            bind_arguments=bind_arguments,
+            **kw,
+        )
+
+    @property
+    def bind(self) -> Optional[Union[Engine, Connection]]:
+        r"""Proxy for the :attr:`_orm.Session.bind` attribute
+        on behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        """  # noqa: E501
+
+        return self._proxied.bind
+
+    @bind.setter
+    def bind(self, attr: Optional[Union[Engine, Connection]]) -> None:
+        self._proxied.bind = attr
+
+    @property
+    def dirty(self) -> Any:
+        r"""The set of all persistent instances considered dirty.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class
+            on behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        E.g.::
+
+            some_mapped_object in session.dirty
+
+        Instances are considered dirty when they were modified but not
+        deleted.
+
+        Note that this 'dirty' calculation is 'optimistic'; most
+        attribute-setting or collection modification operations will
+        mark an instance as 'dirty' and place it in this set, even if
+        there is no net change to the attribute's value.  At flush
+        time, the value of each attribute is compared to its
+        previously saved value, and if there's no net change, no SQL
+        operation will occur (this is a more expensive operation so
+        it's only done at flush time).
+
+        To check if an instance has actionable net changes to its
+        attributes, use the :meth:`.Session.is_modified` method.
+
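+        E.g., a hedged sketch, assuming ``obj`` is a persistent instance::
+
+            if obj in session.dirty and session.is_modified(obj):
+                ...  # obj has net changes that would result in an UPDATE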
+
+        """  # noqa: E501
+
+        return self._proxied.dirty
+
+    @property
+    def deleted(self) -> Any:
+        r"""The set of all instances marked as 'deleted' within this ``Session``
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class
+            on behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        """  # noqa: E501
+
+        return self._proxied.deleted
+
+    @property
+    def new(self) -> Any:
+        r"""The set of all instances marked as 'new' within this ``Session``.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class
+            on behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        """  # noqa: E501
+
+        return self._proxied.new
+
+    @property
+    def identity_map(self) -> IdentityMap:
+        r"""Proxy for the :attr:`_orm.Session.identity_map` attribute
+        on behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        """  # noqa: E501
+
+        return self._proxied.identity_map
+
+    @identity_map.setter
+    def identity_map(self, attr: IdentityMap) -> None:
+        self._proxied.identity_map = attr
+
+    @property
+    def is_active(self) -> Any:
+        r"""True if this :class:`.Session` not in "partial rollback" state.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class
+            on behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        .. versionchanged:: 1.4 The :class:`_orm.Session` no longer begins
+           a new transaction immediately, so this attribute will be False
+           when the :class:`_orm.Session` is first instantiated.
+
+        "partial rollback" state typically indicates that the flush process
+        of the :class:`_orm.Session` has failed, and that the
+        :meth:`_orm.Session.rollback` method must be emitted in order to
+        fully roll back the transaction.
+
+        If this :class:`_orm.Session` is not in a transaction at all, the
+        :class:`_orm.Session` will autobegin when it is first used, so in this
+        case :attr:`_orm.Session.is_active` will return True.
+
+        Otherwise, if this :class:`_orm.Session` is within a transaction,
+        and that transaction has not been rolled back internally, the
+        :attr:`_orm.Session.is_active` will also return True.
+
+        .. seealso::
+
+            :ref:`faq_session_rollback`
+
+            :meth:`_orm.Session.in_transaction`
+
+
+        """  # noqa: E501
+
+        return self._proxied.is_active
+
+    @property
+    def autoflush(self) -> bool:
+        r"""Proxy for the :attr:`_orm.Session.autoflush` attribute
+        on behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        """  # noqa: E501
+
+        return self._proxied.autoflush
+
+    @autoflush.setter
+    def autoflush(self, attr: bool) -> None:
+        self._proxied.autoflush = attr
+
+    @property
+    def no_autoflush(self) -> Any:
+        r"""Return a context manager that disables autoflush.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class
+            on behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        e.g.::
+
+            with session.no_autoflush:
+
+                some_object = SomeClass()
+                session.add(some_object)
+                # won't autoflush
+                some_object.related_thing = session.query(SomeRelated).first()
+
+        Operations that proceed within the ``with:`` block
+        will not be subject to flushes occurring upon query
+        access.  This is useful when initializing a series
+        of objects which involve existing database queries,
+        where the uncompleted object should not yet be flushed.
+
+
+        """  # noqa: E501
+
+        return self._proxied.no_autoflush
+
+    @property
+    def info(self) -> Any:
+        r"""A user-modifiable dictionary.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class
+            on behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        The initial value of this dictionary can be populated using the
+        ``info`` argument to the :class:`.Session` constructor or
+        :class:`.sessionmaker` constructor or factory methods.  The dictionary
+        here is always local to this :class:`.Session` and can be modified
+        independently of all other :class:`.Session` objects.
+
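+        E.g., a minimal sketch, assuming ``Session`` is imported from
+        ``sqlalchemy.orm``::
+
+            session = Session(info={"request_id": "abc123"})
+
+            # later, e.g. within an event handler
+            request_id = session.info["request_id"]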
+
+        """  # noqa: E501
+
+        return self._proxied.info
+
+    @classmethod
+    def close_all(cls) -> None:
+        r"""Close *all* sessions in memory.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        .. deprecated:: 1.3 The :meth:`.Session.close_all` method is deprecated and will be removed in a future release.  Please refer to :func:`.session.close_all_sessions`.
+
+        """  # noqa: E501
+
+        return Session.close_all()
+
+    @classmethod
+    def object_session(cls, instance: object) -> Optional[Session]:
+        r"""Return the :class:`.Session` to which an object belongs.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        This is an alias of :func:`.object_session`.
+
+
+        """  # noqa: E501
+
+        return Session.object_session(instance)
+
+    @classmethod
+    def identity_key(
+        cls,
+        class_: Optional[Type[Any]] = None,
+        ident: Union[Any, Tuple[Any, ...]] = None,
+        *,
+        instance: Optional[Any] = None,
+        row: Optional[Union[Row[Any], RowMapping]] = None,
+        identity_token: Optional[Any] = None,
+    ) -> _IdentityKeyType[Any]:
+        r"""Return an identity key.
+
+        .. container:: class_bases
+
+            Proxied for the :class:`_orm.Session` class on
+            behalf of the :class:`_orm.scoping.scoped_session` class.
+
+        This is an alias of :func:`.util.identity_key`.
+
+
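+        E.g., a hedged sketch, assuming a mapped ``User`` class with a
+        single-column primary key::
+
+            key = Session.identity_key(User, 5)
+            # look up the already-loaded instance, if any
+            obj = session.identity_map.get(key)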
+        """  # noqa: E501
+
+        return Session.identity_key(
+            class_=class_,
+            ident=ident,
+            instance=instance,
+            row=row,
+            identity_token=identity_token,
+        )
+
+    # END PROXY METHODS scoped_session
+
+
+ScopedSession = scoped_session
+"""Old name for backwards compatibility."""
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/session.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/session.py
new file mode 100644
index 00000000..6cd7cd63
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/session.py
@@ -0,0 +1,5302 @@
+# orm/session.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Provides the Session class and related utilities."""
+
+from __future__ import annotations
+
+import contextlib
+from enum import Enum
+import itertools
+import sys
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import Generic
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import NoReturn
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+import weakref
+
+from . import attributes
+from . import bulk_persistence
+from . import context
+from . import descriptor_props
+from . import exc
+from . import identity
+from . import loading
+from . import query
+from . import state as statelib
+from ._typing import _O
+from ._typing import insp_is_mapper
+from ._typing import is_composite_class
+from ._typing import is_orm_option
+from ._typing import is_user_defined_option
+from .base import _class_to_mapper
+from .base import _none_set
+from .base import _state_mapper
+from .base import instance_str
+from .base import LoaderCallableStatus
+from .base import object_mapper
+from .base import object_state
+from .base import PassiveFlag
+from .base import state_str
+from .context import FromStatement
+from .context import ORMCompileState
+from .identity import IdentityMap
+from .query import Query
+from .state import InstanceState
+from .state_changes import _StateChange
+from .state_changes import _StateChangeState
+from .state_changes import _StateChangeStates
+from .unitofwork import UOWTransaction
+from .. import engine
+from .. import exc as sa_exc
+from .. import sql
+from .. import util
+from ..engine import Connection
+from ..engine import Engine
+from ..engine.util import TransactionalContext
+from ..event import dispatcher
+from ..event import EventTarget
+from ..inspection import inspect
+from ..inspection import Inspectable
+from ..sql import coercions
+from ..sql import dml
+from ..sql import roles
+from ..sql import Select
+from ..sql import TableClause
+from ..sql import visitors
+from ..sql.base import _NoArg
+from ..sql.base import CompileState
+from ..sql.schema import Table
+from ..sql.selectable import ForUpdateArg
+from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
+from ..util import IdentitySet
+from ..util.typing import Literal
+from ..util.typing import Protocol
+
+if typing.TYPE_CHECKING:
+    from ._typing import _EntityType
+    from ._typing import _IdentityKeyType
+    from ._typing import _InstanceDict
+    from ._typing import OrmExecuteOptionsParameter
+    from .interfaces import ORMOption
+    from .interfaces import UserDefinedOption
+    from .mapper import Mapper
+    from .path_registry import PathRegistry
+    from .query import RowReturningQuery
+    from ..engine import CursorResult
+    from ..engine import Result
+    from ..engine import Row
+    from ..engine import RowMapping
+    from ..engine.base import Transaction
+    from ..engine.base import TwoPhaseTransaction
+    from ..engine.interfaces import _CoreAnyExecuteParams
+    from ..engine.interfaces import _CoreSingleExecuteParams
+    from ..engine.interfaces import _ExecuteOptions
+    from ..engine.interfaces import CoreExecuteOptionsParameter
+    from ..engine.result import ScalarResult
+    from ..event import _InstanceLevelDispatch
+    from ..sql._typing import _ColumnsClauseArgument
+    from ..sql._typing import _InfoType
+    from ..sql._typing import _T0
+    from ..sql._typing import _T1
+    from ..sql._typing import _T2
+    from ..sql._typing import _T3
+    from ..sql._typing import _T4
+    from ..sql._typing import _T5
+    from ..sql._typing import _T6
+    from ..sql._typing import _T7
+    from ..sql._typing import _TypedColumnClauseArgument as _TCCA
+    from ..sql.base import Executable
+    from ..sql.base import ExecutableOption
+    from ..sql.dml import UpdateBase
+    from ..sql.elements import ClauseElement
+    from ..sql.roles import TypedColumnsClauseRole
+    from ..sql.selectable import ForUpdateParameter
+    from ..sql.selectable import TypedReturnsRows
+
+_T = TypeVar("_T", bound=Any)
+
+__all__ = [
+    "Session",
+    "SessionTransaction",
+    "sessionmaker",
+    "ORMExecuteState",
+    "close_all_sessions",
+    "make_transient",
+    "make_transient_to_detached",
+    "object_session",
+]
+
+_sessions: weakref.WeakValueDictionary[int, Session] = (
+    weakref.WeakValueDictionary()
+)
+"""Weak-referencing dictionary of :class:`.Session` objects.
+"""
+
+statelib._sessions = _sessions
+
+_PKIdentityArgument = Union[Any, Tuple[Any, ...]]
+
+_BindArguments = Dict[str, Any]
+
+_EntityBindKey = Union[Type[_O], "Mapper[_O]"]
+_SessionBindKey = Union[Type[Any], "Mapper[Any]", "TableClause", str]
+_SessionBind = Union["Engine", "Connection"]
+
+JoinTransactionMode = Literal[
+    "conditional_savepoint",
+    "rollback_only",
+    "control_fully",
+    "create_savepoint",
+]
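+
+# A hedged usage sketch: when a :class:`.Session` is bound to a Connection
+# whose transaction was begun externally (e.g. a test harness), SAVEPOINT-only
+# operation may be requested via the ``join_transaction_mode`` parameter
+# (``connection`` is assumed):
+#
+#     session = Session(bind=connection, join_transaction_mode="create_savepoint")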
+
+
+class _ConnectionCallableProto(Protocol):
+    """a callable that returns a :class:`.Connection` given an instance.
+
+    This callable, when present on a :class:`.Session`, is called only from the
+    ORM's persistence mechanism (i.e. the unit of work flush process) to allow
+    for connection-per-instance schemes (i.e. horizontal sharding) to be used
+    at persistence time.
+
+    This callable is not present on a plain :class:`.Session`; however, it
+    is established when using the horizontal sharding extension.
+
+    """
+
+    def __call__(
+        self,
+        mapper: Optional[Mapper[Any]] = None,
+        instance: Optional[object] = None,
+        **kw: Any,
+    ) -> Connection: ...
+
+
+def _state_session(state: InstanceState[Any]) -> Optional[Session]:
+    """Given an :class:`.InstanceState`, return the :class:`.Session`
+    associated, if any.
+    """
+    return state.session
+
+
+class _SessionClassMethods:
+    """Class-level methods for :class:`.Session`, :class:`.sessionmaker`."""
+
+    @classmethod
+    @util.deprecated(
+        "1.3",
+        "The :meth:`.Session.close_all` method is deprecated and will be "
+        "removed in a future release.  Please refer to "
+        ":func:`.session.close_all_sessions`.",
+    )
+    def close_all(cls) -> None:
+        """Close *all* sessions in memory."""
+
+        close_all_sessions()
+
+    @classmethod
+    @util.preload_module("sqlalchemy.orm.util")
+    def identity_key(
+        cls,
+        class_: Optional[Type[Any]] = None,
+        ident: Union[Any, Tuple[Any, ...]] = None,
+        *,
+        instance: Optional[Any] = None,
+        row: Optional[Union[Row[Any], RowMapping]] = None,
+        identity_token: Optional[Any] = None,
+    ) -> _IdentityKeyType[Any]:
+        """Return an identity key.
+
+        This is an alias of :func:`.util.identity_key`.
+
+        """
+        return util.preloaded.orm_util.identity_key(
+            class_,
+            ident,
+            instance=instance,
+            row=row,
+            identity_token=identity_token,
+        )
+
+    @classmethod
+    def object_session(cls, instance: object) -> Optional[Session]:
+        """Return the :class:`.Session` to which an object belongs.
+
+        This is an alias of :func:`.object_session`.
+
+        """
+
+        return object_session(instance)
+
+
+class SessionTransactionState(_StateChangeState):
+    ACTIVE = 1
+    PREPARED = 2
+    COMMITTED = 3
+    DEACTIVE = 4
+    CLOSED = 5
+    PROVISIONING_CONNECTION = 6
+
+
+# backwards compatibility
+ACTIVE, PREPARED, COMMITTED, DEACTIVE, CLOSED, PROVISIONING_CONNECTION = tuple(
+    SessionTransactionState
+)
+
+
+class ORMExecuteState(util.MemoizedSlots):
+    """Represents a call to the :meth:`_orm.Session.execute` method, as passed
+    to the :meth:`.SessionEvents.do_orm_execute` event hook.
+
+    .. versionadded:: 1.4
+
+    .. seealso::
+
+        :ref:`session_execute_events` - top level documentation on how
+        to use :meth:`_orm.SessionEvents.do_orm_execute`
+
+    """
+
+    __slots__ = (
+        "session",
+        "statement",
+        "parameters",
+        "execution_options",
+        "local_execution_options",
+        "bind_arguments",
+        "identity_token",
+        "_compile_state_cls",
+        "_starting_event_idx",
+        "_events_todo",
+        "_update_execution_options",
+    )
+
+    session: Session
+    """The :class:`_orm.Session` in use."""
+
+    statement: Executable
+    """The SQL statement being invoked.
+
+    For an ORM selection as would
+    be retrieved from :class:`_orm.Query`, this is an instance of
+    :class:`_sql.select` that was generated from the ORM query.
+    """
+
+    parameters: Optional[_CoreAnyExecuteParams]
+    """Dictionary of parameters that was passed to
+    :meth:`_orm.Session.execute`."""
+
+    execution_options: _ExecuteOptions
+    """The complete dictionary of current execution options.
+
+    This is a merge of the statement level options with the
+    locally passed execution options.
+
+    .. seealso::
+
+        :attr:`_orm.ORMExecuteState.local_execution_options`
+
+        :meth:`_sql.Executable.execution_options`
+
+        :ref:`orm_queryguide_execution_options`
+
+    """
+
+    local_execution_options: _ExecuteOptions
+    """Dictionary view of the execution options passed to the
+    :meth:`.Session.execute` method.
+
+    This does not include options that may be associated with the statement
+    being invoked.
+
+    .. seealso::
+
+        :attr:`_orm.ORMExecuteState.execution_options`
+
+    """
+
+    bind_arguments: _BindArguments
+    """The dictionary passed as the
+    :paramref:`_orm.Session.execute.bind_arguments` dictionary.
+
+    This dictionary may be used by extensions to :class:`_orm.Session` to pass
+    arguments that will assist in determining amongst a set of database
+    connections which one should be used to invoke this statement.
+
+    """
+
+    _compile_state_cls: Optional[Type[ORMCompileState]]
+    _starting_event_idx: int
+    _events_todo: List[Any]
+    _update_execution_options: Optional[_ExecuteOptions]
+
+    def __init__(
+        self,
+        session: Session,
+        statement: Executable,
+        parameters: Optional[_CoreAnyExecuteParams],
+        execution_options: _ExecuteOptions,
+        bind_arguments: _BindArguments,
+        compile_state_cls: Optional[Type[ORMCompileState]],
+        events_todo: List[_InstanceLevelDispatch[Session]],
+    ):
+        """Construct a new :class:`_orm.ORMExecuteState`.
+
+        This object is constructed internally.
+
+        """
+        self.session = session
+        self.statement = statement
+        self.parameters = parameters
+        self.local_execution_options = execution_options
+        self.execution_options = statement._execution_options.union(
+            execution_options
+        )
+        self.bind_arguments = bind_arguments
+        self._compile_state_cls = compile_state_cls
+        self._events_todo = list(events_todo)
+
+    def _remaining_events(self) -> List[_InstanceLevelDispatch[Session]]:
+        return self._events_todo[self._starting_event_idx + 1 :]
+
+    def invoke_statement(
+        self,
+        statement: Optional[Executable] = None,
+        params: Optional[_CoreAnyExecuteParams] = None,
+        execution_options: Optional[OrmExecuteOptionsParameter] = None,
+        bind_arguments: Optional[_BindArguments] = None,
+    ) -> Result[Any]:
+        """Execute the statement represented by this
+        :class:`.ORMExecuteState`, without re-invoking events that have
+        already proceeded.
+
+        This method essentially performs a re-entrant execution of the current
+        statement for which the :meth:`.SessionEvents.do_orm_execute` event is
+        currently being invoked. The use case for this is event handlers
+        that want to override how the ultimate
+        :class:`_engine.Result` object is returned, such as for schemes that
+        retrieve results from an offline cache or which concatenate results
+        from multiple executions.
+
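+        E.g., a hedged sketch of a caching handler, assuming ``cache`` is an
+        application-level dictionary and ``event`` is imported from
+        ``sqlalchemy``::
+
+            @event.listens_for(Session, "do_orm_execute")
+            def _do_orm_execute(orm_execute_state):
+                key = orm_execute_state.execution_options.get("my_cache_key")
+                if key is not None:
+                    if key not in cache:
+                        # run the statement for real; freeze() stores the
+                        # rows in a re-usable FrozenResult
+                        cache[key] = (
+                            orm_execute_state.invoke_statement().freeze()
+                        )
+                    # calling the FrozenResult produces a new Result,
+                    # which preempts the normal execution
+                    return cache[key]()
+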
+        When the :class:`_engine.Result` object is returned by the actual
+        handler function within :meth:`_orm.SessionEvents.do_orm_execute` and
+        is propagated to the calling
+        :meth:`_orm.Session.execute` method, the remainder of the
+        :meth:`_orm.Session.execute` method is preempted and the
+        :class:`_engine.Result` object is returned to the caller of
+        :meth:`_orm.Session.execute` immediately.
+
+        :param statement: optional statement to be invoked, in place of the
+         statement currently represented by :attr:`.ORMExecuteState.statement`.
+
+        :param params: optional dictionary of parameters or list of parameters
+         which will be merged into the existing
+         :attr:`.ORMExecuteState.parameters` of this :class:`.ORMExecuteState`.
+
+         .. versionchanged:: 2.0 a list of parameter dictionaries is accepted
+            for executemany executions.
+
+        :param execution_options: optional dictionary of execution options
+         will be merged into the existing
+         :attr:`.ORMExecuteState.execution_options` of this
+         :class:`.ORMExecuteState`.
+
+        :param bind_arguments: optional dictionary of bind_arguments
+         which will be merged amongst the current
+         :attr:`.ORMExecuteState.bind_arguments`
+         of this :class:`.ORMExecuteState`.
+
+        :return: a :class:`_engine.Result` object with ORM-level results.
+
+        .. seealso::
+
+            :ref:`do_orm_execute_re_executing` - background and examples on the
+            appropriate usage of :meth:`_orm.ORMExecuteState.invoke_statement`.
+
+
+        """
+
+        if statement is None:
+            statement = self.statement
+
+        _bind_arguments = dict(self.bind_arguments)
+        if bind_arguments:
+            _bind_arguments.update(bind_arguments)
+        _bind_arguments["_sa_skip_events"] = True
+
+        _params: Optional[_CoreAnyExecuteParams]
+        if params:
+            if self.is_executemany:
+                _params = []
+                exec_many_parameters = cast(
+                    "List[Dict[str, Any]]", self.parameters
+                )
+                for _existing_params, _new_params in itertools.zip_longest(
+                    exec_many_parameters,
+                    cast("List[Dict[str, Any]]", params),
+                ):
+                    if _existing_params is None or _new_params is None:
+                        raise sa_exc.InvalidRequestError(
+                            f"Can't apply executemany parameters to "
+                            f"statement; number of parameter sets passed to "
+                            f"Session.execute() ({len(exec_many_parameters)}) "
+                            f"does not match number of parameter sets given "
+                            f"to ORMExecuteState.invoke_statement() "
+                            f"({len(params)})"
+                        )
+                    _existing_params = dict(_existing_params)
+                    _existing_params.update(_new_params)
+                    _params.append(_existing_params)
+            else:
+                _params = dict(cast("Dict[str, Any]", self.parameters))
+                _params.update(cast("Dict[str, Any]", params))
+        else:
+            _params = self.parameters
+
+        _execution_options = self.local_execution_options
+        if execution_options:
+            _execution_options = _execution_options.union(execution_options)
+
+        return self.session._execute_internal(
+            statement,
+            _params,
+            execution_options=_execution_options,
+            bind_arguments=_bind_arguments,
+            _parent_execute_state=self,
+        )
+
+    @property
+    def bind_mapper(self) -> Optional[Mapper[Any]]:
+        """Return the :class:`_orm.Mapper` that is the primary "bind" mapper.
+
+        For an :class:`_orm.ORMExecuteState` object invoking an ORM
+        statement, that is, the :attr:`_orm.ORMExecuteState.is_orm_statement`
+        attribute is ``True``, this attribute will return the
+        :class:`_orm.Mapper` that is considered to be the "primary" mapper
+        of the statement.   The term "bind mapper" refers to the fact that
+        a :class:`_orm.Session` object may be "bound" to multiple
+        :class:`_engine.Engine` objects keyed to mapped classes, and the
+        "bind mapper" determines which of those :class:`_engine.Engine` objects
+        would be selected.
+
+        For a statement that is invoked against a single mapped class,
+        :attr:`_orm.ORMExecuteState.bind_mapper` is intended to be a reliable
+        way of getting this mapper.
+
+        .. versionadded:: 1.4.0b2
+
+        .. seealso::
+
+            :attr:`_orm.ORMExecuteState.all_mappers`
+
+
+        """
+        mp: Optional[Mapper[Any]] = self.bind_arguments.get("mapper", None)
+        return mp
+
+    @property
+    def all_mappers(self) -> Sequence[Mapper[Any]]:
+        """Return a sequence of all :class:`_orm.Mapper` objects that are
+        involved at the top level of this statement.
+
+        By "top level" we mean those :class:`_orm.Mapper` objects that would
+        be represented in the result set rows for a :func:`_sql.select`
+        query, or for a :func:`_dml.update` or :func:`_dml.delete` query,
+        the mapper that is the main subject of the UPDATE or DELETE.
+
+        .. versionadded:: 1.4.0b2
+
+        .. seealso::
+
+            :attr:`_orm.ORMExecuteState.bind_mapper`
+
+
+
+        """
+        if not self.is_orm_statement:
+            return []
+        elif isinstance(self.statement, (Select, FromStatement)):
+            result = []
+            seen = set()
+            for d in self.statement.column_descriptions:
+                ent = d["entity"]
+                if ent:
+                    insp = inspect(ent, raiseerr=False)
+                    if insp and insp.mapper and insp.mapper not in seen:
+                        seen.add(insp.mapper)
+                        result.append(insp.mapper)
+            return result
+        elif self.statement.is_dml and self.bind_mapper:
+            return [self.bind_mapper]
+        else:
+            return []
+
+    @property
+    def is_orm_statement(self) -> bool:
+        """return True if the operation is an ORM statement.
+
+        This indicates that the select(), insert(), update(), or delete()
+        being invoked contains ORM entities as subjects.   For a statement
+        that does not have ORM entities and instead refers only to
+        :class:`.Table` metadata, it is invoked as a Core SQL statement
+        and no ORM-level automation takes place.
+
+        """
+        return self._compile_state_cls is not None
+
+    @property
+    def is_executemany(self) -> bool:
+        """return True if the parameters are a multi-element list of
+        dictionaries with more than one dictionary.
+
+        .. versionadded:: 2.0
+
+        """
+        return isinstance(self.parameters, list)
+
+    @property
+    def is_select(self) -> bool:
+        """return True if this is a SELECT operation.
+
+        .. versionchanged:: 2.0.30 - the attribute is also True for a
+           :meth:`_sql.Select.from_statement` construct that is itself against
+           a :class:`_sql.Select` construct, such as
+           ``select(Entity).from_statement(select(..))``
+
+        """
+        return self.statement.is_select
+
+    @property
+    def is_from_statement(self) -> bool:
+        """return True if this operation is a
+        :meth:`_sql.Select.from_statement` operation.
+
+        This is independent from :attr:`_orm.ORMExecuteState.is_select`, as a
+        ``select().from_statement()`` construct can be used with
+        INSERT/UPDATE/DELETE RETURNING types of statements as well.
+        :attr:`_orm.ORMExecuteState.is_select` will only be set if the
+        :meth:`_sql.Select.from_statement` is itself against a
+        :class:`_sql.Select` construct.
+
+        .. versionadded:: 2.0.30
+
+        """
+        return self.statement.is_from_statement
+
+    @property
+    def is_insert(self) -> bool:
+        """return True if this is an INSERT operation.
+
+        .. versionchanged:: 2.0.30 - the attribute is also True for a
+           :meth:`_sql.Select.from_statement` construct that is itself against
+           a :class:`_sql.Insert` construct, such as
+           ``select(Entity).from_statement(insert(..))``
+
+        """
+        return self.statement.is_dml and self.statement.is_insert
+
+    @property
+    def is_update(self) -> bool:
+        """return True if this is an UPDATE operation.
+
+        .. versionchanged:: 2.0.30 - the attribute is also True for a
+           :meth:`_sql.Select.from_statement` construct that is itself against
+           a :class:`_sql.Update` construct, such as
+           ``select(Entity).from_statement(update(..))``
+
+        """
+        return self.statement.is_dml and self.statement.is_update
+
+    @property
+    def is_delete(self) -> bool:
+        """return True if this is a DELETE operation.
+
+        .. versionchanged:: 2.0.30 - the attribute is also True for a
+           :meth:`_sql.Select.from_statement` construct that is itself against
+           a :class:`_sql.Delete` construct, such as
+           ``select(Entity).from_statement(delete(..))``
+
+        """
+        return self.statement.is_dml and self.statement.is_delete
+
+    @property
+    def _is_crud(self) -> bool:
+        return isinstance(self.statement, (dml.Update, dml.Delete))
+
+    def update_execution_options(self, **opts: Any) -> None:
+        """Update the local execution options with new values."""
+        self.local_execution_options = self.local_execution_options.union(opts)
+
+    def _orm_compile_options(
+        self,
+    ) -> Optional[
+        Union[
+            context.ORMCompileState.default_compile_options,
+            Type[context.ORMCompileState.default_compile_options],
+        ]
+    ]:
+        if not self.is_select:
+            return None
+        try:
+            opts = self.statement._compile_options
+        except AttributeError:
+            return None
+
+        if opts is not None and opts.isinstance(
+            context.ORMCompileState.default_compile_options
+        ):
+            return opts  # type: ignore
+        else:
+            return None
+
+    @property
+    def lazy_loaded_from(self) -> Optional[InstanceState[Any]]:
+        """An :class:`.InstanceState` that is using this statement execution
+        for a lazy load operation.
+
+        The primary rationale for this attribute is to support the horizontal
+        sharding extension, where it is available within specific query
+        execution time hooks created by this extension.   To that end, the
+        attribute is only intended to be meaningful at **query execution
+        time**, and importantly not any time prior to that, including query
+        compilation time.
+
+        """
+        return self.load_options._lazy_loaded_from
+
+    @property
+    def loader_strategy_path(self) -> Optional[PathRegistry]:
+        """Return the :class:`.PathRegistry` for the current load path.
+
+        This object represents the "path" in a query along relationships
+        when a particular object or collection is being loaded.
+
+        """
+        opts = self._orm_compile_options()
+        if opts is not None:
+            return opts._current_path
+        else:
+            return None
+
+    @property
+    def is_column_load(self) -> bool:
+        """Return True if the operation is refreshing column-oriented
+        attributes on an existing ORM object.
+
+        This occurs during operations such as :meth:`_orm.Session.refresh`,
+        as well as when an attribute deferred by :func:`_orm.defer` is
+        being loaded, or an attribute that was expired either directly
+        by :meth:`_orm.Session.expire` or via a commit operation is being
+        loaded.
+
+        Handlers will very likely not want to add any options to queries
+        when such an operation is occurring as the query should be a straight
+        primary key fetch which should not have any additional WHERE criteria,
+        and loader options travelling with the instance
+        will have already been added to the query.
+
+        .. versionadded:: 1.4.0b2
+
+        .. seealso::
+
+            :attr:`_orm.ORMExecuteState.is_relationship_load`
+
+        """
+        opts = self._orm_compile_options()
+        return opts is not None and opts._for_refresh_state
+
+    @property
+    def is_relationship_load(self) -> bool:
+        """Return True if this load is loading objects on behalf of a
+        relationship.
+
+        This means, the loader in effect is either a LazyLoader,
+        SelectInLoader, SubqueryLoader, or similar, and the entire
+        SELECT statement being emitted is on behalf of a relationship
+        load.
+
+        Handlers will very likely not want to add any options to queries
+        when such an operation is occurring, as loader options are already
+        capable of being propagated to relationship loaders and should
+        be already present.
+
+        .. seealso::
+
+            :attr:`_orm.ORMExecuteState.is_column_load`
+
+        """
+        opts = self._orm_compile_options()
+        if opts is None:
+            return False
+        path = self.loader_strategy_path
+        return path is not None and not path.is_root
+
+    @property
+    def load_options(
+        self,
+    ) -> Union[
+        context.QueryContext.default_load_options,
+        Type[context.QueryContext.default_load_options],
+    ]:
+        """Return the load_options that will be used for this execution."""
+
+        if not self.is_select:
+            raise sa_exc.InvalidRequestError(
+                "This ORM execution is not against a SELECT statement "
+                "so there are no load options."
+            )
+
+        lo: Union[
+            context.QueryContext.default_load_options,
+            Type[context.QueryContext.default_load_options],
+        ] = self.execution_options.get(
+            "_sa_orm_load_options", context.QueryContext.default_load_options
+        )
+        return lo
+
+    @property
+    def update_delete_options(
+        self,
+    ) -> Union[
+        bulk_persistence.BulkUDCompileState.default_update_options,
+        Type[bulk_persistence.BulkUDCompileState.default_update_options],
+    ]:
+        """Return the update_delete_options that will be used for this
+        execution."""
+
+        if not self._is_crud:
+            raise sa_exc.InvalidRequestError(
+                "This ORM execution is not against an UPDATE or DELETE "
+                "statement so there are no update options."
+            )
+        uo: Union[
+            bulk_persistence.BulkUDCompileState.default_update_options,
+            Type[bulk_persistence.BulkUDCompileState.default_update_options],
+        ] = self.execution_options.get(
+            "_sa_orm_update_options",
+            bulk_persistence.BulkUDCompileState.default_update_options,
+        )
+        return uo
+
+    @property
+    def _non_compile_orm_options(self) -> Sequence[ORMOption]:
+        return [
+            opt
+            for opt in self.statement._with_options
+            if is_orm_option(opt) and not opt._is_compile_state
+        ]
+
+    @property
+    def user_defined_options(self) -> Sequence[UserDefinedOption]:
+        """The sequence of :class:`.UserDefinedOptions` that have been
+        associated with the statement being invoked.
+
+        """
+        return [
+            opt
+            for opt in self.statement._with_options
+            if is_user_defined_option(opt)
+        ]
+
+
+class SessionTransactionOrigin(Enum):
+    """indicates the origin of a :class:`.SessionTransaction`.
+
+    This enumeration is present on the
+    :attr:`.SessionTransaction.origin` attribute of any
+    :class:`.SessionTransaction` object.
+
+    .. versionadded:: 2.0
+
+    """
+
+    AUTOBEGIN = 0
+    """transaction were started by autobegin"""
+
+    BEGIN = 1
+    """transaction were started by calling :meth:`_orm.Session.begin`"""
+
+    BEGIN_NESTED = 2
+    """tranaction were started by :meth:`_orm.Session.begin_nested`"""
+
+    SUBTRANSACTION = 3
+    """transaction is an internal "subtransaction" """
+
+
+class SessionTransaction(_StateChange, TransactionalContext):
+    """A :class:`.Session`-level transaction.
+
+    :class:`.SessionTransaction` is produced from the
+    :meth:`_orm.Session.begin`
+    and :meth:`_orm.Session.begin_nested` methods.   It's largely an internal
+    object that in modern use provides a context manager for session
+    transactions.
+
+    Documentation on interacting with :class:`_orm.SessionTransaction` is
+    at: :ref:`unitofwork_transaction`.
+
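+    E.g., a minimal sketch using the context-manager form, assuming
+    ``session`` and ``some_object`` are defined elsewhere::
+
+        with session.begin():
+            session.add(some_object)
+        # commits on success; rolls back if the block raises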
+
+    .. versionchanged:: 1.4  The scoping and API methods to work with the
+       :class:`_orm.SessionTransaction` object directly have been simplified.
+
+    .. seealso::
+
+        :ref:`unitofwork_transaction`
+
+        :meth:`.Session.begin`
+
+        :meth:`.Session.begin_nested`
+
+        :meth:`.Session.rollback`
+
+        :meth:`.Session.commit`
+
+        :meth:`.Session.in_transaction`
+
+        :meth:`.Session.in_nested_transaction`
+
+        :meth:`.Session.get_transaction`
+
+        :meth:`.Session.get_nested_transaction`
+
+
+    """
+
+    _rollback_exception: Optional[BaseException] = None
+
+    _connections: Dict[
+        Union[Engine, Connection], Tuple[Connection, Transaction, bool, bool]
+    ]
+    session: Session
+    _parent: Optional[SessionTransaction]
+
+    _state: SessionTransactionState
+
+    _new: weakref.WeakKeyDictionary[InstanceState[Any], object]
+    _deleted: weakref.WeakKeyDictionary[InstanceState[Any], object]
+    _dirty: weakref.WeakKeyDictionary[InstanceState[Any], object]
+    _key_switches: weakref.WeakKeyDictionary[
+        InstanceState[Any], Tuple[Any, Any]
+    ]
+
+    origin: SessionTransactionOrigin
+    """Origin of this :class:`_orm.SessionTransaction`.
+
+    Refers to a :class:`.SessionTransactionOrigin` instance which is an
+    enumeration indicating the source event that led to constructing
+    this :class:`_orm.SessionTransaction`.
+
+    .. versionadded:: 2.0
+
+    """
+
+    nested: bool = False
+    """Indicates if this is a nested, or SAVEPOINT, transaction.
+
+    When :attr:`.SessionTransaction.nested` is True, it is expected
+    that :attr:`.SessionTransaction.parent` will be present as well,
+    linking to the enclosing :class:`.SessionTransaction`.
+
+    .. seealso::
+
+        :attr:`.SessionTransaction.origin`
+
+    """
+
+    def __init__(
+        self,
+        session: Session,
+        origin: SessionTransactionOrigin,
+        parent: Optional[SessionTransaction] = None,
+    ):
+        TransactionalContext._trans_ctx_check(session)
+
+        self.session = session
+        self._connections = {}
+        self._parent = parent
+        self.nested = nested = origin is SessionTransactionOrigin.BEGIN_NESTED
+        self.origin = origin
+
+        if session._close_state is _SessionCloseState.CLOSED:
+            raise sa_exc.InvalidRequestError(
+                "This Session has been permanently closed and is unable "
+                "to handle any more transaction requests."
+            )
+
+        if nested:
+            if not parent:
+                raise sa_exc.InvalidRequestError(
+                    "Can't start a SAVEPOINT transaction when no existing "
+                    "transaction is in progress"
+                )
+
+            self._previous_nested_transaction = session._nested_transaction
+        elif origin is SessionTransactionOrigin.SUBTRANSACTION:
+            assert parent is not None
+        else:
+            assert parent is None
+
+        self._state = SessionTransactionState.ACTIVE
+
+        self._take_snapshot()
+
+        # make sure transaction is assigned before we call the
+        # dispatch
+        self.session._transaction = self
+
+        self.session.dispatch.after_transaction_create(self.session, self)
+
+    def _raise_for_prerequisite_state(
+        self, operation_name: str, state: _StateChangeState
+    ) -> NoReturn:
+        if state is SessionTransactionState.DEACTIVE:
+            if self._rollback_exception:
+                raise sa_exc.PendingRollbackError(
+                    "This Session's transaction has been rolled back "
+                    "due to a previous exception during flush."
+                    " To begin a new transaction with this Session, "
+                    "first issue Session.rollback()."
+                    f" Original exception was: {self._rollback_exception}",
+                    code="7s2a",
+                )
+            else:
+                raise sa_exc.InvalidRequestError(
+                    "This session is in 'inactive' state, due to the "
+                    "SQL transaction being rolled back; no further SQL "
+                    "can be emitted within this transaction."
+                )
+        elif state is SessionTransactionState.CLOSED:
+            raise sa_exc.ResourceClosedError("This transaction is closed")
+        elif state is SessionTransactionState.PROVISIONING_CONNECTION:
+            raise sa_exc.InvalidRequestError(
+                "This session is provisioning a new connection; concurrent "
+                "operations are not permitted",
+                code="isce",
+            )
+        else:
+            raise sa_exc.InvalidRequestError(
+                f"This session is in '{state.name.lower()}' state; no "
+                "further SQL can be emitted within this transaction."
+            )
+
+    @property
+    def parent(self) -> Optional[SessionTransaction]:
+        """The parent :class:`.SessionTransaction` of this
+        :class:`.SessionTransaction`.
+
+        If this attribute is ``None``, indicates this
+        :class:`.SessionTransaction` is at the top of the stack, and
+        corresponds to a real "COMMIT"/"ROLLBACK"
+        block.  If non-``None``, then this is either a "subtransaction"
+        (an internal marker object used by the flush process) or a
+        "nested" / SAVEPOINT transaction.  If the
+        :attr:`.SessionTransaction.nested` attribute is ``True``, then
+        this is a SAVEPOINT, and if ``False``, indicates this is a subtransaction.
+
+        """
+        return self._parent
+
+    @property
+    def is_active(self) -> bool:
+        return (
+            self.session is not None
+            and self._state is SessionTransactionState.ACTIVE
+        )
+
+    @property
+    def _is_transaction_boundary(self) -> bool:
+        return self.nested or not self._parent
+
+    @_StateChange.declare_states(
+        (SessionTransactionState.ACTIVE,), _StateChangeStates.NO_CHANGE
+    )
+    def connection(
+        self,
+        bindkey: Optional[Mapper[Any]],
+        execution_options: Optional[_ExecuteOptions] = None,
+        **kwargs: Any,
+    ) -> Connection:
+        bind = self.session.get_bind(bindkey, **kwargs)
+        return self._connection_for_bind(bind, execution_options)
+
+    @_StateChange.declare_states(
+        (SessionTransactionState.ACTIVE,), _StateChangeStates.NO_CHANGE
+    )
+    def _begin(self, nested: bool = False) -> SessionTransaction:
+        return SessionTransaction(
+            self.session,
+            (
+                SessionTransactionOrigin.BEGIN_NESTED
+                if nested
+                else SessionTransactionOrigin.SUBTRANSACTION
+            ),
+            self,
+        )
+
+    def _iterate_self_and_parents(
+        self, upto: Optional[SessionTransaction] = None
+    ) -> Iterable[SessionTransaction]:
+        current = self
+        result: Tuple[SessionTransaction, ...] = ()
+        while current:
+            result += (current,)
+            if current._parent is upto:
+                break
+            elif current._parent is None:
+                raise sa_exc.InvalidRequestError(
+                    "Transaction %s is not on the active transaction list"
+                    % (upto)
+                )
+            else:
+                current = current._parent
+
+        return result
+
+    def _take_snapshot(self) -> None:
+        if not self._is_transaction_boundary:
+            parent = self._parent
+            assert parent is not None
+            self._new = parent._new
+            self._deleted = parent._deleted
+            self._dirty = parent._dirty
+            self._key_switches = parent._key_switches
+            return
+
+        is_begin = self.origin in (
+            SessionTransactionOrigin.BEGIN,
+            SessionTransactionOrigin.AUTOBEGIN,
+        )
+        if not is_begin and not self.session._flushing:
+            self.session.flush()
+
+        self._new = weakref.WeakKeyDictionary()
+        self._deleted = weakref.WeakKeyDictionary()
+        self._dirty = weakref.WeakKeyDictionary()
+        self._key_switches = weakref.WeakKeyDictionary()
+
+    def _restore_snapshot(self, dirty_only: bool = False) -> None:
+        """Restore the restoration state taken before a transaction began.
+
+        Corresponds to a rollback.
+
+        """
+        assert self._is_transaction_boundary
+
+        to_expunge = set(self._new).union(self.session._new)
+        self.session._expunge_states(to_expunge, to_transient=True)
+
+        for s, (oldkey, newkey) in self._key_switches.items():
+            # we probably can do this conditionally based on
+            # if we expunged or not, but safe_discard does that anyway
+            self.session.identity_map.safe_discard(s)
+
+            # restore the old key
+            s.key = oldkey
+
+            # now restore the object, but only if we didn't expunge
+            if s not in to_expunge:
+                self.session.identity_map.replace(s)
+
+        for s in set(self._deleted).union(self.session._deleted):
+            self.session._update_impl(s, revert_deletion=True)
+
+        assert not self.session._deleted
+
+        for s in self.session.identity_map.all_states():
+            if not dirty_only or s.modified or s in self._dirty:
+                s._expire(s.dict, self.session.identity_map._modified)
+
+    def _remove_snapshot(self) -> None:
+        """Remove the restoration state taken before a transaction began.
+
+        Corresponds to a commit.
+
+        """
+        assert self._is_transaction_boundary
+
+        if not self.nested and self.session.expire_on_commit:
+            for s in self.session.identity_map.all_states():
+                s._expire(s.dict, self.session.identity_map._modified)
+
+            statelib.InstanceState._detach_states(
+                list(self._deleted), self.session
+            )
+            self._deleted.clear()
+        elif self.nested:
+            parent = self._parent
+            assert parent is not None
+            parent._new.update(self._new)
+            parent._dirty.update(self._dirty)
+            parent._deleted.update(self._deleted)
+            parent._key_switches.update(self._key_switches)
+
+    @_StateChange.declare_states(
+        (SessionTransactionState.ACTIVE,), _StateChangeStates.NO_CHANGE
+    )
+    def _connection_for_bind(
+        self,
+        bind: _SessionBind,
+        execution_options: Optional[CoreExecuteOptionsParameter],
+    ) -> Connection:
+        if bind in self._connections:
+            if execution_options:
+                util.warn(
+                    "Connection is already established for the "
+                    "given bind; execution_options ignored"
+                )
+            return self._connections[bind][0]
+
+        self._state = SessionTransactionState.PROVISIONING_CONNECTION
+
+        local_connect = False
+        should_commit = True
+
+        try:
+            if self._parent:
+                conn = self._parent._connection_for_bind(
+                    bind, execution_options
+                )
+                if not self.nested:
+                    return conn
+            else:
+                if isinstance(bind, engine.Connection):
+                    conn = bind
+                    if conn.engine in self._connections:
+                        raise sa_exc.InvalidRequestError(
+                            "Session already has a Connection associated "
+                            "for the given Connection's Engine"
+                        )
+                else:
+                    conn = bind.connect()
+                    local_connect = True
+
+            try:
+                if execution_options:
+                    conn = conn.execution_options(**execution_options)
+
+                transaction: Transaction
+                if self.session.twophase and self._parent is None:
+                    # TODO: shouldn't we only be here if not
+                    # conn.in_transaction() ?
+                    # if twophase is set and conn.in_transaction(), validate
+                    # that it is in fact twophase.
+                    transaction = conn.begin_twophase()
+                elif self.nested:
+                    transaction = conn.begin_nested()
+                elif conn.in_transaction():
+                    join_transaction_mode = self.session.join_transaction_mode
+
+                    if join_transaction_mode == "conditional_savepoint":
+                        if conn.in_nested_transaction():
+                            join_transaction_mode = "create_savepoint"
+                        else:
+                            join_transaction_mode = "rollback_only"
+
+                        if local_connect:
+                            util.warn(
+                                "The engine provided as bind produced a "
+                                "connection that is already in a transaction. "
+                                "This is usually caused by a core event, "
+                                "such as 'engine_connect', that has left a "
+                                "transaction open. The effective join "
+                                "transaction mode used by this session is "
+                                f"{join_transaction_mode!r}. To silence this "
+                                "warning, do not leave transactions open"
+                            )
+                    if join_transaction_mode in (
+                        "control_fully",
+                        "rollback_only",
+                    ):
+                        if conn.in_nested_transaction():
+                            transaction = (
+                                conn._get_required_nested_transaction()
+                            )
+                        else:
+                            transaction = conn._get_required_transaction()
+                        if join_transaction_mode == "rollback_only":
+                            should_commit = False
+                    elif join_transaction_mode == "create_savepoint":
+                        transaction = conn.begin_nested()
+                    else:
+                        assert False, join_transaction_mode
+                else:
+                    transaction = conn.begin()
+            except:
+                # connection will not be associated with this Session;
+                # close it immediately so that it isn't closed under GC
+                if local_connect:
+                    conn.close()
+                raise
+            else:
+                bind_is_connection = isinstance(bind, engine.Connection)
+
+                self._connections[conn] = self._connections[conn.engine] = (
+                    conn,
+                    transaction,
+                    should_commit,
+                    not bind_is_connection,
+                )
+                self.session.dispatch.after_begin(self.session, self, conn)
+                return conn
+        finally:
+            self._state = SessionTransactionState.ACTIVE
+
+    def prepare(self) -> None:
+        if self._parent is not None or not self.session.twophase:
+            raise sa_exc.InvalidRequestError(
+                "'twophase' mode not enabled, or not root transaction; "
+                "can't prepare."
+            )
+        self._prepare_impl()
+
+    @_StateChange.declare_states(
+        (SessionTransactionState.ACTIVE,), SessionTransactionState.PREPARED
+    )
+    def _prepare_impl(self) -> None:
+        if self._parent is None or self.nested:
+            self.session.dispatch.before_commit(self.session)
+
+        stx = self.session._transaction
+        assert stx is not None
+        if stx is not self:
+            for subtransaction in stx._iterate_self_and_parents(upto=self):
+                subtransaction.commit()
+
+        if not self.session._flushing:
+            for _flush_guard in range(100):
+                if self.session._is_clean():
+                    break
+                self.session.flush()
+            else:
+                raise exc.FlushError(
+                    "Over 100 subsequent flushes have occurred within "
+                    "session.commit() - is an after_flush() hook "
+                    "creating new objects?"
+                )
+
+        if self._parent is None and self.session.twophase:
+            try:
+                for t in set(self._connections.values()):
+                    cast("TwoPhaseTransaction", t[1]).prepare()
+            except:
+                with util.safe_reraise():
+                    self.rollback()
+
+        self._state = SessionTransactionState.PREPARED
+
+    @_StateChange.declare_states(
+        (SessionTransactionState.ACTIVE, SessionTransactionState.PREPARED),
+        SessionTransactionState.CLOSED,
+    )
+    def commit(self, _to_root: bool = False) -> None:
+        if self._state is not SessionTransactionState.PREPARED:
+            with self._expect_state(SessionTransactionState.PREPARED):
+                self._prepare_impl()
+
+        if self._parent is None or self.nested:
+            for conn, trans, should_commit, autoclose in set(
+                self._connections.values()
+            ):
+                if should_commit:
+                    trans.commit()
+
+            self._state = SessionTransactionState.COMMITTED
+            self.session.dispatch.after_commit(self.session)
+
+            self._remove_snapshot()
+
+        with self._expect_state(SessionTransactionState.CLOSED):
+            self.close()
+
+        if _to_root and self._parent:
+            self._parent.commit(_to_root=True)
+
+    @_StateChange.declare_states(
+        (
+            SessionTransactionState.ACTIVE,
+            SessionTransactionState.DEACTIVE,
+            SessionTransactionState.PREPARED,
+        ),
+        SessionTransactionState.CLOSED,
+    )
+    def rollback(
+        self, _capture_exception: bool = False, _to_root: bool = False
+    ) -> None:
+        stx = self.session._transaction
+        assert stx is not None
+        if stx is not self:
+            for subtransaction in stx._iterate_self_and_parents(upto=self):
+                subtransaction.close()
+
+        boundary = self
+        rollback_err = None
+        if self._state in (
+            SessionTransactionState.ACTIVE,
+            SessionTransactionState.PREPARED,
+        ):
+            for transaction in self._iterate_self_and_parents():
+                if transaction._parent is None or transaction.nested:
+                    try:
+                        for t in set(transaction._connections.values()):
+                            t[1].rollback()
+
+                        transaction._state = SessionTransactionState.DEACTIVE
+                        self.session.dispatch.after_rollback(self.session)
+                    except:
+                        rollback_err = sys.exc_info()
+                    finally:
+                        transaction._state = SessionTransactionState.DEACTIVE
+                        transaction._restore_snapshot(
+                            dirty_only=transaction.nested
+                        )
+                    boundary = transaction
+                    break
+                else:
+                    transaction._state = SessionTransactionState.DEACTIVE
+
+        sess = self.session
+
+        if not rollback_err and not sess._is_clean():
+            # if items were added, deleted, or mutated
+            # here, we need to re-restore the snapshot
+            util.warn(
+                "Session's state has been changed on "
+                "a non-active transaction - this state "
+                "will be discarded."
+            )
+            boundary._restore_snapshot(dirty_only=boundary.nested)
+
+        with self._expect_state(SessionTransactionState.CLOSED):
+            self.close()
+
+        if self._parent and _capture_exception:
+            self._parent._rollback_exception = sys.exc_info()[1]
+
+        if rollback_err and rollback_err[1]:
+            raise rollback_err[1].with_traceback(rollback_err[2])
+
+        sess.dispatch.after_soft_rollback(sess, self)
+
+        if _to_root and self._parent:
+            self._parent.rollback(_to_root=True)
+
+    @_StateChange.declare_states(
+        _StateChangeStates.ANY, SessionTransactionState.CLOSED
+    )
+    def close(self, invalidate: bool = False) -> None:
+        if self.nested:
+            self.session._nested_transaction = (
+                self._previous_nested_transaction
+            )
+
+        self.session._transaction = self._parent
+
+        for connection, transaction, should_commit, autoclose in set(
+            self._connections.values()
+        ):
+            if invalidate and self._parent is None:
+                connection.invalidate()
+            if should_commit and transaction.is_active:
+                transaction.close()
+            if autoclose and self._parent is None:
+                connection.close()
+
+        self._state = SessionTransactionState.CLOSED
+        sess = self.session
+
+        # TODO: these two None sets were historically after the
+        # event hook below, and in 2.0 I changed it this way for a
+        # reason I can no longer remember.  Why do we need to get rid
+        # of them at all?  test_memusage::CycleTest passes with these
+        # commented out.
+        # self.session = None  # type: ignore
+        # self._connections = None  # type: ignore
+
+        sess.dispatch.after_transaction_end(sess, self)
+
+    def _get_subject(self) -> Session:
+        return self.session
+
+    def _transaction_is_active(self) -> bool:
+        return self._state is SessionTransactionState.ACTIVE
+
+    def _transaction_is_closed(self) -> bool:
+        return self._state is SessionTransactionState.CLOSED
+
+    def _rollback_can_be_called(self) -> bool:
+        return self._state not in (COMMITTED, CLOSED)
+
+
+class _SessionCloseState(Enum):
+    ACTIVE = 1
+    CLOSED = 2
+    CLOSE_IS_RESET = 3
+
+
+class Session(_SessionClassMethods, EventTarget):
+    """Manages persistence operations for ORM-mapped objects.
+
+    The :class:`_orm.Session` is **not safe for use in concurrent threads**.
+    See :ref:`session_faq_threadsafe` for background.
+
+    The Session's usage paradigm is described at :doc:`/orm/session`.
+
+
+    """
+
+    _is_asyncio = False
+
+    dispatch: dispatcher[Session]
+
+    identity_map: IdentityMap
+    """A mapping of object identities to objects themselves.
+
+    Iterating through ``Session.identity_map.values()`` provides
+    access to the full set of persistent objects (i.e., those
+    that have row identity) currently in the session.
+
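+    For example, a minimal sketch of scanning the persistent set
+    (``session`` is an assumed :class:`.Session` instance)::
+
+        for obj in session.identity_map.values():
+            print(obj)
+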
+    .. seealso::
+
+        :func:`.identity_key` - helper function to produce the keys used
+        in this dictionary.
+
+    """
+
+    _new: Dict[InstanceState[Any], Any]
+    _deleted: Dict[InstanceState[Any], Any]
+    bind: Optional[Union[Engine, Connection]]
+    __binds: Dict[_SessionBindKey, _SessionBind]
+    _flushing: bool
+    _warn_on_events: bool
+    _transaction: Optional[SessionTransaction]
+    _nested_transaction: Optional[SessionTransaction]
+    hash_key: int
+    autoflush: bool
+    expire_on_commit: bool
+    enable_baked_queries: bool
+    twophase: bool
+    join_transaction_mode: JoinTransactionMode
+    _query_cls: Type[Query[Any]]
+    _close_state: _SessionCloseState
+
+    def __init__(
+        self,
+        bind: Optional[_SessionBind] = None,
+        *,
+        autoflush: bool = True,
+        future: Literal[True] = True,
+        expire_on_commit: bool = True,
+        autobegin: bool = True,
+        twophase: bool = False,
+        binds: Optional[Dict[_SessionBindKey, _SessionBind]] = None,
+        enable_baked_queries: bool = True,
+        info: Optional[_InfoType] = None,
+        query_cls: Optional[Type[Query[Any]]] = None,
+        autocommit: Literal[False] = False,
+        join_transaction_mode: JoinTransactionMode = "conditional_savepoint",
+        close_resets_only: Union[bool, _NoArg] = _NoArg.NO_ARG,
+    ):
+        r"""Construct a new :class:`_orm.Session`.
+
+        See also the :class:`.sessionmaker` function which is used to
+        generate a :class:`.Session`-producing callable with a given
+        set of arguments.
+
+        :param autoflush: When ``True``, all query operations will issue a
+           :meth:`~.Session.flush` call to this ``Session`` before proceeding.
+           This is a convenience feature so that :meth:`~.Session.flush` need
+           not be called repeatedly in order for database queries to retrieve
+           results.
+
+           .. seealso::
+
+               :ref:`session_flushing` - additional background on autoflush
+
+        :param autobegin: Automatically start transactions (i.e. equivalent to
+           invoking :meth:`_orm.Session.begin`) when database access is
+           requested by an operation.   Defaults to ``True``.    Set to
+           ``False`` to prevent a :class:`_orm.Session` from implicitly
+           beginning transactions after construction, as well as after any of
+           the :meth:`_orm.Session.rollback`, :meth:`_orm.Session.commit`,
+           or :meth:`_orm.Session.close` methods are called.
+
+           .. versionadded:: 2.0
+
+           .. seealso::
+
+                :ref:`session_autobegin_disable`
+
+        :param bind: An optional :class:`_engine.Engine` or
+           :class:`_engine.Connection` to
+           which this ``Session`` should be bound. When specified, all SQL
+           operations performed by this session will execute via this
+           connectable.
+
+        :param binds: A dictionary which may specify any number of
+           :class:`_engine.Engine` or :class:`_engine.Connection`
+           objects as the source of
+           connectivity for SQL operations on a per-entity basis.   The keys
+           of the dictionary consist of any series of mapped classes,
+           arbitrary Python classes that are bases for mapped classes,
+           :class:`_schema.Table` objects and :class:`_orm.Mapper` objects.
+           The
+           values of the dictionary are then instances of
+           :class:`_engine.Engine`
+           or less commonly :class:`_engine.Connection` objects.
+           Operations which
+           proceed relative to a particular mapped class will consult this
+           dictionary for the closest matching entity in order to determine
+           which :class:`_engine.Engine` should be used for a particular SQL
+           operation.    The complete heuristics for resolution are
+           described at :meth:`.Session.get_bind`.  Usage looks like::
+
+            Session = sessionmaker(
+                binds={
+                    SomeMappedClass: create_engine("postgresql+psycopg2://engine1"),
+                    SomeDeclarativeBase: create_engine(
+                        "postgresql+psycopg2://engine2"
+                    ),
+                    some_mapper: create_engine("postgresql+psycopg2://engine3"),
+                    some_table: create_engine("postgresql+psycopg2://engine4"),
+                }
+            )
+
+           .. seealso::
+
+                :ref:`session_partitioning`
+
+                :meth:`.Session.bind_mapper`
+
+                :meth:`.Session.bind_table`
+
+                :meth:`.Session.get_bind`
+
+
+        :param \class_: Specify an alternate class other than
+           ``sqlalchemy.orm.session.Session`` which should be used by the
+           returned class. This is the only argument that is local to the
+           :class:`.sessionmaker` function, and is not sent directly to the
+           constructor for ``Session``.
+
+        :param enable_baked_queries: legacy; defaults to ``True``.
+           A parameter consumed
+           by the :mod:`sqlalchemy.ext.baked` extension to determine if
+           "baked queries" should be cached, as is the normal operation
+           of this extension.  When set to ``False``, caching as used by
+           this particular extension is disabled.
+
+           .. versionchanged:: 1.4 The ``sqlalchemy.ext.baked`` extension is
+              legacy and is not used by any of SQLAlchemy's internals. This
+              flag therefore only affects applications that are making explicit
+              use of this extension within their own code.
+
+        :param expire_on_commit:  Defaults to ``True``. When ``True``, all
+           instances will be fully expired after each :meth:`~.commit`,
+           so that all attribute/object access subsequent to a completed
+           transaction will load from the most recent database state.
+
+           .. seealso::
+
+               :ref:`session_committing`
+
+        :param future: Deprecated; this flag is always True.
+
+          .. seealso::
+
+            :ref:`migration_20_toplevel`
+
+        :param info: optional dictionary of arbitrary data to be associated
+           with this :class:`.Session`.  Is available via the
+           :attr:`.Session.info` attribute.  Note the dictionary is copied at
+           construction time so that modifications to the per-
+           :class:`.Session` dictionary will be local to that
+           :class:`.Session`.
+
+        :param query_cls:  Class which should be used to create new Query
+          objects, as returned by the :meth:`~.Session.query` method.
+          Defaults to :class:`_query.Query`.
+
+        :param twophase:  When ``True``, all transactions will be started as
+            a "two phase" transaction, i.e. using the "two phase" semantics
+            of the database in use along with an XID.  During a
+            :meth:`~.commit`, after :meth:`~.flush` has been issued for all
+            attached databases, the :meth:`~.TwoPhaseTransaction.prepare`
+            method on each database's :class:`.TwoPhaseTransaction` will be
+            called. This allows each database to roll back the entire
+            transaction before each transaction is committed.
+
+        :param autocommit: the "autocommit" keyword is present for backwards
+            compatibility but must remain at its default value of ``False``.
+
+        :param join_transaction_mode: Describes the transactional behavior to
+          take when a given bind is a :class:`_engine.Connection` that
+          has already begun a transaction outside the scope of this
+          :class:`_orm.Session`; in other words the
+          :meth:`_engine.Connection.in_transaction()` method returns True.
+
+          The following behaviors only take effect when the :class:`_orm.Session`
+          **actually makes use of the connection given**; that is, a method
+          such as :meth:`_orm.Session.execute`, :meth:`_orm.Session.connection`,
+          etc. is actually invoked:
+
+          * ``"conditional_savepoint"`` - this is the default.  if the given
+            :class:`_engine.Connection` is begun within a transaction but
+            does not have a SAVEPOINT, then ``"rollback_only"`` is used.
+            If the :class:`_engine.Connection` is additionally within
+            a SAVEPOINT, in other words the
+            :meth:`_engine.Connection.in_nested_transaction()` method returns
+            True, then ``"create_savepoint"`` is used.
+
+            ``"conditional_savepoint"`` behavior attempts to make use of
+            savepoints in order to keep the state of the existing transaction
+            unchanged, but only if there is already a savepoint in progress;
+            otherwise, it is not assumed that the backend in use has adequate
+            support for SAVEPOINT, as availability of this feature varies.
+            ``"conditional_savepoint"`` also seeks to establish approximate
+            backwards compatibility with previous :class:`_orm.Session`
+            behavior, for applications that are not setting a specific mode. It
+            is recommended that one of the explicit settings be used.
+
+          * ``"create_savepoint"`` - the :class:`_orm.Session` will use
+            :meth:`_engine.Connection.begin_nested()` in all cases to create
+            its own transaction.  This transaction by its nature rides
+            "on top" of any existing transaction that's opened on the given
+            :class:`_engine.Connection`; if the underlying database and
+            the driver in use has full, non-broken support for SAVEPOINT, the
+            external transaction will remain unaffected throughout the
+            lifespan of the :class:`_orm.Session`.
+
+            The ``"create_savepoint"`` mode is the most useful for integrating
+            a :class:`_orm.Session` into a test suite where an externally
+            initiated transaction should remain unaffected; however, it relies
+            on proper SAVEPOINT support from the underlying driver and
+            database.
+
+            .. tip:: When using SQLite, the SQLite driver included through
+               Python 3.11 does not handle SAVEPOINTs correctly in all cases
+               without workarounds. See the sections
+               :ref:`pysqlite_serializable` and :ref:`aiosqlite_serializable`
+               for details on current workarounds.
+
+          * ``"control_fully"`` - the :class:`_orm.Session` will take
+            control of the given transaction as its own;
+            :meth:`_orm.Session.commit` will call ``.commit()`` on the
+            transaction, :meth:`_orm.Session.rollback` will call
+            ``.rollback()`` on the transaction, :meth:`_orm.Session.close` will
+            call ``.rollback()`` on the transaction.
+
+            .. tip:: This mode of use is equivalent to how SQLAlchemy 1.4 would
+               handle a :class:`_engine.Connection` given with an existing
+               SAVEPOINT (i.e. :meth:`_engine.Connection.begin_nested`); the
+               :class:`_orm.Session` would take full control of the existing
+               SAVEPOINT.
+
+          * ``"rollback_only"`` - the :class:`_orm.Session` will take control
+            of the given transaction for ``.rollback()`` calls only;
+            ``.commit()`` calls will not be propagated to the given
+            transaction.  ``.close()`` calls will have no effect on the
+            given transaction.
+
+            .. tip:: This mode of use is equivalent to how SQLAlchemy 1.4 would
+               handle a :class:`_engine.Connection` given with an existing
+               regular database transaction (i.e.
+               :meth:`_engine.Connection.begin`); the :class:`_orm.Session`
+               would propagate :meth:`_orm.Session.rollback` calls to the
+               underlying transaction, but not :meth:`_orm.Session.commit` or
+               :meth:`_orm.Session.close` calls.
+
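+          For example, a hedged sketch of the typical test-suite pattern
+          using ``"create_savepoint"``, where ``connection`` is assumed to
+          be an externally-begun :class:`_engine.Connection`::
+
+              session = Session(
+                  bind=connection,
+                  join_transaction_mode="create_savepoint",
+              )
+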
+          .. versionadded:: 2.0.0rc1
+
+        :param close_resets_only: Defaults to ``True``. Determines whether
+          the session should reset itself after calling ``.close()``, or
+          should instead be placed in a no-longer-usable state, disabling
+          re-use.
+
+          .. versionadded:: 2.0.22 added flag ``close_resets_only``.
+            A future SQLAlchemy version may change the default value of
+            this flag to ``False``.
+
+          .. seealso::
+
+            :ref:`session_closing` - Detail on the semantics of
+            :meth:`_orm.Session.close` and :meth:`_orm.Session.reset`.
+
+        """  # noqa
+
+        # considering allowing the "autocommit" keyword to still be accepted
+        # as long as it's False, so that external test suites, oslo.db, etc.
+        # continue to function, as the argument appears to be passed in lots
+        # of cases including in our own test suite
+        if autocommit:
+            raise sa_exc.ArgumentError(
+                "autocommit=True is no longer supported"
+            )
+        self.identity_map = identity.WeakInstanceDict()
+
+        if not future:
+            raise sa_exc.ArgumentError(
+                "The 'future' parameter passed to "
+                "Session() may only be set to True."
+            )
+
+        self._new = {}  # InstanceState->object, strong refs object
+        self._deleted = {}  # same
+        self.bind = bind
+        self.__binds = {}
+        self._flushing = False
+        self._warn_on_events = False
+        self._transaction = None
+        self._nested_transaction = None
+        self.hash_key = _new_sessionid()
+        self.autobegin = autobegin
+        self.autoflush = autoflush
+        self.expire_on_commit = expire_on_commit
+        self.enable_baked_queries = enable_baked_queries
+
+        # the idea is that at some point NO_ARG will warn that in the future
+        # the default will switch to close_resets_only=False.
+        if close_resets_only in (True, _NoArg.NO_ARG):
+            self._close_state = _SessionCloseState.CLOSE_IS_RESET
+        else:
+            self._close_state = _SessionCloseState.ACTIVE
+        if (
+            join_transaction_mode
+            and join_transaction_mode
+            not in JoinTransactionMode.__args__  # type: ignore
+        ):
+            raise sa_exc.ArgumentError(
+                f"invalid selection for join_transaction_mode: "
+                f'"{join_transaction_mode}"'
+            )
+        self.join_transaction_mode = join_transaction_mode
+
+        self.twophase = twophase
+        self._query_cls = query_cls if query_cls else query.Query
+        if info:
+            self.info.update(info)
+
+        if binds is not None:
+            for key, bind in binds.items():
+                self._add_bind(key, bind)
+
+        _sessions[self.hash_key] = self
+
+    # used by sqlalchemy.engine.util.TransactionalContext
+    _trans_context_manager: Optional[TransactionalContext] = None
+
+    connection_callable: Optional[_ConnectionCallableProto] = None
+
+    def __enter__(self: _S) -> _S:
+        return self
+
+    def __exit__(self, type_: Any, value: Any, traceback: Any) -> None:
+        self.close()
+
+    @contextlib.contextmanager
+    def _maker_context_manager(self: _S) -> Iterator[_S]:
+        with self:
+            with self.begin():
+                yield self
+
+    def in_transaction(self) -> bool:
+        """Return True if this :class:`_orm.Session` has begun a transaction.
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :attr:`_orm.Session.is_active`
+
+
+        """
+        return self._transaction is not None
+
+    def in_nested_transaction(self) -> bool:
+        """Return True if this :class:`_orm.Session` has begun a nested
+        transaction, e.g. SAVEPOINT.
+
+        .. versionadded:: 1.4
+
+        """
+        return self._nested_transaction is not None
+
+    def get_transaction(self) -> Optional[SessionTransaction]:
+        """Return the current root transaction in progress, if any.
+
+        .. versionadded:: 1.4
+
+        """
+        trans = self._transaction
+        while trans is not None and trans._parent is not None:
+            trans = trans._parent
+        return trans
+
+    def get_nested_transaction(self) -> Optional[SessionTransaction]:
+        """Return the current nested transaction in progress, if any.
+
+        .. versionadded:: 1.4
+
+        """
+
+        return self._nested_transaction
+
+    @util.memoized_property
+    def info(self) -> _InfoType:
+        """A user-modifiable dictionary.
+
+        The initial value of this dictionary can be populated using the
+        ``info`` argument to the :class:`.Session` constructor or
+        :class:`.sessionmaker` constructor or factory methods.  The dictionary
+        here is always local to this :class:`.Session` and can be modified
+        independently of all other :class:`.Session` objects.
+
+        """
+        return {}
+
+    def _autobegin_t(self, begin: bool = False) -> SessionTransaction:
+        if self._transaction is None:
+            if not begin and not self.autobegin:
+                raise sa_exc.InvalidRequestError(
+                    "Autobegin is disabled on this Session; please call "
+                    "session.begin() to start a new transaction"
+                )
+            trans = SessionTransaction(
+                self,
+                (
+                    SessionTransactionOrigin.BEGIN
+                    if begin
+                    else SessionTransactionOrigin.AUTOBEGIN
+                ),
+            )
+            assert self._transaction is trans
+            return trans
+
+        return self._transaction
+
+    def begin(self, nested: bool = False) -> SessionTransaction:
+        """Begin a transaction, or nested transaction,
+        on this :class:`.Session`, if one is not already begun.
+
+        The :class:`_orm.Session` object features **autobegin** behavior,
+        so that normally it is not necessary to call the
+        :meth:`_orm.Session.begin`
+        method explicitly. However, it may be used in order to control
+        the scope of when the transactional state is begun.
+
+        When used to begin the outermost transaction, an error is raised
+        if this :class:`.Session` is already inside of a transaction.
+
+        :param nested: if True, begins a SAVEPOINT transaction and is
+         equivalent to calling :meth:`~.Session.begin_nested`. For
+         documentation on SAVEPOINT transactions, please see
+         :ref:`session_begin_nested`.
+
+        :return: the :class:`.SessionTransaction` object.  Note that
+         :class:`.SessionTransaction`
+         acts as a Python context manager, allowing :meth:`.Session.begin`
+         to be used in a "with" block.  See :ref:`session_explicit_begin` for
+         an example.
+
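+        E.g., a minimal sketch of explicit transaction scope
+        (``some_object`` is an assumed mapped instance)::
+
+            with session.begin():
+                session.add(some_object)
+            # the block commits on success, or rolls back on exception
+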
+        .. seealso::
+
+            :ref:`session_autobegin`
+
+            :ref:`unitofwork_transaction`
+
+            :meth:`.Session.begin_nested`
+
+
+        """
+
+        trans = self._transaction
+        if trans is None:
+            trans = self._autobegin_t(begin=True)
+
+            if not nested:
+                return trans
+
+        assert trans is not None
+
+        if nested:
+            trans = trans._begin(nested=nested)
+            assert self._transaction is trans
+            self._nested_transaction = trans
+        else:
+            raise sa_exc.InvalidRequestError(
+                "A transaction is already begun on this Session."
+            )
+
+        return trans  # needed for __enter__/__exit__ hook
+
+    def begin_nested(self) -> SessionTransaction:
+        """Begin a "nested" transaction on this Session, e.g. SAVEPOINT.
+
+        The target database(s) and associated drivers must support SQL
+        SAVEPOINT for this method to function correctly.
+
+        For documentation on SAVEPOINT
+        transactions, please see :ref:`session_begin_nested`.
+
+        :return: the :class:`.SessionTransaction` object.  Note that
+         :class:`.SessionTransaction` acts as a context manager, allowing
+         :meth:`.Session.begin_nested` to be used in a "with" block.
+         See :ref:`session_begin_nested` for a usage example.
+
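+        E.g., a hedged sketch isolating a failure to a SAVEPOINT
+        (``parent`` and ``child`` are assumed mapped instances)::
+
+            session.add(parent)
+            try:
+                with session.begin_nested():
+                    session.add(child)
+            except Exception:
+                # only the SAVEPOINT was rolled back; the outer
+                # transaction and ``parent`` remain intact
+                pass
+            session.commit()
+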
+        .. seealso::
+
+            :ref:`session_begin_nested`
+
+            :ref:`pysqlite_serializable` - special workarounds required
+            with the SQLite driver in order for SAVEPOINT to work
+            correctly. For asyncio use cases, see the section
+            :ref:`aiosqlite_serializable`.
+
+        """
+        return self.begin(nested=True)
+
+    def rollback(self) -> None:
+        """Rollback the current transaction in progress.
+
+        If no transaction is in progress, this method is a pass-through.
+
+        The method always rolls back
+        the topmost database transaction, discarding any nested
+        transactions that may be in progress.
+
+        .. seealso::
+
+            :ref:`session_rollback`
+
+            :ref:`unitofwork_transaction`
+
+        """
+        if self._transaction is None:
+            pass
+        else:
+            self._transaction.rollback(_to_root=True)
+
+    def commit(self) -> None:
+        """Flush pending changes and commit the current transaction.
+
+        When the COMMIT operation is complete, all objects are fully
+        :term:`expired`, erasing their internal contents, which will be
+        automatically re-loaded when the objects are next accessed. In the
+        interim, these objects are in an expired state and will not function if
+        they are :term:`detached` from the :class:`.Session`. Additionally,
+        this re-load operation is not supported when using asyncio-oriented
+        APIs. The :paramref:`.Session.expire_on_commit` parameter may be used
+        to disable this behavior.
+
+        When there is no transaction in place for the :class:`.Session`,
+        indicating that no operations were invoked on this :class:`.Session`
+        since the previous call to :meth:`.Session.commit`, the method will
+        begin and commit an internal-only "logical" transaction that does
+        normally affect the database unless pending flush changes were
+        detected, but will still invoke event handlers and object expiration
+        rules.
+
+        The outermost database transaction is committed unconditionally,
+        automatically releasing any SAVEPOINTs in effect.
+
+        .. seealso::
+
+            :ref:`session_committing`
+
+            :ref:`unitofwork_transaction`
+
+            :ref:`asyncio_orm_avoid_lazyloads`
+
+        """
+        trans = self._transaction
+        if trans is None:
+            trans = self._autobegin_t()
+
+        trans.commit(_to_root=True)
+
+    def prepare(self) -> None:
+        """Prepare the current transaction in progress for two phase commit.
+
+        If no transaction is in progress, this method raises an
+        :exc:`~sqlalchemy.exc.InvalidRequestError`.
+
+        Only root transactions of two phase sessions can be prepared. If the
+        current transaction is not such, an
+        :exc:`~sqlalchemy.exc.InvalidRequestError` is raised.
+
+        """
+        trans = self._transaction
+        if trans is None:
+            trans = self._autobegin_t()
+
+        trans.prepare()
+
+    def connection(
+        self,
+        bind_arguments: Optional[_BindArguments] = None,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+    ) -> Connection:
+        r"""Return a :class:`_engine.Connection` object corresponding to this
+        :class:`.Session` object's transactional state.
+
+        Either the :class:`_engine.Connection` corresponding to the current
+        transaction is returned, or if no transaction is in progress, a new
+        one is begun and the :class:`_engine.Connection`
+        returned (note that no
+        transactional state is established with the DBAPI until the first
+        SQL statement is emitted).
+
+        Ambiguity in multi-bind or unbound :class:`.Session` objects can be
+        resolved through any of the optional keyword arguments.   This
+        ultimately makes use of the :meth:`.get_bind` method for resolution.
+
+        :param bind_arguments: dictionary of bind arguments.  May include
+         "mapper", "bind", "clause", other custom arguments that are passed
+         to :meth:`.Session.get_bind`.
+
+        :param execution_options: a dictionary of execution options that will
+         be passed to :meth:`_engine.Connection.execution_options`, **when the
+         connection is first procured only**.   If the connection is already
+         present within the :class:`.Session`, a warning is emitted and
+         the arguments are ignored.
+
+         .. seealso::
+
+            :ref:`session_transaction_isolation`
+
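+        E.g., a hedged sketch of applying an isolation level when the
+        connection is first procured::
+
+            conn = session.connection(
+                execution_options={"isolation_level": "SERIALIZABLE"}
+            )
+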
+        """
+
+        if bind_arguments:
+            bind = bind_arguments.pop("bind", None)
+
+            if bind is None:
+                bind = self.get_bind(**bind_arguments)
+        else:
+            bind = self.get_bind()
+
+        return self._connection_for_bind(
+            bind,
+            execution_options=execution_options,
+        )
+
+    def _connection_for_bind(
+        self,
+        engine: _SessionBind,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+        **kw: Any,
+    ) -> Connection:
+        TransactionalContext._trans_ctx_check(self)
+
+        trans = self._transaction
+        if trans is None:
+            trans = self._autobegin_t()
+        return trans._connection_for_bind(engine, execution_options)
+
+    @overload
+    def _execute_internal(
+        self,
+        statement: Executable,
+        params: Optional[_CoreSingleExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        _parent_execute_state: Optional[Any] = None,
+        _add_event: Optional[Any] = None,
+        _scalar_result: Literal[True] = ...,
+    ) -> Any: ...
+
+    @overload
+    def _execute_internal(
+        self,
+        statement: Executable,
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        _parent_execute_state: Optional[Any] = None,
+        _add_event: Optional[Any] = None,
+        _scalar_result: bool = ...,
+    ) -> Result[Any]: ...
+
+    def _execute_internal(
+        self,
+        statement: Executable,
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        _parent_execute_state: Optional[Any] = None,
+        _add_event: Optional[Any] = None,
+        _scalar_result: bool = False,
+    ) -> Any:
+        statement = coercions.expect(roles.StatementRole, statement)
+
+        if not bind_arguments:
+            bind_arguments = {}
+        else:
+            bind_arguments = dict(bind_arguments)
+
+        if (
+            statement._propagate_attrs.get("compile_state_plugin", None)
+            == "orm"
+        ):
+            compile_state_cls = CompileState._get_plugin_class_for_plugin(
+                statement, "orm"
+            )
+            if TYPE_CHECKING:
+                assert isinstance(
+                    compile_state_cls, context.AbstractORMCompileState
+                )
+        else:
+            compile_state_cls = None
+            bind_arguments.setdefault("clause", statement)
+
+        execution_options = util.coerce_to_immutabledict(execution_options)
+
+        if _parent_execute_state:
+            events_todo = _parent_execute_state._remaining_events()
+        else:
+            events_todo = self.dispatch.do_orm_execute
+            if _add_event:
+                events_todo = list(events_todo) + [_add_event]
+
+        if events_todo:
+            if compile_state_cls is not None:
+                # for event handlers, do the orm_pre_session_exec
+                # pass ahead of the event handlers, so that things like
+                # .load_options, .update_delete_options etc. are populated.
+                # is_pre_event=True allows the hook to hold off on things
+                # it doesn't want to do twice, including autoflush as well
+                # as "pre fetch" for DML, etc.
+                (
+                    statement,
+                    execution_options,
+                ) = compile_state_cls.orm_pre_session_exec(
+                    self,
+                    statement,
+                    params,
+                    execution_options,
+                    bind_arguments,
+                    True,
+                )
+
+            orm_exec_state = ORMExecuteState(
+                self,
+                statement,
+                params,
+                execution_options,
+                bind_arguments,
+                compile_state_cls,
+                events_todo,
+            )
+            for idx, fn in enumerate(events_todo):
+                orm_exec_state._starting_event_idx = idx
+                fn_result: Optional[Result[Any]] = fn(orm_exec_state)
+                if fn_result:
+                    if _scalar_result:
+                        return fn_result.scalar()
+                    else:
+                        return fn_result
+
+            statement = orm_exec_state.statement
+            execution_options = orm_exec_state.local_execution_options
+
+        if compile_state_cls is not None:
+            # now run orm_pre_session_exec() "for real".   if there were
+            # event hooks, this will re-run the steps that interpret
+            # new execution_options into load_options / update_delete_options,
+            # which we assume the event hook might have updated.
+            # autoflush will also be invoked in this step if enabled.
+            (
+                statement,
+                execution_options,
+            ) = compile_state_cls.orm_pre_session_exec(
+                self,
+                statement,
+                params,
+                execution_options,
+                bind_arguments,
+                False,
+            )
+
+        bind = self.get_bind(**bind_arguments)
+
+        conn = self._connection_for_bind(bind)
+
+        if _scalar_result and not compile_state_cls:
+            if TYPE_CHECKING:
+                params = cast(_CoreSingleExecuteParams, params)
+            return conn.scalar(
+                statement, params or {}, execution_options=execution_options
+            )
+
+        if compile_state_cls:
+            result: Result[Any] = compile_state_cls.orm_execute_statement(
+                self,
+                statement,
+                params or {},
+                execution_options,
+                bind_arguments,
+                conn,
+            )
+        else:
+            result = conn.execute(
+                statement, params or {}, execution_options=execution_options
+            )
+
+        if _scalar_result:
+            return result.scalar()
+        else:
+            return result
+
+    @overload
+    def execute(
+        self,
+        statement: TypedReturnsRows[_T],
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        _parent_execute_state: Optional[Any] = None,
+        _add_event: Optional[Any] = None,
+    ) -> Result[_T]: ...
+
+    @overload
+    def execute(
+        self,
+        statement: UpdateBase,
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        _parent_execute_state: Optional[Any] = None,
+        _add_event: Optional[Any] = None,
+    ) -> CursorResult[Any]: ...
+
+    @overload
+    def execute(
+        self,
+        statement: Executable,
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        _parent_execute_state: Optional[Any] = None,
+        _add_event: Optional[Any] = None,
+    ) -> Result[Any]: ...
+
+    def execute(
+        self,
+        statement: Executable,
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        _parent_execute_state: Optional[Any] = None,
+        _add_event: Optional[Any] = None,
+    ) -> Result[Any]:
+        r"""Execute a SQL expression construct.
+
+        Returns a :class:`_engine.Result` object representing
+        results of the statement execution.
+
+        E.g.::
+
+            from sqlalchemy import select
+
+            result = session.execute(select(User).where(User.id == 5))
+
+        The API contract of :meth:`_orm.Session.execute` is similar to that
+        of :meth:`_engine.Connection.execute`, the :term:`2.0 style` version
+        of :class:`_engine.Connection`.
+
+        .. versionchanged:: 1.4 the :meth:`_orm.Session.execute` method is
+           now the primary point of ORM statement execution when using
+           :term:`2.0 style` ORM usage.
+
+        :param statement:
+            An executable statement (i.e. an :class:`.Executable` expression
+            such as :func:`_expression.select`).
+
+        :param params:
+            Optional dictionary, or list of dictionaries, containing
+            bound parameter values.   If a single dictionary, single-row
+            execution occurs; if a list of dictionaries, an
+            "executemany" will be invoked.  The keys in each dictionary
+            must correspond to parameter names present in the statement.
+
+        :param execution_options: optional dictionary of execution options,
+         which will be associated with the statement execution.  This
+         dictionary can provide a subset of the options that are accepted
+         by :meth:`_engine.Connection.execution_options`, and may also
+         provide additional options understood only in an ORM context.
+
+         .. seealso::
+
+            :ref:`orm_queryguide_execution_options` - ORM-specific execution
+            options
+
+        :param bind_arguments: dictionary of additional arguments to determine
+         the bind.  May include "mapper", "bind", or other custom arguments.
+         Contents of this dictionary are passed to the
+         :meth:`.Session.get_bind` method.
+
+        :return: a :class:`_engine.Result` object.
+
+
+        """
+        return self._execute_internal(
+            statement,
+            params,
+            execution_options=execution_options,
+            bind_arguments=bind_arguments,
+            _parent_execute_state=_parent_execute_state,
+            _add_event=_add_event,
+        )
+
+    @overload
+    def scalar(
+        self,
+        statement: TypedReturnsRows[Tuple[_T]],
+        params: Optional[_CoreSingleExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        **kw: Any,
+    ) -> Optional[_T]: ...
+
+    @overload
+    def scalar(
+        self,
+        statement: Executable,
+        params: Optional[_CoreSingleExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        **kw: Any,
+    ) -> Any: ...
+
+    def scalar(
+        self,
+        statement: Executable,
+        params: Optional[_CoreSingleExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        **kw: Any,
+    ) -> Any:
+        """Execute a statement and return a scalar result.
+
+        Usage and parameters are the same as those of
+        :meth:`_orm.Session.execute`; the return result is a scalar Python
+        value.
+
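+        E.g., a hedged sketch (``User`` is an assumed mapped class)::
+
+            maybe_user = session.scalar(select(User).where(User.id == 5))
+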
+        """
+
+        return self._execute_internal(
+            statement,
+            params,
+            execution_options=execution_options,
+            bind_arguments=bind_arguments,
+            _scalar_result=True,
+            **kw,
+        )
+
+    @overload
+    def scalars(
+        self,
+        statement: TypedReturnsRows[Tuple[_T]],
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        **kw: Any,
+    ) -> ScalarResult[_T]: ...
+
+    @overload
+    def scalars(
+        self,
+        statement: Executable,
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        **kw: Any,
+    ) -> ScalarResult[Any]: ...
+
+    def scalars(
+        self,
+        statement: Executable,
+        params: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+        **kw: Any,
+    ) -> ScalarResult[Any]:
+        """Execute a statement and return the results as scalars.
+
+        Usage and parameters are the same as those of
+        :meth:`_orm.Session.execute`; the return result is a
+        :class:`_result.ScalarResult` filtering object which
+        will return single elements rather than :class:`_row.Row` objects.
+
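+        E.g., a hedged sketch (``User`` is an assumed mapped class)::
+
+            users = session.scalars(select(User).order_by(User.id)).all()
+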
+        :return:  a :class:`_result.ScalarResult` object
+
+        .. versionadded:: 1.4.24 Added :meth:`_orm.Session.scalars`
+
+        .. versionadded:: 1.4.26 Added :meth:`_orm.scoped_session.scalars`
+
+        .. seealso::
+
+            :ref:`orm_queryguide_select_orm_entities` - contrasts the behavior
+            of :meth:`_orm.Session.execute` to :meth:`_orm.Session.scalars`
+
+        """
+
+        return self._execute_internal(
+            statement,
+            params=params,
+            execution_options=execution_options,
+            bind_arguments=bind_arguments,
+            _scalar_result=False,  # mypy appreciates this
+            **kw,
+        ).scalars()
+
+    def close(self) -> None:
+        """Close out the transactional resources and ORM objects used by this
+        :class:`_orm.Session`.
+
+        This expunges all ORM objects associated with this
+        :class:`_orm.Session`, ends any transaction in progress and
+        :term:`releases` any :class:`_engine.Connection` objects which this
+        :class:`_orm.Session` itself has checked out from associated
+        :class:`_engine.Engine` objects. The operation then leaves the
+        :class:`_orm.Session` in a state in which it may be used again.
+
+        .. tip::
+
+            In the default running mode the :meth:`_orm.Session.close`
+            method **does not prevent the Session from being used again**.
+            The :class:`_orm.Session` itself does not actually have a
+            distinct "closed" state; it merely means
+            the :class:`_orm.Session` will release all database connections
+            and ORM objects.
+
+            Setting the parameter :paramref:`_orm.Session.close_resets_only`
+            to ``False`` will instead make the ``close`` final, meaning that
+            any further action on the session will be forbidden.
+
+        .. versionchanged:: 1.4  The :meth:`.Session.close` method does not
+           immediately create a new :class:`.SessionTransaction` object;
+           instead, the new :class:`.SessionTransaction` is created only if
+           the :class:`.Session` is used again for a database operation.
+
+        .. seealso::
+
+            :ref:`session_closing` - detail on the semantics of
+            :meth:`_orm.Session.close` and :meth:`_orm.Session.reset`.
+
+            :meth:`_orm.Session.reset` - a similar method that behaves like
+            ``close()`` with the parameter
+            :paramref:`_orm.Session.close_resets_only` set to ``True``.
+
+        """
+        self._close_impl(invalidate=False)
+
+    def reset(self) -> None:
+        """Close out the transactional resources and ORM objects used by this
+        :class:`_orm.Session`, resetting the session to its initial state.
+
+        This method provides the same "reset-only" behavior that the
+        :meth:`_orm.Session.close` method has provided historically, where the
+        state of the :class:`_orm.Session` is reset as though the object were
+        brand new, and ready to be used again.
+        This method may then be useful for :class:`_orm.Session` objects
+        which set :paramref:`_orm.Session.close_resets_only` to ``False``,
+        so that "reset only" behavior is still available.
+
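+        E.g., a hedged sketch (``engine`` and ``obj`` are assumed)::
+
+            sess = Session(bind=engine, close_resets_only=False)
+            sess.add(obj)
+            sess.commit()
+            sess.reset()  # unlike close(), leaves the Session re-usable
+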
+        .. versionadded:: 2.0.22
+
+        .. seealso::
+
+            :ref:`session_closing` - detail on the semantics of
+            :meth:`_orm.Session.close` and :meth:`_orm.Session.reset`.
+
+            :meth:`_orm.Session.close` - a similar method that will
+            additionally prevent re-use of the Session when the parameter
+            :paramref:`_orm.Session.close_resets_only` is set to ``False``.
+        """
+        self._close_impl(invalidate=False, is_reset=True)
+
+    def invalidate(self) -> None:
+        """Close this Session, using connection invalidation.
+
+        This is a variant of :meth:`.Session.close` that will additionally
+        ensure that the :meth:`_engine.Connection.invalidate`
+        method will be called on each :class:`_engine.Connection` object
+        that is currently in use for a transaction (typically there is only
+        one connection unless the :class:`_orm.Session` is used with
+        multiple engines).
+
+        This can be called when the database is known to be in a state where
+        the connections are no longer safe to be used.
+
+        The following illustrates a scenario when using `gevent
+        <https://www.gevent.org/>`_, which can produce ``Timeout`` exceptions
+        that may mean the underlying connection should be discarded::
+
+            import gevent
+
+            try:
+                sess = Session()
+                sess.add(User())
+                sess.commit()
+            except gevent.Timeout:
+                sess.invalidate()
+                raise
+            except:
+                sess.rollback()
+                raise
+
+        The method additionally does everything that :meth:`_orm.Session.close`
+        does, including expunging all ORM objects.
+
+        """
+        self._close_impl(invalidate=True)
+
+    def _close_impl(self, invalidate: bool, is_reset: bool = False) -> None:
+        if not is_reset and self._close_state is _SessionCloseState.ACTIVE:
+            self._close_state = _SessionCloseState.CLOSED
+        self.expunge_all()
+        if self._transaction is not None:
+            for transaction in self._transaction._iterate_self_and_parents():
+                transaction.close(invalidate)
+
+    def expunge_all(self) -> None:
+        """Remove all object instances from this ``Session``.
+
+        This is equivalent to calling ``expunge(obj)`` on all objects in this
+        ``Session``.
+
+        """
+
+        all_states = self.identity_map.all_states() + list(self._new)
+        self.identity_map._kill()
+        self.identity_map = identity.WeakInstanceDict()
+        self._new = {}
+        self._deleted = {}
+
+        statelib.InstanceState._detach_states(all_states, self)
+
+    def _add_bind(self, key: _SessionBindKey, bind: _SessionBind) -> None:
+        try:
+            insp = inspect(key)
+        except sa_exc.NoInspectionAvailable as err:
+            if not isinstance(key, type):
+                raise sa_exc.ArgumentError(
+                    "Not an acceptable bind target: %s" % key
+                ) from err
+            else:
+                self.__binds[key] = bind
+        else:
+            if TYPE_CHECKING:
+                assert isinstance(insp, Inspectable)
+
+            if isinstance(insp, TableClause):
+                self.__binds[insp] = bind
+            elif insp_is_mapper(insp):
+                self.__binds[insp.class_] = bind
+                for _selectable in insp._all_tables:
+                    self.__binds[_selectable] = bind
+            else:
+                raise sa_exc.ArgumentError(
+                    "Not an acceptable bind target: %s" % key
+                )
+
+    def bind_mapper(
+        self, mapper: _EntityBindKey[_O], bind: _SessionBind
+    ) -> None:
+        """Associate a :class:`_orm.Mapper` or arbitrary Python class with a
+        "bind", e.g. an :class:`_engine.Engine` or
+        :class:`_engine.Connection`.
+
+        The given entity is added to a lookup used by the
+        :meth:`.Session.get_bind` method.
+
+        :param mapper: a :class:`_orm.Mapper` object,
+         or an instance of a mapped
+         class, or any Python class that is the base of a set of mapped
+         classes.
+
+        :param bind: an :class:`_engine.Engine` or :class:`_engine.Connection`
+                    object.
+
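+        E.g., a hedged sketch (``User`` and ``user_engine`` are assumed)::
+
+            session.bind_mapper(User, user_engine)
+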
+        .. seealso::
+
+            :ref:`session_partitioning`
+
+            :paramref:`.Session.binds`
+
+            :meth:`.Session.bind_table`
+
+
+        """
+        self._add_bind(mapper, bind)
+
+    def bind_table(self, table: TableClause, bind: _SessionBind) -> None:
+        """Associate a :class:`_schema.Table` with a "bind", e.g. an
+        :class:`_engine.Engine`
+        or :class:`_engine.Connection`.
+
+        The given :class:`_schema.Table` is added to a lookup used by the
+        :meth:`.Session.get_bind` method.
+
+        :param table: a :class:`_schema.Table` object,
+         which is typically the target
+         of an ORM mapping, or is present within a selectable that is
+         mapped.
+
+        :param bind: an :class:`_engine.Engine` or :class:`_engine.Connection`
+         object.
+
+        .. seealso::
+
+            :ref:`session_partitioning`
+
+            :paramref:`.Session.binds`
+
+            :meth:`.Session.bind_mapper`
+
+
+        """
+        self._add_bind(table, bind)
+
+    def get_bind(
+        self,
+        mapper: Optional[_EntityBindKey[_O]] = None,
+        *,
+        clause: Optional[ClauseElement] = None,
+        bind: Optional[_SessionBind] = None,
+        _sa_skip_events: Optional[bool] = None,
+        _sa_skip_for_implicit_returning: bool = False,
+        **kw: Any,
+    ) -> Union[Engine, Connection]:
+        """Return a "bind" to which this :class:`.Session` is bound.
+
+        The "bind" is usually an instance of :class:`_engine.Engine`,
+        except in the case where the :class:`.Session` has been
+        explicitly bound directly to a :class:`_engine.Connection`.
+
+        For a multiply-bound or unbound :class:`.Session`, the
+        ``mapper`` or ``clause`` arguments are used to determine the
+        appropriate bind to return.
+
+        Note that the "mapper" argument is usually present
+        when :meth:`.Session.get_bind` is called via an ORM
+        operation such as a :meth:`.Session.query`, each
+        individual INSERT/UPDATE/DELETE operation within a
+        :meth:`.Session.flush` call, etc.
+
+        The order of resolution is:
+
+        1. if mapper given and :paramref:`.Session.binds` is present,
+           locate a bind based first on the mapper in use, then
+           on the mapped class in use, then on any base classes that are
+           present in the ``__mro__`` of the mapped class, from more specific
+           superclasses to more general.
+        2. if clause given and ``Session.binds`` is present,
+           locate a bind based on :class:`_schema.Table` objects
+           found in the given clause present in ``Session.binds``.
+        3. if ``Session.binds`` is present, return that.
+        4. if clause given, attempt to return a bind
+           linked to the :class:`_schema.MetaData` ultimately
+           associated with the clause.
+        5. if mapper given, attempt to return a bind
+           linked to the :class:`_schema.MetaData` ultimately
+           associated with the :class:`_schema.Table` or other
+           selectable to which the mapper is mapped.
+        6. if no bind can be found, an
+           :exc:`~sqlalchemy.exc.UnboundExecutionError` is raised.
+
+        Note that the :meth:`.Session.get_bind` method can be overridden on
+        a user-defined subclass of :class:`.Session` to provide any kind
+        of bind resolution scheme.  See the example at
+        :ref:`session_custom_partitioning`.
+
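+        As an illustration only, a rough sketch of such an override,
+        where ``engines_by_shard`` is a hypothetical dictionary of
+        :class:`_engine.Engine` objects::
+
+            class ShardedSession(Session):
+                def get_bind(self, mapper=None, clause=None, **kw):
+                    shard = self.info.get("shard")
+                    if shard in engines_by_shard:
+                        return engines_by_shard[shard]
+                    # fall back to the default resolution scheme
+                    return super().get_bind(
+                        mapper=mapper, clause=clause, **kw
+                    )
+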
+        :param mapper:
+          Optional mapped class or corresponding :class:`_orm.Mapper` instance.
+          The bind can be derived from a :class:`_orm.Mapper` first by
+          consulting the "binds" map associated with this :class:`.Session`,
+          and secondly by consulting the :class:`_schema.MetaData` associated
+          with the :class:`_schema.Table` to which the :class:`_orm.Mapper` is
+          mapped for a bind.
+
+        :param clause:
+            A :class:`_expression.ClauseElement` (i.e.
+            :func:`_expression.select`,
+            :func:`_expression.text`,
+            etc.).  If the ``mapper`` argument is not present or could not
+            produce a bind, the given expression construct will be searched
+            for a bound element, typically a :class:`_schema.Table`
+            associated with
+            bound :class:`_schema.MetaData`.
+
+        .. seealso::
+
+             :ref:`session_partitioning`
+
+             :paramref:`.Session.binds`
+
+             :meth:`.Session.bind_mapper`
+
+             :meth:`.Session.bind_table`
+
+        """
+
+        # this function is documented as a subclassing hook, so we have
+        # to call this method even if the return is simple
+        if bind:
+            return bind
+        elif not self.__binds and self.bind:
+            # simplest and most common case, we have a bind and no
+            # per-mapper/table binds, we're done
+            return self.bind
+
+        # we don't have self.bind and either have self.__binds
+        # or we don't have self.__binds (which is legacy).  Look at the
+        # mapper and the clause
+        if mapper is None and clause is None:
+            if self.bind:
+                return self.bind
+            else:
+                raise sa_exc.UnboundExecutionError(
+                    "This session is not bound to a single Engine or "
+                    "Connection, and no context was provided to locate "
+                    "a binding."
+                )
+
+        # look more closely at the mapper.
+        if mapper is not None:
+            try:
+                inspected_mapper = inspect(mapper)
+            except sa_exc.NoInspectionAvailable as err:
+                if isinstance(mapper, type):
+                    raise exc.UnmappedClassError(mapper) from err
+                else:
+                    raise
+        else:
+            inspected_mapper = None
+
+        # match up the mapper or clause in the __binds
+        if self.__binds:
+            # matching mappers and selectables to entries in the
+            # binds dictionary; supported use case.
+            if inspected_mapper:
+                for cls in inspected_mapper.class_.__mro__:
+                    if cls in self.__binds:
+                        return self.__binds[cls]
+                if clause is None:
+                    clause = inspected_mapper.persist_selectable
+
+            if clause is not None:
+                plugin_subject = clause._propagate_attrs.get(
+                    "plugin_subject", None
+                )
+
+                if plugin_subject is not None:
+                    for cls in plugin_subject.mapper.class_.__mro__:
+                        if cls in self.__binds:
+                            return self.__binds[cls]
+
+                for obj in visitors.iterate(clause):
+                    if obj in self.__binds:
+                        if TYPE_CHECKING:
+                            assert isinstance(obj, Table)
+                        return self.__binds[obj]
+
+        # none of the __binds matched, but we have a fallback bind.
+        # return that
+        if self.bind:
+            return self.bind
+
+        context = []
+        if inspected_mapper is not None:
+            context.append(f"mapper {inspected_mapper}")
+        if clause is not None:
+            context.append("SQL expression")
+
+        raise sa_exc.UnboundExecutionError(
+            f"Could not locate a bind configured on "
+            f'{", ".join(context)} or this Session.'
+        )
+
+    @overload
+    def query(self, _entity: _EntityType[_O]) -> Query[_O]: ...
+
+    @overload
+    def query(
+        self, _colexpr: TypedColumnsClauseRole[_T]
+    ) -> RowReturningQuery[Tuple[_T]]: ...
+
+    # START OVERLOADED FUNCTIONS self.query RowReturningQuery 2-8
+
+    # code within this block is **programmatically,
+    # statically generated** by tools/generate_tuple_map_overloads.py
+
+    @overload
+    def query(
+        self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1]
+    ) -> RowReturningQuery[Tuple[_T0, _T1]]: ...
+
+    @overload
+    def query(
+        self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2]
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2]]: ...
+
+    @overload
+    def query(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3]]: ...
+
+    @overload
+    def query(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4]]: ...
+
+    @overload
+    def query(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+        __ent5: _TCCA[_T5],
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ...
+
+    @overload
+    def query(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+        __ent5: _TCCA[_T5],
+        __ent6: _TCCA[_T6],
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ...
+
+    @overload
+    def query(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+        __ent5: _TCCA[_T5],
+        __ent6: _TCCA[_T6],
+        __ent7: _TCCA[_T7],
+    ) -> RowReturningQuery[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: ...
+
+    # END OVERLOADED FUNCTIONS self.query
+
+    @overload
+    def query(
+        self, *entities: _ColumnsClauseArgument[Any], **kwargs: Any
+    ) -> Query[Any]: ...
+
+    def query(
+        self, *entities: _ColumnsClauseArgument[Any], **kwargs: Any
+    ) -> Query[Any]:
+        """Return a new :class:`_query.Query` object corresponding to this
+        :class:`_orm.Session`.
+
+        Note that the :class:`_query.Query` object is legacy as of
+        SQLAlchemy 2.0; the :func:`_sql.select` construct is now used
+        to construct ORM queries.
+
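+        As a rough comparison for a hypothetical ``User`` class, the
+        legacy and 2.0 styles look like::
+
+            # legacy Query style
+            users = session.query(User).filter_by(name="ed").all()
+
+            # 2.0 style using select()
+            users = session.scalars(
+                select(User).filter_by(name="ed")
+            ).all()
+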
+        .. seealso::
+
+            :ref:`unified_tutorial`
+
+            :ref:`queryguide_toplevel`
+
+            :ref:`query_api_toplevel` - legacy API doc
+
+        """
+
+        return self._query_cls(entities, self, **kwargs)
+
+    def _identity_lookup(
+        self,
+        mapper: Mapper[_O],
+        primary_key_identity: Union[Any, Tuple[Any, ...]],
+        identity_token: Any = None,
+        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
+        lazy_loaded_from: Optional[InstanceState[Any]] = None,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+    ) -> Union[Optional[_O], LoaderCallableStatus]:
+        """Locate an object in the identity map.
+
+        Given a primary key identity, constructs an identity key and then
+        looks in the session's identity map.  If present, the object may
+        be run through unexpiration rules (e.g. load unloaded attributes,
+        check if it was deleted).
+
+        e.g.::
+
+            obj = session._identity_lookup(inspect(SomeClass), (1,))
+
+        :param mapper: mapper in use
+        :param primary_key_identity: the primary key we are searching for, as
+         a tuple.
+        :param identity_token: identity token that should be used to create
+         the identity key.  Used as-is by default; overriding subclasses can
+         repurpose this value to interpret it in a special way, such as
+         searching among multiple target tokens when ``None`` is passed.
+        :param passive: passive load flag passed to
+         :func:`.loading.get_from_identity`, which impacts the behavior if
+         the object is found; the object may be validated and/or unexpired
+         if the flag allows for SQL to be emitted.
+        :param lazy_loaded_from: an :class:`.InstanceState` that is
+         specifically asking for this identity as a related identity.  Used
+         for sharding schemes where there is a correspondence between an object
+         and a related object being lazy-loaded (or otherwise
+         relationship-loaded).
+
+        :return: None if the object is not found in the identity map, *or*
+         if the object was unexpired and found to have been deleted.
+         If passive flags disallow SQL and the object is expired,
+         PASSIVE_NO_RESULT is returned.  In all other cases the instance
+         is returned.
+
+        .. versionchanged:: 1.4.0 - the :meth:`.Session._identity_lookup`
+           method was moved from :class:`_query.Query` to
+           :class:`.Session`, to avoid having to instantiate the
+           :class:`_query.Query` object.
+
+
+        """
+
+        key = mapper.identity_key_from_primary_key(
+            primary_key_identity, identity_token=identity_token
+        )
+
+        # work around: https://github.com/python/typing/discussions/1143
+        return_value = loading.get_from_identity(self, mapper, key, passive)
+        return return_value
+
+    @util.non_memoized_property
+    @contextlib.contextmanager
+    def no_autoflush(self) -> Iterator[Session]:
+        """Return a context manager that disables autoflush.
+
+        e.g.::
+
+            with session.no_autoflush:
+
+                some_object = SomeClass()
+                session.add(some_object)
+                # won't autoflush
+                some_object.related_thing = session.query(SomeRelated).first()
+
+        Operations that proceed within the ``with:`` block
+        will not be subject to flushes occurring upon query
+        access.  This is useful when initializing a series
+        of objects which involve existing database queries,
+        where the uncompleted object should not yet be flushed.
+
+        """
+        autoflush = self.autoflush
+        self.autoflush = False
+        try:
+            yield self
+        finally:
+            self.autoflush = autoflush
+
+    @util.langhelpers.tag_method_for_warnings(
+        "This warning originated from the Session 'autoflush' process, "
+        "which was invoked automatically in response to a user-initiated "
+        "operation. Consider using ``no_autoflush`` context manager if this "
+        "warning happended while initializing objects.",
+        sa_exc.SAWarning,
+    )
+    def _autoflush(self) -> None:
+        if self.autoflush and not self._flushing:
+            try:
+                self.flush()
+            except sa_exc.StatementError as e:
+                # note we are reraising StatementError as opposed to
+                # raising FlushError with "chaining" to remain compatible
+                # with code that catches StatementError, IntegrityError,
+                # etc.
+                e.add_detail(
+                    "raised as a result of Query-invoked autoflush; "
+                    "consider using a session.no_autoflush block if this "
+                    "flush is occurring prematurely"
+                )
+                raise e.with_traceback(sys.exc_info()[2])
+
+    def refresh(
+        self,
+        instance: object,
+        attribute_names: Optional[Iterable[str]] = None,
+        with_for_update: ForUpdateParameter = None,
+    ) -> None:
+        """Expire and refresh attributes on the given instance.
+
+        The selected attributes will first be expired as they would when using
+        :meth:`_orm.Session.expire`; then a SELECT statement will be issued to
+        the database to refresh column-oriented attributes with the current
+        value available in the current transaction.
+
+        :func:`_orm.relationship` oriented attributes will also be immediately
+        loaded if they were already eagerly loaded on the object, using the
+        same eager loading strategy that they were loaded with originally.
+
+        .. versionadded:: 1.4 - the :meth:`_orm.Session.refresh` method
+           can also refresh eagerly loaded attributes.
+
+        :func:`_orm.relationship` oriented attributes that would normally
+        load using the ``select`` (or "lazy") loader strategy will also
+        load **if they are named explicitly in the attribute_names
+        collection**, emitting a SELECT statement for the attribute using the
+        ``immediate`` loader strategy.  If lazy-loaded relationships are not
+        named in :paramref:`_orm.Session.refresh.attribute_names`, then
+        they remain as "lazy loaded" attributes and are not implicitly
+        refreshed.
+
+        .. versionchanged:: 2.0.4  The :meth:`_orm.Session.refresh` method
+           will now refresh lazy-loaded :func:`_orm.relationship` oriented
+           attributes for those which are named explicitly in the
+           :paramref:`_orm.Session.refresh.attribute_names` collection.
+
+        .. tip::
+
+            While the :meth:`_orm.Session.refresh` method is capable of
+            refreshing both column and relationship oriented attributes, its
+            primary focus is on refreshing of local column-oriented attributes
+            on a single instance. For more open ended "refresh" functionality,
+            including the ability to refresh the attributes on many objects at
+            once while having explicit control over relationship loader
+            strategies, use the
+            :ref:`populate existing <orm_queryguide_populate_existing>` feature
+            instead.
+
+        Note that a highly isolated transaction will return the same values as
+        were previously read in that same transaction, regardless of changes
+        in database state outside of that transaction.   Refreshing
+        attributes usually only makes sense at the start of a transaction
+        where database rows have not yet been accessed.
+
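+        For illustration, assuming a persistent ``some_user`` object::
+
+            # re-load all column attributes from the current transaction
+            session.refresh(some_user)
+
+            # re-load only "name", emitting SELECT ... FOR UPDATE
+            session.refresh(some_user, ["name"], with_for_update=True)
+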
+        :param attribute_names: optional.  An iterable collection of
+          string attribute names indicating a subset of attributes to
+          be refreshed.
+
+        :param with_for_update: optional boolean ``True`` indicating FOR UPDATE
+          should be used, or may be a dictionary containing flags to
+          indicate a more specific set of FOR UPDATE flags for the SELECT;
+          flags should match the parameters of
+          :meth:`_query.Query.with_for_update`.
+
+        .. seealso::
+
+            :ref:`session_expire` - introductory material
+
+            :meth:`.Session.expire`
+
+            :meth:`.Session.expire_all`
+
+            :ref:`orm_queryguide_populate_existing` - allows any ORM query
+            to refresh objects as they would be loaded normally.
+
+        """
+        try:
+            state = attributes.instance_state(instance)
+        except exc.NO_STATE as err:
+            raise exc.UnmappedInstanceError(instance) from err
+
+        self._expire_state(state, attribute_names)
+
+        # this autoflush previously used to occur as a secondary effect
+        # of the load_on_ident below.   Meaning we'd organize the SELECT
+        # based on current DB pks, then flush, then if pks changed in that
+        # flush, crash.  this was unticketed but discovered as part of
+        # #8703.  So here, autoflush up front, don't autoflush inside
+        # load_on_ident.
+        self._autoflush()
+
+        if with_for_update == {}:
+            raise sa_exc.ArgumentError(
+                "with_for_update should be the boolean value "
+                "True, or a dictionary with options.  "
+                "A blank dictionary is ambiguous."
+            )
+
+        with_for_update = ForUpdateArg._from_argument(with_for_update)
+
+        stmt: Select[Any] = sql.select(object_mapper(instance))
+        if (
+            loading.load_on_ident(
+                self,
+                stmt,
+                state.key,
+                refresh_state=state,
+                with_for_update=with_for_update,
+                only_load_props=attribute_names,
+                require_pk_cols=True,
+                # technically unnecessary as we just did autoflush
+                # above, however removes the additional unnecessary
+                # call to _autoflush()
+                no_autoflush=True,
+                is_user_refresh=True,
+            )
+            is None
+        ):
+            raise sa_exc.InvalidRequestError(
+                "Could not refresh instance '%s'" % instance_str(instance)
+            )
+
+    def expire_all(self) -> None:
+        """Expires all persistent instances within this Session.
+
+        When any attributes on a persistent instance are next accessed,
+        a query will be issued using the
+        :class:`.Session` object's current transactional context in order to
+        load all expired attributes for the given instance.   Note that
+        a highly isolated transaction will return the same values as were
+        previously read in that same transaction, regardless of changes
+        in database state outside of that transaction.
+
+        To expire individual objects and individual attributes
+        on those objects, use :meth:`Session.expire`.
+
+        The :class:`.Session` object's default behavior is to
+        expire all state whenever the :meth:`Session.rollback`
+        or :meth:`Session.commit` methods are called, so that new
+        state can be loaded for the new transaction.   For this reason,
+        calling :meth:`Session.expire_all` is not usually needed,
+        assuming the transaction is isolated.
+
+        .. seealso::
+
+            :ref:`session_expire` - introductory material
+
+            :meth:`.Session.expire`
+
+            :meth:`.Session.refresh`
+
+            :meth:`_orm.Query.populate_existing`
+
+        """
+        for state in self.identity_map.all_states():
+            state._expire(state.dict, self.identity_map._modified)
+
+    def expire(
+        self, instance: object, attribute_names: Optional[Iterable[str]] = None
+    ) -> None:
+        """Expire the attributes on an instance.
+
+        Marks the attributes of an instance as out of date. When an expired
+        attribute is next accessed, a query will be issued to the
+        :class:`.Session` object's current transactional context in order to
+        load all expired attributes for the given instance.   Note that
+        a highly isolated transaction will return the same values as were
+        previously read in that same transaction, regardless of changes
+        in database state outside of that transaction.
+
+        To expire all objects in the :class:`.Session` simultaneously,
+        use :meth:`Session.expire_all`.
+
+        The :class:`.Session` object's default behavior is to
+        expire all state whenever the :meth:`Session.rollback`
+        or :meth:`Session.commit` methods are called, so that new
+        state can be loaded for the new transaction.   For this reason,
+        calling :meth:`Session.expire` only makes sense for the specific
+        case that a non-ORM SQL statement was emitted in the current
+        transaction.
+
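+        For illustration, assuming a persistent ``some_user`` object whose
+        row was just altered by a non-ORM UPDATE statement::
+
+            session.expire(some_user, ["name"])
+            # the next access of some_user.name emits a SELECT
+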
+        :param instance: The instance to be refreshed.
+        :param attribute_names: optional list of string attribute names
+          indicating a subset of attributes to be expired.
+
+        .. seealso::
+
+            :ref:`session_expire` - introductory material
+
+            :meth:`.Session.expire`
+
+            :meth:`.Session.refresh`
+
+            :meth:`_orm.Query.populate_existing`
+
+        """
+        try:
+            state = attributes.instance_state(instance)
+        except exc.NO_STATE as err:
+            raise exc.UnmappedInstanceError(instance) from err
+        self._expire_state(state, attribute_names)
+
+    def _expire_state(
+        self,
+        state: InstanceState[Any],
+        attribute_names: Optional[Iterable[str]],
+    ) -> None:
+        self._validate_persistent(state)
+        if attribute_names:
+            state._expire_attributes(state.dict, attribute_names)
+        else:
+            # pre-fetch the full cascade since the expire is going to
+            # remove associations
+            cascaded = list(
+                state.manager.mapper.cascade_iterator("refresh-expire", state)
+            )
+            self._conditional_expire(state)
+            for o, m, st_, dct_ in cascaded:
+                self._conditional_expire(st_)
+
+    def _conditional_expire(
+        self, state: InstanceState[Any], autoflush: Optional[bool] = None
+    ) -> None:
+        """Expire a state if persistent, else expunge if pending"""
+
+        if state.key:
+            state._expire(state.dict, self.identity_map._modified)
+        elif state in self._new:
+            self._new.pop(state)
+            state._detach(self)
+
+    def expunge(self, instance: object) -> None:
+        """Remove the `instance` from this ``Session``.
+
+        This will free all internal references to the instance.  Cascading
+        will be applied according to the *expunge* cascade rule.
+
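+        For illustration, assuming a persistent ``some_user`` object::
+
+            session.expunge(some_user)
+            # some_user is now detached from this Session
+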
+        """
+        try:
+            state = attributes.instance_state(instance)
+        except exc.NO_STATE as err:
+            raise exc.UnmappedInstanceError(instance) from err
+        if state.session_id is not self.hash_key:
+            raise sa_exc.InvalidRequestError(
+                "Instance %s is not present in this Session" % state_str(state)
+            )
+
+        cascaded = list(
+            state.manager.mapper.cascade_iterator("expunge", state)
+        )
+        self._expunge_states([state] + [st_ for o, m, st_, dct_ in cascaded])
+
+    def _expunge_states(
+        self, states: Iterable[InstanceState[Any]], to_transient: bool = False
+    ) -> None:
+        for state in states:
+            if state in self._new:
+                self._new.pop(state)
+            elif self.identity_map.contains_state(state):
+                self.identity_map.safe_discard(state)
+                self._deleted.pop(state, None)
+            elif self._transaction:
+                # state is "detached" from being deleted, but still present
+                # in the transaction snapshot
+                self._transaction._deleted.pop(state, None)
+        statelib.InstanceState._detach_states(
+            states, self, to_transient=to_transient
+        )
+
+    def _register_persistent(self, states: Set[InstanceState[Any]]) -> None:
+        """Register all persistent objects from a flush.
+
+        This is used both for pending objects moving to the persistent
+        state as well as already persistent objects.
+
+        """
+
+        pending_to_persistent = self.dispatch.pending_to_persistent or None
+        for state in states:
+            mapper = _state_mapper(state)
+
+            # prevent against last minute dereferences of the object
+            obj = state.obj()
+            if obj is not None:
+                instance_key = mapper._identity_key_from_state(state)
+
+                if (
+                    _none_set.intersection(instance_key[1])
+                    and not mapper.allow_partial_pks
+                    or _none_set.issuperset(instance_key[1])
+                ):
+                    raise exc.FlushError(
+                        "Instance %s has a NULL identity key.  If this is an "
+                        "auto-generated value, check that the database table "
+                        "allows generation of new primary key values, and "
+                        "that the mapped Column object is configured to "
+                        "expect these generated values.  Ensure also that "
+                        "this flush() is not occurring at an inappropriate "
+                        "time, such as within a load() event."
+                        % state_str(state)
+                    )
+
+                if state.key is None:
+                    state.key = instance_key
+                elif state.key != instance_key:
+                    # primary key switch. use safe_discard() in case another
+                    # state has already replaced this one in the identity
+                    # map (see test/orm/test_naturalpks.py ReversePKsTest)
+                    self.identity_map.safe_discard(state)
+                    trans = self._transaction
+                    assert trans is not None
+                    if state in trans._key_switches:
+                        orig_key = trans._key_switches[state][0]
+                    else:
+                        orig_key = state.key
+                    trans._key_switches[state] = (
+                        orig_key,
+                        instance_key,
+                    )
+                    state.key = instance_key
+
+                # there can be an existing state in the identity map
+                # that is replaced when the primary keys of two instances
+                # are swapped; see test/orm/test_naturalpks.py -> test_reverse
+                old = self.identity_map.replace(state)
+                if (
+                    old is not None
+                    and mapper._identity_key_from_state(old) == instance_key
+                    and old.obj() is not None
+                ):
+                    util.warn(
+                        "Identity map already had an identity for %s, "
+                        "replacing it with newly flushed object.   Are there "
+                        "load operations occurring inside of an event handler "
+                        "within the flush?" % (instance_key,)
+                    )
+                state._orphaned_outside_of_session = False
+
+        statelib.InstanceState._commit_all_states(
+            ((state, state.dict) for state in states), self.identity_map
+        )
+
+        self._register_altered(states)
+
+        if pending_to_persistent is not None:
+            for state in states.intersection(self._new):
+                pending_to_persistent(self, state)
+
+        # remove from new last, might be the last strong ref
+        for state in set(states).intersection(self._new):
+            self._new.pop(state)
+
+    def _register_altered(self, states: Iterable[InstanceState[Any]]) -> None:
+        if self._transaction:
+            for state in states:
+                if state in self._new:
+                    self._transaction._new[state] = True
+                else:
+                    self._transaction._dirty[state] = True
+
+    def _remove_newly_deleted(
+        self, states: Iterable[InstanceState[Any]]
+    ) -> None:
+        persistent_to_deleted = self.dispatch.persistent_to_deleted or None
+        for state in states:
+            if self._transaction:
+                self._transaction._deleted[state] = True
+
+            if persistent_to_deleted is not None:
+                # get a strong reference before we pop out of
+                # self._deleted
+                obj = state.obj()  # noqa
+
+            self.identity_map.safe_discard(state)
+            self._deleted.pop(state, None)
+            state._deleted = True
+            # can't call state._detach() here, because this state
+            # is still in the transaction snapshot and needs to be
+            # tracked as part of that
+            if persistent_to_deleted is not None:
+                persistent_to_deleted(self, state)
+
+    def add(self, instance: object, _warn: bool = True) -> None:
+        """Place an object into this :class:`_orm.Session`.
+
+        Objects that are in the :term:`transient` state when passed to the
+        :meth:`_orm.Session.add` method will move to the
+        :term:`pending` state, until the next flush, at which point they
+        will move to the :term:`persistent` state.
+
+        Objects that are in the :term:`detached` state when passed to the
+        :meth:`_orm.Session.add` method will move to the :term:`persistent`
+        state directly.
+
+        If the transaction used by the :class:`_orm.Session` is rolled back,
+        objects which were transient when they were passed to
+        :meth:`_orm.Session.add` will be moved back to the
+        :term:`transient` state, and will no longer be present within this
+        :class:`_orm.Session`.
+
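+        For illustration, a sketch for a hypothetical ``User`` class::
+
+            user = User(name="ed")
+            session.add(user)  # user is now pending
+            session.flush()    # user becomes persistent
+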
+        .. seealso::
+
+            :meth:`_orm.Session.add_all`
+
+            :ref:`session_adding` - at :ref:`session_basics`
+
+        """
+        if _warn and self._warn_on_events:
+            self._flush_warning("Session.add()")
+
+        try:
+            state = attributes.instance_state(instance)
+        except exc.NO_STATE as err:
+            raise exc.UnmappedInstanceError(instance) from err
+
+        self._save_or_update_state(state)
+
+    def add_all(self, instances: Iterable[object]) -> None:
+        """Add the given collection of instances to this :class:`_orm.Session`.
+
+        See the documentation for :meth:`_orm.Session.add` for a general
+        behavioral description.
+
+        .. seealso::
+
+            :meth:`_orm.Session.add`
+
+            :ref:`session_adding` - at :ref:`session_basics`
+
+        """
+
+        if self._warn_on_events:
+            self._flush_warning("Session.add_all()")
+
+        for instance in instances:
+            self.add(instance, _warn=False)
+
+    def _save_or_update_state(self, state: InstanceState[Any]) -> None:
+        state._orphaned_outside_of_session = False
+        self._save_or_update_impl(state)
+
+        mapper = _state_mapper(state)
+        for o, m, st_, dct_ in mapper.cascade_iterator(
+            "save-update", state, halt_on=self._contains_state
+        ):
+            self._save_or_update_impl(st_)
+
+    def delete(self, instance: object) -> None:
+        """Mark an instance as deleted.
+
+        The object is assumed to be either :term:`persistent` or
+        :term:`detached` when passed; after the method is called, the
+        object will remain in the :term:`persistent` state until the next
+        flush proceeds.  During this time, the object will also be a member
+        of the :attr:`_orm.Session.deleted` collection.
+
+        When the next flush proceeds, the object will move to the
+        :term:`deleted` state, indicating a ``DELETE`` statement was emitted
+        for its row within the current transaction.   When the transaction
+        is successfully committed,
+        the deleted object is moved to the :term:`detached` state and is
+        no longer present within this :class:`_orm.Session`.
+
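+        For illustration, assuming a persistent ``some_user`` object::
+
+            session.delete(some_user)
+            session.flush()  # DELETE is emitted; some_user moves to "deleted"
+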
+        .. seealso::
+
+            :ref:`session_deleting` - at :ref:`session_basics`
+
+        """
+        if self._warn_on_events:
+            self._flush_warning("Session.delete()")
+
+        try:
+            state = attributes.instance_state(instance)
+        except exc.NO_STATE as err:
+            raise exc.UnmappedInstanceError(instance) from err
+
+        self._delete_impl(state, instance, head=True)
+
+    def _delete_impl(
+        self, state: InstanceState[Any], obj: object, head: bool
+    ) -> None:
+        if state.key is None:
+            if head:
+                raise sa_exc.InvalidRequestError(
+                    "Instance '%s' is not persisted" % state_str(state)
+                )
+            else:
+                return
+
+        to_attach = self._before_attach(state, obj)
+
+        if state in self._deleted:
+            return
+
+        self.identity_map.add(state)
+
+        if to_attach:
+            self._after_attach(state, obj)
+
+        if head:
+            # grab the cascades before adding the item to the deleted list
+            # so that autoflush does not delete the item
+            # the strong reference to the instance itself is significant here
+            cascade_states = list(
+                state.manager.mapper.cascade_iterator("delete", state)
+            )
+        else:
+            cascade_states = None
+
+        self._deleted[state] = obj
+
+        if head:
+            if TYPE_CHECKING:
+                assert cascade_states is not None
+            for o, m, st_, dct_ in cascade_states:
+                self._delete_impl(st_, o, False)
+
+    def get(
+        self,
+        entity: _EntityBindKey[_O],
+        ident: _PKIdentityArgument,
+        *,
+        options: Optional[Sequence[ORMOption]] = None,
+        populate_existing: bool = False,
+        with_for_update: ForUpdateParameter = None,
+        identity_token: Optional[Any] = None,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+    ) -> Optional[_O]:
+        """Return an instance based on the given primary key identifier,
+        or ``None`` if not found.
+
+        E.g.::
+
+            my_user = session.get(User, 5)
+
+            some_object = session.get(VersionedFoo, (5, 10))
+
+            some_object = session.get(VersionedFoo, {"id": 5, "version_id": 10})
+
+        .. versionadded:: 1.4 Added :meth:`_orm.Session.get`, which is moved
+           from the now legacy :meth:`_orm.Query.get` method.
+
+        :meth:`_orm.Session.get` is special in that it provides direct
+        access to the identity map of the :class:`.Session`.
+        If the given primary key identifier is present
+        in the local identity map, the object is returned
+        directly from this collection and no SQL is emitted,
+        unless the object has been marked fully expired.
+        If not present,
+        a SELECT is performed in order to locate the object.
+
+        :meth:`_orm.Session.get` also will perform a check if
+        the object is present in the identity map and
+        marked as expired - a SELECT
+        is emitted to refresh the object as well as to
+        ensure that the row is still present.
+        If not, :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.
+
+        :param entity: a mapped class or :class:`.Mapper` indicating the
+         type of entity to be loaded.
+
+        :param ident: A scalar, tuple, or dictionary representing the
+         primary key.  For a composite (e.g. multiple column) primary key,
+         a tuple or dictionary should be passed.
+
+         For a single-column primary key, the scalar calling form is typically
+         the most expedient.  If the primary key of a row is the value "5",
+         the call looks like::
+
+            my_object = session.get(SomeClass, 5)
+
+         The tuple form contains primary key values typically in
+         the order in which they correspond to the mapped
+         :class:`_schema.Table`
+         object's primary key columns, or if the
+         :paramref:`_orm.Mapper.primary_key` configuration parameter was
+         used, in the order used for that parameter. For example, if the
+         primary key of a row is represented by the integer values
+         "5, 10", the call would look like::
+
+             my_object = session.get(SomeClass, (5, 10))
+
+         The dictionary form should include as keys the mapped attribute names
+         corresponding to each element of the primary key.  If the mapped class
+         has the attributes ``id``, ``version_id`` as the attributes which
+         store the object's primary key value, the call would look like::
+
+            my_object = session.get(SomeClass, {"id": 5, "version_id": 10})
+
+        :param options: optional sequence of loader options which will be
+         applied to the query, if one is emitted.
+
+        :param populate_existing: causes the method to unconditionally emit
+         a SQL query and refresh the object with the newly loaded data,
+         regardless of whether or not the object is already present.
+
+        :param with_for_update: optional boolean ``True`` indicating FOR UPDATE
+          should be used, or may be a dictionary containing flags to
+          indicate a more specific set of FOR UPDATE flags for the SELECT;
+          flags should match the parameters of
+          :meth:`_query.Query.with_for_update`.
+
+        :param execution_options: optional dictionary of execution options,
+         which will be associated with the query execution if one is emitted.
+         This dictionary can provide a subset of the options that are
+         accepted by :meth:`_engine.Connection.execution_options`, and may
+         also provide additional options understood only in an ORM context.
+
+         .. versionadded:: 1.4.29
+
+         .. seealso::
+
+            :ref:`orm_queryguide_execution_options` - ORM-specific execution
+            options
+
+        :param bind_arguments: dictionary of additional arguments to determine
+         the bind.  May include "mapper", "bind", or other custom arguments.
+         Contents of this dictionary are passed to the
+         :meth:`.Session.get_bind` method.
+
+         .. versionadded:: 2.0.0rc1
+
+        :return: The object instance, or ``None``.
+
+        """  # noqa: E501
+        return self._get_impl(
+            entity,
+            ident,
+            loading.load_on_pk_identity,
+            options=options,
+            populate_existing=populate_existing,
+            with_for_update=with_for_update,
+            identity_token=identity_token,
+            execution_options=execution_options,
+            bind_arguments=bind_arguments,
+        )
+
+    def get_one(
+        self,
+        entity: _EntityBindKey[_O],
+        ident: _PKIdentityArgument,
+        *,
+        options: Optional[Sequence[ORMOption]] = None,
+        populate_existing: bool = False,
+        with_for_update: ForUpdateParameter = None,
+        identity_token: Optional[Any] = None,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+    ) -> _O:
+        """Return exactly one instance based on the given primary key
+        identifier, or raise an exception if not found.
+
+        Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query
+        selects no rows.
+
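+        For illustration, a sketch for a hypothetical ``User`` class::
+
+            user = session.get_one(User, 5)
+            # raises NoResultFound if no row with primary key 5 exists
+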
+        For a detailed documentation of the arguments see the
+        method :meth:`.Session.get`.
+
+        .. versionadded:: 2.0.22
+
+        :return: The object instance.
+
+        .. seealso::
+
+            :meth:`.Session.get` - equivalent method that instead
+            returns ``None`` if no row was found with the provided primary
+            key
+
+        """
+
+        instance = self.get(
+            entity,
+            ident,
+            options=options,
+            populate_existing=populate_existing,
+            with_for_update=with_for_update,
+            identity_token=identity_token,
+            execution_options=execution_options,
+            bind_arguments=bind_arguments,
+        )
+
+        if instance is None:
+            raise sa_exc.NoResultFound(
+                "No row was found when one was required"
+            )
+
+        return instance
+
+    def _get_impl(
+        self,
+        entity: _EntityBindKey[_O],
+        primary_key_identity: _PKIdentityArgument,
+        db_load_fn: Callable[..., _O],
+        *,
+        options: Optional[Sequence[ExecutableOption]] = None,
+        populate_existing: bool = False,
+        with_for_update: ForUpdateParameter = None,
+        identity_token: Optional[Any] = None,
+        execution_options: OrmExecuteOptionsParameter = util.EMPTY_DICT,
+        bind_arguments: Optional[_BindArguments] = None,
+    ) -> Optional[_O]:
+        # convert composite types to individual args
+        if (
+            is_composite_class(primary_key_identity)
+            and type(primary_key_identity)
+            in descriptor_props._composite_getters
+        ):
+            getter = descriptor_props._composite_getters[
+                type(primary_key_identity)
+            ]
+            primary_key_identity = getter(primary_key_identity)
+
+        mapper: Optional[Mapper[_O]] = inspect(entity)
+
+        if mapper is None or not mapper.is_mapper:
+            raise sa_exc.ArgumentError(
+                "Expected mapped class or mapper, got: %r" % entity
+            )
+
+        is_dict = isinstance(primary_key_identity, dict)
+        if not is_dict:
+            primary_key_identity = util.to_list(
+                primary_key_identity, default=[None]
+            )
+
+        if len(primary_key_identity) != len(mapper.primary_key):
+            raise sa_exc.InvalidRequestError(
+                "Incorrect number of values in identifier to formulate "
+                "primary key for session.get(); primary key columns "
+                "are %s" % ",".join("'%s'" % c for c in mapper.primary_key)
+            )
+
+        if is_dict:
+            pk_synonyms = mapper._pk_synonyms
+
+            if pk_synonyms:
+                correct_keys = set(pk_synonyms).intersection(
+                    primary_key_identity
+                )
+
+                if correct_keys:
+                    primary_key_identity = dict(primary_key_identity)
+                    for k in correct_keys:
+                        primary_key_identity[pk_synonyms[k]] = (
+                            primary_key_identity[k]
+                        )
+
+            try:
+                primary_key_identity = list(
+                    primary_key_identity[prop.key]
+                    for prop in mapper._identity_key_props
+                )
+
+            except KeyError as err:
+                raise sa_exc.InvalidRequestError(
+                    "Incorrect names of values in identifier to formulate "
+                    "primary key for session.get(); primary key attribute "
+                    "names are %s (synonym names are also accepted)"
+                    % ",".join(
+                        "'%s'" % prop.key
+                        for prop in mapper._identity_key_props
+                    )
+                ) from err
+
+        if (
+            not populate_existing
+            and not mapper.always_refresh
+            and with_for_update is None
+        ):
+            instance = self._identity_lookup(
+                mapper,
+                primary_key_identity,
+                identity_token=identity_token,
+                execution_options=execution_options,
+                bind_arguments=bind_arguments,
+            )
+
+            if instance is not None:
+                # reject calls where the primary key is present in the
+                # identity map but the class does not match
+                if not isinstance(instance, mapper.class_):
+                    return None
+                return instance
+
+            # TODO: this was being tested before, but this is not possible
+            assert instance is not LoaderCallableStatus.PASSIVE_CLASS_MISMATCH
+
+        # set_label_style() not strictly necessary, however this will ensure
+        # that tablename_colname style is used which at the moment is
+        # asserted in a lot of unit tests :)
+
+        load_options = context.QueryContext.default_load_options
+
+        if populate_existing:
+            load_options += {"_populate_existing": populate_existing}
+        statement = sql.select(mapper).set_label_style(
+            LABEL_STYLE_TABLENAME_PLUS_COL
+        )
+        if with_for_update is not None:
+            statement._for_update_arg = ForUpdateArg._from_argument(
+                with_for_update
+            )
+
+        if options:
+            statement = statement.options(*options)
+        return db_load_fn(
+            self,
+            statement,
+            primary_key_identity,
+            load_options=load_options,
+            identity_token=identity_token,
+            execution_options=execution_options,
+            bind_arguments=bind_arguments,
+        )
+
+    def merge(
+        self,
+        instance: _O,
+        *,
+        load: bool = True,
+        options: Optional[Sequence[ORMOption]] = None,
+    ) -> _O:
+        """Copy the state of a given instance into a corresponding instance
+        within this :class:`.Session`.
+
+        :meth:`.Session.merge` examines the primary key attributes of the
+        source instance, and attempts to reconcile it with an instance of the
+        same primary key in the session.   If not found locally, it attempts
+        to load the object from the database based on primary key, and if
+        none can be located, creates a new instance.  The state of each
+        attribute on the source instance is then copied to the target
+        instance.  The resulting target instance is then returned by the
+        method; the original source instance is left unmodified, and
+        un-associated with the :class:`.Session` if not already.
+
+        This operation cascades to associated instances if the association is
+        mapped with ``cascade="merge"``.
+
+        See :ref:`unitofwork_merging` for a detailed discussion of merging.
+
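+        For illustration, assuming a detached ``user`` object::
+
+            merged_user = session.merge(user)
+            # merged_user is persistent within this Session; the original
+            # user object is left unmodified
+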
+        :param instance: Instance to be merged.
+        :param load: Boolean, when False, :meth:`.merge` switches into
+         a "high performance" mode which causes it to forego emitting history
+         events as well as all database access.  This flag is used for
+         cases such as transferring graphs of objects into a :class:`.Session`
+         from a second level cache, or to transfer just-loaded objects
+         into the :class:`.Session` owned by a worker thread or process
+         without re-querying the database.
+
+         The ``load=False`` use case adds the caveat that the given
+         object has to be in a "clean" state, that is, has no pending changes
+         to be flushed - even if the incoming object is detached from any
+         :class:`.Session`.   This is so that when
+         the merge operation populates local attributes and
+         cascades to related objects and
+         collections, the values can be "stamped" onto the
+         target object as is, without generating any history or attribute
+         events, and without the need to reconcile the incoming data with
+         any existing related objects or collections that might not
+         be loaded.  The resulting objects from ``load=False`` are always
+         produced as "clean", so it is only appropriate that the given objects
+         should be "clean" as well, else this suggests a mis-use of the
+         method.
+        :param options: optional sequence of loader options which will be
+         applied to the :meth:`_orm.Session.get` method when the merge
+         operation loads the existing version of the object from the database.
+
+         .. versionadded:: 1.4.24
+
+
+        .. seealso::
+
+            :func:`.make_transient_to_detached` - provides for an alternative
+            means of "merging" a single object into the :class:`.Session`
+
+        """
+
+        if self._warn_on_events:
+            self._flush_warning("Session.merge()")
+
+        _recursive: Dict[InstanceState[Any], object] = {}
+        _resolve_conflict_map: Dict[_IdentityKeyType[Any], object] = {}
+
+        if load:
+            # flush current contents if we expect to load data
+            self._autoflush()
+
+        object_mapper(instance)  # verify mapped
+        autoflush = self.autoflush
+        try:
+            self.autoflush = False
+            return self._merge(
+                attributes.instance_state(instance),
+                attributes.instance_dict(instance),
+                load=load,
+                options=options,
+                _recursive=_recursive,
+                _resolve_conflict_map=_resolve_conflict_map,
+            )
+        finally:
+            self.autoflush = autoflush
+
+    def _merge(
+        self,
+        state: InstanceState[_O],
+        state_dict: _InstanceDict,
+        *,
+        options: Optional[Sequence[ORMOption]] = None,
+        load: bool,
+        _recursive: Dict[Any, object],
+        _resolve_conflict_map: Dict[_IdentityKeyType[Any], object],
+    ) -> _O:
+        mapper: Mapper[_O] = _state_mapper(state)
+        if state in _recursive:
+            return cast(_O, _recursive[state])
+
+        new_instance = False
+        key = state.key
+
+        merged: Optional[_O]
+
+        if key is None:
+            if state in self._new:
+                util.warn(
+                    "Instance %s is already pending in this Session yet is "
+                    "being merged again; this is probably not what you want "
+                    "to do" % state_str(state)
+                )
+
+            if not load:
+                raise sa_exc.InvalidRequestError(
+                    "merge() with load=False option does not support "
+                    "objects transient (i.e. unpersisted) objects.  flush() "
+                    "all changes on mapped instances before merging with "
+                    "load=False."
+                )
+            key = mapper._identity_key_from_state(state)
+            key_is_persistent = LoaderCallableStatus.NEVER_SET not in key[
+                1
+            ] and (
+                not _none_set.intersection(key[1])
+                or (
+                    mapper.allow_partial_pks
+                    and not _none_set.issuperset(key[1])
+                )
+            )
+        else:
+            key_is_persistent = True
+
+        if key in self.identity_map:
+            try:
+                merged = self.identity_map[key]
+            except KeyError:
+                # object was GC'ed right as we checked for it
+                merged = None
+        else:
+            merged = None
+
+        if merged is None:
+            if key_is_persistent and key in _resolve_conflict_map:
+                merged = cast(_O, _resolve_conflict_map[key])
+
+            elif not load:
+                if state.modified:
+                    raise sa_exc.InvalidRequestError(
+                        "merge() with load=False option does not support "
+                        "objects marked as 'dirty'.  flush() all changes on "
+                        "mapped instances before merging with load=False."
+                    )
+                merged = mapper.class_manager.new_instance()
+                merged_state = attributes.instance_state(merged)
+                merged_state.key = key
+                self._update_impl(merged_state)
+                new_instance = True
+
+            elif key_is_persistent:
+                merged = self.get(
+                    mapper.class_,
+                    key[1],
+                    identity_token=key[2],
+                    options=options,
+                )
+
+        if merged is None:
+            merged = mapper.class_manager.new_instance()
+            merged_state = attributes.instance_state(merged)
+            merged_dict = attributes.instance_dict(merged)
+            new_instance = True
+            self._save_or_update_state(merged_state)
+        else:
+            merged_state = attributes.instance_state(merged)
+            merged_dict = attributes.instance_dict(merged)
+
+        _recursive[state] = merged
+        _resolve_conflict_map[key] = merged
+
+        # check that we didn't just pull the exact same
+        # state out.
+        if state is not merged_state:
+            # version check if applicable
+            if mapper.version_id_col is not None:
+                existing_version = mapper._get_state_attr_by_column(
+                    state,
+                    state_dict,
+                    mapper.version_id_col,
+                    passive=PassiveFlag.PASSIVE_NO_INITIALIZE,
+                )
+
+                merged_version = mapper._get_state_attr_by_column(
+                    merged_state,
+                    merged_dict,
+                    mapper.version_id_col,
+                    passive=PassiveFlag.PASSIVE_NO_INITIALIZE,
+                )
+
+                if (
+                    existing_version
+                    is not LoaderCallableStatus.PASSIVE_NO_RESULT
+                    and merged_version
+                    is not LoaderCallableStatus.PASSIVE_NO_RESULT
+                    and existing_version != merged_version
+                ):
+                    raise exc.StaleDataError(
+                        "Version id '%s' on merged state %s "
+                        "does not match existing version '%s'. "
+                        "Leave the version attribute unset when "
+                        "merging to update the most recent version."
+                        % (
+                            existing_version,
+                            state_str(merged_state),
+                            merged_version,
+                        )
+                    )
+
+            merged_state.load_path = state.load_path
+            merged_state.load_options = state.load_options
+
+            # since we are copying load_options, we need to copy
+            # the callables_ that would have been generated by those
+            # load_options.
+            # assumes that the callables we put in state.callables_
+            # are not instance-specific (which they should not be)
+            merged_state._copy_callables(state)
+
+            for prop in mapper.iterate_properties:
+                prop.merge(
+                    self,
+                    state,
+                    state_dict,
+                    merged_state,
+                    merged_dict,
+                    load,
+                    _recursive,
+                    _resolve_conflict_map,
+                )
+
+        if not load:
+            # remove any history
+            merged_state._commit_all(merged_dict, self.identity_map)
+            merged_state.manager.dispatch._sa_event_merge_wo_load(
+                merged_state, None
+            )
+
+        if new_instance:
+            merged_state.manager.dispatch.load(merged_state, None)
+
+        return merged
+
+    def _validate_persistent(self, state: InstanceState[Any]) -> None:
+        if not self.identity_map.contains_state(state):
+            raise sa_exc.InvalidRequestError(
+                "Instance '%s' is not persistent within this Session"
+                % state_str(state)
+            )
+
+    def _save_impl(self, state: InstanceState[Any]) -> None:
+        if state.key is not None:
+            raise sa_exc.InvalidRequestError(
+                "Object '%s' already has an identity - "
+                "it can't be registered as pending" % state_str(state)
+            )
+
+        obj = state.obj()
+        to_attach = self._before_attach(state, obj)
+        if state not in self._new:
+            self._new[state] = obj
+            state.insert_order = len(self._new)
+        if to_attach:
+            self._after_attach(state, obj)
+
+    def _update_impl(
+        self, state: InstanceState[Any], revert_deletion: bool = False
+    ) -> None:
+        if state.key is None:
+            raise sa_exc.InvalidRequestError(
+                "Instance '%s' is not persisted" % state_str(state)
+            )
+
+        if state._deleted:
+            if revert_deletion:
+                if not state._attached:
+                    return
+                del state._deleted
+            else:
+                raise sa_exc.InvalidRequestError(
+                    "Instance '%s' has been deleted.  "
+                    "Use the make_transient() "
+                    "function to send this object back "
+                    "to the transient state." % state_str(state)
+                )
+
+        obj = state.obj()
+
+        # check for late gc
+        if obj is None:
+            return
+
+        to_attach = self._before_attach(state, obj)
+
+        self._deleted.pop(state, None)
+        if revert_deletion:
+            self.identity_map.replace(state)
+        else:
+            self.identity_map.add(state)
+
+        if to_attach:
+            self._after_attach(state, obj)
+        elif revert_deletion:
+            self.dispatch.deleted_to_persistent(self, state)
+
+    def _save_or_update_impl(self, state: InstanceState[Any]) -> None:
+        if state.key is None:
+            self._save_impl(state)
+        else:
+            self._update_impl(state)
+
+    def enable_relationship_loading(self, obj: object) -> None:
+        """Associate an object with this :class:`.Session` for related
+        object loading.
+
+        .. warning::
+
+            :meth:`.enable_relationship_loading` exists to serve special
+            use cases and is not recommended for general use.
+
+        Accesses of attributes mapped with :func:`_orm.relationship`
+        will attempt to load a value from the database using this
+        :class:`.Session` as the source of connectivity.  The values
+        will be loaded based on foreign key and primary key values
+        present on this object - if not present, then those relationships
+        will be unavailable.
+
+        The object will be attached to this session, but will
+        **not** participate in any persistence operations; its state
+        for almost all purposes will remain either "transient" or
+        "detached", except for the case of relationship loading.
+
+        Also note that backrefs will often not work as expected:
+        altering a relationship-bound attribute on the target object
+        may not fire off a backref event if the effective value is the
+        same as the one already loaded based on the foreign key value.
+
+        The :meth:`.Session.enable_relationship_loading` method is
+        similar to the ``load_on_pending`` flag on :func:`_orm.relationship`.
+        Unlike that flag, :meth:`.Session.enable_relationship_loading` allows
+        an object to remain transient while still being able to load
+        related items.
+
+        To make a transient object associated with a :class:`.Session`
+        via :meth:`.Session.enable_relationship_loading` pending, add
+        it to the :class:`.Session` using :meth:`.Session.add` normally.
+        If the object instead represents an existing identity in the database,
+        it should be merged using :meth:`.Session.merge`.
+
+        :meth:`.Session.enable_relationship_loading` does not improve
+        behavior when the ORM is used normally - object references should be
+        constructed at the object level, not at the foreign key level, so
+        that they are present in an ordinary way before flush()
+        proceeds.  This method is not intended for general use.
+
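+        E.g., a minimal sketch (``Widget`` is a hypothetical mapped class
+        with a many-to-one ``parent`` relationship)::
+
+            widget = Widget(parent_id=5)  # transient; FK attribute set
+            session.enable_relationship_loading(widget)
+
+            # attribute access may now emit SQL using this Session
+            parent = widget.parent
+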
+        .. seealso::
+
+            :paramref:`_orm.relationship.load_on_pending` - this flag
+            allows per-relationship loading of many-to-ones on items that
+            are pending.
+
+            :func:`.make_transient_to_detached` - allows for an object to
+            be added to a :class:`.Session` without SQL emitted, which then
+            will unexpire attributes on access.
+
+        """
+        try:
+            state = attributes.instance_state(obj)
+        except exc.NO_STATE as err:
+            raise exc.UnmappedInstanceError(obj) from err
+
+        to_attach = self._before_attach(state, obj)
+        state._load_pending = True
+        if to_attach:
+            self._after_attach(state, obj)
+
+    def _before_attach(self, state: InstanceState[Any], obj: object) -> bool:
+        self._autobegin_t()
+
+        if state.session_id == self.hash_key:
+            return False
+
+        if state.session_id and state.session_id in _sessions:
+            raise sa_exc.InvalidRequestError(
+                "Object '%s' is already attached to session '%s' "
+                "(this is '%s')"
+                % (state_str(state), state.session_id, self.hash_key)
+            )
+
+        self.dispatch.before_attach(self, state)
+
+        return True
+
+    def _after_attach(self, state: InstanceState[Any], obj: object) -> None:
+        state.session_id = self.hash_key
+        if state.modified and state._strong_obj is None:
+            state._strong_obj = obj
+        self.dispatch.after_attach(self, state)
+
+        if state.key:
+            self.dispatch.detached_to_persistent(self, state)
+        else:
+            self.dispatch.transient_to_pending(self, state)
+
+    def __contains__(self, instance: object) -> bool:
+        """Return True if the instance is associated with this session.
+
+        A result of True means the instance is either pending or
+        persistent within this Session.
+
+        """
+        try:
+            state = attributes.instance_state(instance)
+        except exc.NO_STATE as err:
+            raise exc.UnmappedInstanceError(instance) from err
+        return self._contains_state(state)
+
+    def __iter__(self) -> Iterator[object]:
+        """Iterate over all pending or persistent instances within this
+        Session.
+
+        """
+        return iter(
+            list(self._new.values()) + list(self.identity_map.values())
+        )
+
+    def _contains_state(self, state: InstanceState[Any]) -> bool:
+        return state in self._new or self.identity_map.contains_state(state)
+
+    def flush(self, objects: Optional[Sequence[Any]] = None) -> None:
+        """Flush all the object changes to the database.
+
+        Writes out all pending object creations, deletions and modifications
+        to the database as INSERTs, DELETEs, UPDATEs, etc.  Operations are
+        automatically ordered by the Session's unit of work dependency
+        solver.
+
+        Database operations will be issued in the current transactional
+        context and do not affect the state of the transaction, unless an
+        error occurs, in which case the entire transaction is rolled back.
+        You may flush() as often as you like within a transaction to move
+        changes from Python to the database's transaction buffer.
+
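+        E.g., an illustrative sketch (``Order`` is a hypothetical mapped
+        class whose primary key is generated by the database)::
+
+            order = Order()
+            session.add(order)
+            session.flush()  # emits INSERT in the current transaction
+            print(order.id)  # the primary key value is now available
+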
+        :param objects: Optional; restricts the flush operation to operate
+          only on elements that are in the given collection.
+
+          This feature is for an extremely narrow set of use cases where
+          particular objects may need to be operated upon before the
+          full flush() occurs.  It is not intended for general use.
+
+        """
+
+        if self._flushing:
+            raise sa_exc.InvalidRequestError("Session is already flushing")
+
+        if self._is_clean():
+            return
+        try:
+            self._flushing = True
+            self._flush(objects)
+        finally:
+            self._flushing = False
+
+    def _flush_warning(self, method: Any) -> None:
+        util.warn(
+            "Usage of the '%s' operation is not currently supported "
+            "within the execution stage of the flush process. "
+            "Results may not be consistent.  Consider using alternative "
+            "event listeners or connection-level operations instead." % method
+        )
+
+    def _is_clean(self) -> bool:
+        return (
+            not self.identity_map.check_modified()
+            and not self._deleted
+            and not self._new
+        )
+
+    def _flush(self, objects: Optional[Sequence[object]] = None) -> None:
+        dirty = self._dirty_states
+        if not dirty and not self._deleted and not self._new:
+            self.identity_map._modified.clear()
+            return
+
+        flush_context = UOWTransaction(self)
+
+        if self.dispatch.before_flush:
+            self.dispatch.before_flush(self, flush_context, objects)
+            # re-establish "dirty states" in case the listeners
+            # added new dirty objects
+            dirty = self._dirty_states
+
+        deleted = set(self._deleted)
+        new = set(self._new)
+
+        dirty = set(dirty).difference(deleted)
+
+        # create the set of all objects we want to operate upon
+        if objects:
+            # specific list passed in
+            objset = set()
+            for o in objects:
+                try:
+                    state = attributes.instance_state(o)
+
+                except exc.NO_STATE as err:
+                    raise exc.UnmappedInstanceError(o) from err
+                objset.add(state)
+        else:
+            objset = None
+
+        # store objects whose fate has been decided
+        processed = set()
+
+        # put all saves/updates into the flush context.  detect top-level
+        # orphans and throw them into deleted.
+        if objset:
+            proc = new.union(dirty).intersection(objset).difference(deleted)
+        else:
+            proc = new.union(dirty).difference(deleted)
+
+        for state in proc:
+            is_orphan = _state_mapper(state)._is_orphan(state)
+
+            is_persistent_orphan = is_orphan and state.has_identity
+
+            if (
+                is_orphan
+                and not is_persistent_orphan
+                and state._orphaned_outside_of_session
+            ):
+                self._expunge_states([state])
+            else:
+                _reg = flush_context.register_object(
+                    state, isdelete=is_persistent_orphan
+                )
+                assert _reg, "Failed to add object to the flush context!"
+                processed.add(state)
+
+        # put all remaining deletes into the flush context.
+        if objset:
+            proc = deleted.intersection(objset).difference(processed)
+        else:
+            proc = deleted.difference(processed)
+        for state in proc:
+            _reg = flush_context.register_object(state, isdelete=True)
+            assert _reg, "Failed to add object to the flush context!"
+
+        if not flush_context.has_work:
+            return
+
+        flush_context.transaction = transaction = self._autobegin_t()._begin()
+        try:
+            self._warn_on_events = True
+            try:
+                flush_context.execute()
+            finally:
+                self._warn_on_events = False
+
+            self.dispatch.after_flush(self, flush_context)
+
+            flush_context.finalize_flush_changes()
+
+            if not objects and self.identity_map._modified:
+                len_ = len(self.identity_map._modified)
+
+                statelib.InstanceState._commit_all_states(
+                    [
+                        (state, state.dict)
+                        for state in self.identity_map._modified
+                    ],
+                    instance_dict=self.identity_map,
+                )
+                util.warn(
+                    "Attribute history events accumulated on %d "
+                    "previously clean instances "
+                    "within inner-flush event handlers have been "
+                    "reset, and will not result in database updates. "
+                    "Consider using set_committed_value() within "
+                    "inner-flush event handlers to avoid this warning." % len_
+                )
+
+            # useful assertions:
+            # if not objects:
+            #    assert not self.identity_map._modified
+            # else:
+            #    assert self.identity_map._modified == \
+            #            self.identity_map._modified.difference(objects)
+
+            self.dispatch.after_flush_postexec(self, flush_context)
+
+            transaction.commit()
+
+        except:
+            with util.safe_reraise():
+                transaction.rollback(_capture_exception=True)
+
+    def bulk_save_objects(
+        self,
+        objects: Iterable[object],
+        return_defaults: bool = False,
+        update_changed_only: bool = True,
+        preserve_order: bool = True,
+    ) -> None:
+        """Perform a bulk save of the given list of objects.
+
+        .. legacy::
+
+            This method is a legacy feature as of the 2.0 series of
+            SQLAlchemy.   For modern bulk INSERT and UPDATE, see
+            the sections :ref:`orm_queryguide_bulk_insert` and
+            :ref:`orm_queryguide_bulk_update`.
+
+            For general INSERT and UPDATE of existing ORM mapped objects,
+            prefer standard :term:`unit of work` data management patterns,
+            introduced in the :ref:`unified_tutorial` at
+            :ref:`tutorial_orm_data_manipulation`.  SQLAlchemy 2.0
+            now uses :ref:`engine_insertmanyvalues` with modern dialects
+            which solves previous issues of bulk INSERT slowness.
+
+        :param objects: a sequence of mapped object instances.  The mapped
+         objects are persisted as is, and are **not** associated with the
+         :class:`.Session` afterwards.
+
+         For each object, whether the object is sent as an INSERT or an
+         UPDATE is dependent on the same rules used by the :class:`.Session`
+         in traditional operation; if the object has the
+         :attr:`.InstanceState.key`
+         attribute set, then the object is assumed to be "detached" and
+         will result in an UPDATE.  Otherwise, an INSERT is used.
+
+         In the case of an UPDATE, statements are grouped based on which
+         attributes have changed, and are thus to be the subject of each
+         SET clause.  If ``update_changed_only`` is False, then all
+         attributes present within each object are applied to the UPDATE
+         statement, which may help in allowing the statements to be grouped
+         together into a larger executemany(), and will also reduce the
+         overhead of checking history on attributes.
+
+        :param return_defaults: when True, rows that are missing values which
+         generate defaults, namely integer primary key defaults and sequences,
+         will be inserted **one at a time**, so that the primary key value
+         is available.  In particular this will allow joined-inheritance
+         and other multi-table mappings to insert correctly without the need
+         to provide primary key values ahead of time; however,
+         :paramref:`.Session.bulk_save_objects.return_defaults` **greatly
+         reduces the performance gains** of the method overall.  It is
+         strongly advised to use the standard :meth:`_orm.Session.add_all`
+         approach instead.
+
+        :param update_changed_only: when True, UPDATE statements are rendered
+         based on those attributes in each state that have logged changes.
+         When False, all attributes present are rendered into the SET clause
+         with the exception of primary key attributes.
+
+        :param preserve_order: when True, the order of inserts and updates
+         matches exactly the order in which the objects are given.   When
+         False, common types of objects are grouped into inserts
+         and updates, to allow for more batching opportunities.
+
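+        E.g., a minimal sketch (``User`` is a hypothetical mapped class)::
+
+            session.bulk_save_objects(
+                [User(name="u%d" % i) for i in range(1000)]
+            )
+            session.commit()
+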
+        .. seealso::
+
+            :doc:`queryguide/dml`
+
+            :meth:`.Session.bulk_insert_mappings`
+
+            :meth:`.Session.bulk_update_mappings`
+
+        """
+
+        obj_states: Iterable[InstanceState[Any]]
+
+        obj_states = (attributes.instance_state(obj) for obj in objects)
+
+        if not preserve_order:
+            # the purpose of this sort is just so that common mappers
+            # and persistence states are grouped together, so that groupby
+            # will return a single group for a particular type of mapper.
+            # it's not trying to be deterministic beyond that.
+            obj_states = sorted(
+                obj_states,
+                key=lambda state: (id(state.mapper), state.key is not None),
+            )
+
+        def grouping_key(
+            state: InstanceState[_O],
+        ) -> Tuple[Mapper[_O], bool]:
+            return (state.mapper, state.key is not None)
+
+        for (mapper, isupdate), states in itertools.groupby(
+            obj_states, grouping_key
+        ):
+            self._bulk_save_mappings(
+                mapper,
+                states,
+                isupdate=isupdate,
+                isstates=True,
+                return_defaults=return_defaults,
+                update_changed_only=update_changed_only,
+                render_nulls=False,
+            )
+
+    def bulk_insert_mappings(
+        self,
+        mapper: Mapper[Any],
+        mappings: Iterable[Dict[str, Any]],
+        return_defaults: bool = False,
+        render_nulls: bool = False,
+    ) -> None:
+        """Perform a bulk insert of the given list of mapping dictionaries.
+
+        .. legacy::
+
+            This method is a legacy feature as of the 2.0 series of
+            SQLAlchemy.   For modern bulk INSERT and UPDATE, see
+            the sections :ref:`orm_queryguide_bulk_insert` and
+            :ref:`orm_queryguide_bulk_update`.  The 2.0 API shares
+            implementation details with this method and adds new features
+            as well.
+
+        :param mapper: a mapped class, or the actual :class:`_orm.Mapper`
+         object,
+         representing the single kind of object represented within the mapping
+         list.
+
+        :param mappings: a sequence of dictionaries, each one containing the
+         state of the mapped row to be inserted, in terms of the attribute
+         names on the mapped class.   If the mapping refers to multiple tables,
+         such as a joined-inheritance mapping, each dictionary must contain all
+         keys to be populated into all tables.
+
+        :param return_defaults: when True, the INSERT process will be altered
+         to ensure that newly generated primary key values will be fetched.
+         The rationale for this parameter is typically to enable
+         :ref:`Joined Table Inheritance <joined_inheritance>` mappings to
+         be bulk inserted.
+
+         .. note:: for backends that don't support RETURNING, the
+            :paramref:`_orm.Session.bulk_insert_mappings.return_defaults`
+            parameter can significantly decrease performance as INSERT
+            statements can no longer be batched.   See
+            :ref:`engine_insertmanyvalues`
+            for background on which backends are affected.
+
+        :param render_nulls: When True, a value of ``None`` will result
+         in a NULL value being included in the INSERT statement, rather
+         than the column being omitted from the INSERT.   This allows all
+         the rows being INSERTed to have the identical set of columns which
+         allows the full set of rows to be batched to the DBAPI.  Normally,
+         each column-set that contains a different combination of NULL values
+         than the previous row must omit a different series of columns from
+         the rendered INSERT statement, which means it must be emitted as a
+         separate statement.   By passing this flag, the full set of rows
+         are guaranteed to be batchable into one batch; the cost however is
+         that server-side defaults which are invoked by an omitted column will
+         be skipped, so care must be taken to ensure that these are not
+         necessary.
+
+         .. warning::
+
+            When this flag is set, **server side default SQL values will
+            not be invoked** for those columns that are inserted as NULL;
+            the NULL value will be sent explicitly.   Care must be taken
+            to ensure that no server-side default functions need to be
+            invoked for the operation as a whole.
+
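+        E.g., a minimal sketch (``User`` is a hypothetical mapped class)::
+
+            session.bulk_insert_mappings(
+                User,
+                [{"name": "u%d" % i} for i in range(1000)],
+            )
+            session.commit()
+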
+        .. seealso::
+
+            :doc:`queryguide/dml`
+
+            :meth:`.Session.bulk_save_objects`
+
+            :meth:`.Session.bulk_update_mappings`
+
+        """
+        self._bulk_save_mappings(
+            mapper,
+            mappings,
+            isupdate=False,
+            isstates=False,
+            return_defaults=return_defaults,
+            update_changed_only=False,
+            render_nulls=render_nulls,
+        )
+
+    def bulk_update_mappings(
+        self, mapper: Mapper[Any], mappings: Iterable[Dict[str, Any]]
+    ) -> None:
+        """Perform a bulk update of the given list of mapping dictionaries.
+
+        .. legacy::
+
+            This method is a legacy feature as of the 2.0 series of
+            SQLAlchemy.   For modern bulk INSERT and UPDATE, see
+            the sections :ref:`orm_queryguide_bulk_insert` and
+            :ref:`orm_queryguide_bulk_update`.  The 2.0 API shares
+            implementation details with this method and adds new features
+            as well.
+
+        :param mapper: a mapped class, or the actual :class:`_orm.Mapper`
+         object,
+         representing the single kind of object represented within the mapping
+         list.
+
+        :param mappings: a sequence of dictionaries, each one containing the
+         state of the mapped row to be updated, in terms of the attribute names
+         on the mapped class.   If the mapping refers to multiple tables, such
+         as a joined-inheritance mapping, each dictionary may contain keys
+         corresponding to all tables.   All those keys which are present and
+         are not part of the primary key are applied to the SET clause of the
+         UPDATE statement; the primary key values, which are required, are
+         applied to the WHERE clause.
+
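+        E.g., a minimal sketch (``User`` is a hypothetical mapped class
+        with primary key column ``id``)::
+
+            session.bulk_update_mappings(
+                User,
+                [
+                    {"id": 1, "name": "new name 1"},
+                    {"id": 2, "name": "new name 2"},
+                ],
+            )
+            session.commit()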
+
+        .. seealso::
+
+            :doc:`queryguide/dml`
+
+            :meth:`.Session.bulk_insert_mappings`
+
+            :meth:`.Session.bulk_save_objects`
+
+        """
+        self._bulk_save_mappings(
+            mapper,
+            mappings,
+            isupdate=True,
+            isstates=False,
+            return_defaults=False,
+            update_changed_only=False,
+            render_nulls=False,
+        )
+
+    def _bulk_save_mappings(
+        self,
+        mapper: Mapper[_O],
+        mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]],
+        *,
+        isupdate: bool,
+        isstates: bool,
+        return_defaults: bool,
+        update_changed_only: bool,
+        render_nulls: bool,
+    ) -> None:
+        mapper = _class_to_mapper(mapper)
+        self._flushing = True
+
+        transaction = self._autobegin_t()._begin()
+        try:
+            if isupdate:
+                bulk_persistence._bulk_update(
+                    mapper,
+                    mappings,
+                    transaction,
+                    isstates=isstates,
+                    update_changed_only=update_changed_only,
+                )
+            else:
+                bulk_persistence._bulk_insert(
+                    mapper,
+                    mappings,
+                    transaction,
+                    isstates=isstates,
+                    return_defaults=return_defaults,
+                    render_nulls=render_nulls,
+                )
+            transaction.commit()
+
+        except:
+            with util.safe_reraise():
+                transaction.rollback(_capture_exception=True)
+        finally:
+            self._flushing = False
+
+    def is_modified(
+        self, instance: object, include_collections: bool = True
+    ) -> bool:
+        r"""Return ``True`` if the given instance has locally
+        modified attributes.
+
+        This method retrieves the history for each instrumented
+        attribute on the instance and performs a comparison of the current
+        value to its previously flushed or committed value, if any.
+
+        It is in effect a more expensive and accurate
+        version of checking for the given instance in the
+        :attr:`.Session.dirty` collection; a full test for
+        each attribute's net "dirty" status is performed.
+
+        E.g.::
+
+            return session.is_modified(someobject)
+
+        A few caveats to this method apply:
+
+        * Instances present in the :attr:`.Session.dirty` collection may
+          report ``False`` when tested with this method.  This is because
+          the object may have received change events via attribute mutation,
+          thus placing it in :attr:`.Session.dirty`, but ultimately the state
+          is the same as that loaded from the database, resulting in no net
+          change here.
+        * Scalar attributes may not have recorded the previously set
+          value when a new value was applied, if the attribute was not loaded,
+          or was expired, at the time the new value was received - in these
+          cases, the attribute is assumed to have a change, even if there is
+          ultimately no net change against its database value. SQLAlchemy in
+          most cases does not need the "old" value when a set event occurs, so
+          it skips the expense of a SQL call if the old value isn't present,
+          based on the assumption that an UPDATE of the scalar value is
+          usually needed, and in those few cases where it isn't, is less
+          expensive on average than issuing a defensive SELECT.
+
+          The "old" value is fetched unconditionally upon set only if the
+          attribute container has the ``active_history`` flag set to ``True``.
+          This flag is set typically for primary key attributes and scalar
+          object references that are not a simple many-to-one.  To set this
+          flag for any arbitrary mapped column, use the ``active_history``
+          argument with :func:`.column_property`, as in the sketch
+          following this list.
+
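+        E.g., an illustrative sketch of enabling ``active_history`` on a
+        plain mapped column (all names here are hypothetical)::
+
+            from sqlalchemy import Column, Integer, String
+            from sqlalchemy.orm import column_property
+
+            class Widget(Base):
+                __tablename__ = "widget"
+
+                id = Column(Integer, primary_key=True)
+                name = column_property(
+                    Column(String(50)), active_history=True
+                )
+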
+        :param instance: mapped instance to be tested for pending changes.
+        :param include_collections: Indicates if multivalued collections
+         should be included in the operation.  Setting this to ``False`` is a
+         way to detect only local-column based properties (i.e. scalar columns
+         or many-to-one foreign keys) that would result in an UPDATE for this
+         instance upon flush.
+
+        """
+        state = object_state(instance)
+
+        if not state.modified:
+            return False
+
+        dict_ = state.dict
+
+        for attr in state.manager.attributes:
+            if (
+                not include_collections
+                and hasattr(attr.impl, "get_collection")
+            ) or not hasattr(attr.impl, "get_history"):
+                continue
+
+            (added, unchanged, deleted) = attr.impl.get_history(
+                state, dict_, passive=PassiveFlag.NO_CHANGE
+            )
+
+            if added or deleted:
+                return True
+
+        # no attribute reported a net change
+        return False
+
+    @property
+    def is_active(self) -> bool:
+        """True if this :class:`.Session` is not in "partial rollback" state.
+
+        .. versionchanged:: 1.4 The :class:`_orm.Session` no longer begins
+           a new transaction immediately, so this attribute will be False
+           when the :class:`_orm.Session` is first instantiated.
+
+        "partial rollback" state typically indicates that the flush process
+        of the :class:`_orm.Session` has failed, and that the
+        :meth:`_orm.Session.rollback` method must be emitted in order to
+        fully roll back the transaction.
+
+        If this :class:`_orm.Session` is not in a transaction at all, the
+        :class:`_orm.Session` will autobegin when it is first used, so in this
+        case :attr:`_orm.Session.is_active` will return True.
+
+        Otherwise, if this :class:`_orm.Session` is within a transaction,
+        and that transaction has not been rolled back internally, the
+        :attr:`_orm.Session.is_active` will also return True.
+
+        .. seealso::
+
+            :ref:`faq_session_rollback`
+
+            :meth:`_orm.Session.in_transaction`
+
+        """
+        return self._transaction is None or self._transaction.is_active
+
+    @property
+    def _dirty_states(self) -> Iterable[InstanceState[Any]]:
+        """The set of all persistent states considered dirty.
+
+        This method returns all states that were modified including
+        those that were possibly deleted.
+
+        """
+        return self.identity_map._dirty_states()
+
+    @property
+    def dirty(self) -> IdentitySet:
+        """The set of all persistent instances considered dirty.
+
+        E.g.::
+
+            some_mapped_object in session.dirty
+
+        Instances are considered dirty when they were modified but not
+        deleted.
+
+        Note that this 'dirty' calculation is 'optimistic'; most
+        attribute-setting or collection modification operations will
+        mark an instance as 'dirty' and place it in this set, even if
+        there is no net change to the attribute's value.  At flush
+        time, the value of each attribute is compared to its
+        previously saved value, and if there's no net change, no SQL
+        operation will occur (this is a more expensive operation so
+        it's only done at flush time).
+
+        To check if an instance has actionable net changes to its
+        attributes, use the :meth:`.Session.is_modified` method.
+
+        """
+        return IdentitySet(
+            [
+                state.obj()
+                for state in self._dirty_states
+                if state not in self._deleted
+            ]
+        )
+
+    @property
+    def deleted(self) -> IdentitySet:
+        "The set of all instances marked as 'deleted' within this ``Session``."
+
+        return util.IdentitySet(list(self._deleted.values()))
+
+    @property
+    def new(self) -> IdentitySet:
+        "The set of all instances marked as 'new' within this ``Session``."
+
+        return util.IdentitySet(list(self._new.values()))
+
+
+_S = TypeVar("_S", bound="Session")
+
+
+class sessionmaker(_SessionClassMethods, Generic[_S]):
+    """A configurable :class:`.Session` factory.
+
+    The :class:`.sessionmaker` factory generates new
+    :class:`.Session` objects when called, creating them given
+    the configurational arguments established here.
+
+    e.g.::
+
+        from sqlalchemy import create_engine
+        from sqlalchemy.orm import sessionmaker
+
+        # an Engine, which the Session will use for connection
+        # resources
+        engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/")
+
+        Session = sessionmaker(engine)
+
+        with Session() as session:
+            session.add(some_object)
+            session.add(some_other_object)
+            session.commit()
+
+    Context manager use is optional; otherwise, the returned
+    :class:`_orm.Session` object may be closed explicitly via the
+    :meth:`_orm.Session.close` method.   Using a
+    ``try:/finally:`` block is optional; however, it will ensure that the
+    close takes place even if there are database errors::
+
+        session = Session()
+        try:
+            session.add(some_object)
+            session.add(some_other_object)
+            session.commit()
+        finally:
+            session.close()
+
+    :class:`.sessionmaker` acts as a factory for :class:`_orm.Session`
+    objects in the same way as an :class:`_engine.Engine` acts as a factory
+    for :class:`_engine.Connection` objects.  In this way it also includes
+    a :meth:`_orm.sessionmaker.begin` method, that provides a context
+    manager which both begins and commits a transaction, as well as closes
+    out the :class:`_orm.Session` when complete, rolling back the transaction
+    if any errors occur::
+
+        Session = sessionmaker(engine)
+
+        with Session.begin() as session:
+            session.add(some_object)
+            session.add(some_other_object)
+        # commits transaction, closes session
+
+    .. versionadded:: 1.4
+
+    When calling upon :class:`_orm.sessionmaker` to construct a
+    :class:`_orm.Session`, keyword arguments may also be passed to the
+    method; these arguments will override that of the globally configured
+    parameters.  Below we use a :class:`_orm.sessionmaker` bound to a certain
+    :class:`_engine.Engine` to produce a :class:`_orm.Session` that is instead
+    bound to a specific :class:`_engine.Connection` procured from that engine::
+
+        Session = sessionmaker(engine)
+
+        # bind an individual session to a connection
+
+        with engine.connect() as connection:
+            with Session(bind=connection) as session:
+                ...  # work with session
+
+    The class also includes a method :meth:`_orm.sessionmaker.configure`, which
+    can be used to specify additional keyword arguments to the factory, which
+    will take effect for subsequent :class:`.Session` objects generated. This
+    is usually used to associate one or more :class:`_engine.Engine` objects
+    with an existing
+    :class:`.sessionmaker` factory before it is first used::
+
+        # application starts, sessionmaker does not have
+        # an engine bound yet
+        Session = sessionmaker()
+
+        # ... later, when an engine URL is read from a configuration
+        # file or other events allow the engine to be created
+        engine = create_engine("sqlite:///foo.db")
+        Session.configure(bind=engine)
+
+        sess = Session()
+        # work with session
+
+    .. seealso::
+
+        :ref:`session_getting` - introductory text on creating
+        sessions using :class:`.sessionmaker`.
+
+    """
+
+    class_: Type[_S]
+
+    @overload
+    def __init__(
+        self,
+        bind: Optional[_SessionBind] = ...,
+        *,
+        class_: Type[_S],
+        autoflush: bool = ...,
+        expire_on_commit: bool = ...,
+        info: Optional[_InfoType] = ...,
+        **kw: Any,
+    ): ...
+
+    @overload
+    def __init__(
+        self: "sessionmaker[Session]",
+        bind: Optional[_SessionBind] = ...,
+        *,
+        autoflush: bool = ...,
+        expire_on_commit: bool = ...,
+        info: Optional[_InfoType] = ...,
+        **kw: Any,
+    ): ...
+
+    def __init__(
+        self,
+        bind: Optional[_SessionBind] = None,
+        *,
+        class_: Type[_S] = Session,  # type: ignore
+        autoflush: bool = True,
+        expire_on_commit: bool = True,
+        info: Optional[_InfoType] = None,
+        **kw: Any,
+    ):
+        r"""Construct a new :class:`.sessionmaker`.
+
+        All arguments here except for ``class_`` correspond to arguments
+        accepted by :class:`.Session` directly.  See the
+        :meth:`.Session.__init__` docstring for more details on parameters.
+
+        :param bind: a :class:`_engine.Engine` or other :class:`.Connectable`
+         with
+         which newly created :class:`.Session` objects will be associated.
+        :param class\_: class to use in order to create new :class:`.Session`
+         objects.  Defaults to :class:`.Session`.
+        :param autoflush: The autoflush setting to use with newly created
+         :class:`.Session` objects.
+
+         .. seealso::
+
+            :ref:`session_flushing` - additional background on autoflush
+
+        :param expire_on_commit=True: the
+         :paramref:`_orm.Session.expire_on_commit` setting to use
+         with newly created :class:`.Session` objects.
+
+        :param info: optional dictionary of information that will be available
+         via :attr:`.Session.info`.  Note this dictionary is *updated*, not
+         replaced, when the ``info`` parameter is specified to the specific
+         :class:`.Session` construction operation.
+
+        :param \**kw: all other keyword arguments are passed to the
+         constructor of newly created :class:`.Session` objects.
+
+        """
+        kw["bind"] = bind
+        kw["autoflush"] = autoflush
+        kw["expire_on_commit"] = expire_on_commit
+        if info is not None:
+            kw["info"] = info
+        self.kw = kw
+        # make our own subclass of the given class, so that
+        # events can be associated with it specifically.
+        self.class_ = type(class_.__name__, (class_,), {})
+
+    def begin(self) -> contextlib.AbstractContextManager[_S]:
+        """Produce a context manager that both provides a new
+        :class:`_orm.Session` as well as a transaction that commits.
+
+
+        e.g.::
+
+            Session = sessionmaker(some_engine)
+
+            with Session.begin() as session:
+                session.add(some_object)
+
+            # commits transaction, closes session
+
+        .. versionadded:: 1.4
+
+
+        """
+
+        session = self()
+        return session._maker_context_manager()
+
+    def __call__(self, **local_kw: Any) -> _S:
+        """Produce a new :class:`.Session` object using the configuration
+        established in this :class:`.sessionmaker`.
+
+        In Python, the ``__call__`` method is invoked on an object when
+        it is "called" in the same way as a function::
+
+            Session = sessionmaker(some_engine)
+            session = Session()  # invokes sessionmaker.__call__()
+
+        """
+        for k, v in self.kw.items():
+            if k == "info" and "info" in local_kw:
+                d = v.copy()
+                d.update(local_kw["info"])
+                local_kw["info"] = d
+            else:
+                local_kw.setdefault(k, v)
+        return self.class_(**local_kw)
+
+    def configure(self, **new_kw: Any) -> None:
+        """(Re)configure the arguments for this sessionmaker.
+
+        e.g.::
+
+            Session = sessionmaker()
+
+            Session.configure(bind=create_engine("sqlite://"))
+        """
+        self.kw.update(new_kw)
+
+    def __repr__(self) -> str:
+        return "%s(class_=%r, %s)" % (
+            self.__class__.__name__,
+            self.class_.__name__,
+            ", ".join("%s=%r" % (k, v) for k, v in self.kw.items()),
+        )
+
+
+def close_all_sessions() -> None:
+    """Close all sessions in memory.
+
+    This function consults a global registry of all :class:`.Session` objects
+    and calls :meth:`.Session.close` on them, which resets them to a clean
+    state.
+
+    This function is not for general use but may be useful for test suites
+    within the teardown scheme.
+
+    .. versionadded:: 1.3
+
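+    E.g., a sketch of use within a test suite teardown (a ``pytest``
+    fixture is shown purely for illustration)::
+
+        import pytest
+
+        from sqlalchemy.orm import close_all_sessions
+
+        @pytest.fixture(autouse=True)
+        def teardown_sessions():
+            yield
+            close_all_sessions()
+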
+    """
+
+    for sess in _sessions.values():
+        sess.close()
+
+
+def make_transient(instance: object) -> None:
+    """Alter the state of the given instance so that it is :term:`transient`.
+
+    .. note::
+
+        :func:`.make_transient` is a special-case function for
+        advanced use cases only.
+
+    The given mapped instance is assumed to be in the :term:`persistent` or
+    :term:`detached` state.   The function will remove its association with any
+    :class:`.Session` as well as its :attr:`.InstanceState.identity`. The
+    effect is that the object will behave as though it were newly constructed,
+    except retaining any attribute / collection values that were loaded at the
+    time of the call.   The :attr:`.InstanceState.deleted` flag is also reset
+    if this object had been deleted as a result of using
+    :meth:`.Session.delete`.
+
+    .. warning::
+
+        :func:`.make_transient` does **not** "unexpire" or otherwise eagerly
+        load ORM-mapped attributes that are not currently loaded at the time
+        the function is called.   This includes attributes which:
+
+        * were expired via :meth:`.Session.expire`
+
+        * were expired as the natural effect of committing a session
+          transaction, e.g. :meth:`.Session.commit`
+
+        * are normally :term:`lazy loaded` but are not currently loaded
+
+        * are "deferred" (see :ref:`orm_queryguide_column_deferral`) and are
+          not yet loaded
+
+        * were not present in the query which loaded this object, such as that
+          which is common in joined table inheritance and other scenarios.
+
+        After :func:`.make_transient` is called, unloaded attributes such
+        as those above will normally resolve to the value ``None`` when
+        accessed, or an empty collection for a collection-oriented attribute.
+        As the object is transient and un-associated with any database
+        identity, it will no longer retrieve these values.
+
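+    E.g., an illustrative sketch (``SomeClass`` is a hypothetical mapped
+    class)::
+
+        from sqlalchemy.orm import make_transient
+
+        obj = session.get(SomeClass, 5)
+        make_transient(obj)
+
+        # the object is no longer associated with the Session
+        assert obj not in session
+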
+    .. seealso::
+
+        :func:`.make_transient_to_detached`
+
+    """
+    state = attributes.instance_state(instance)
+    s = _state_session(state)
+    if s:
+        s._expunge_states([state])
+
+    # remove expired state
+    state.expired_attributes.clear()
+
+    # remove deferred callables
+    if state.callables:
+        del state.callables
+
+    if state.key:
+        del state.key
+    if state._deleted:
+        del state._deleted
+
+
+def make_transient_to_detached(instance: object) -> None:
+    """Make the given transient instance :term:`detached`.
+
+    .. note::
+
+        :func:`.make_transient_to_detached` is a special-case function for
+        advanced use cases only.
+
+    All attribute history on the given instance
+    will be reset as though the instance were freshly loaded
+    from a query.  Missing attributes will be marked as expired.
+    The primary key attributes of the object, which are required, will be made
+    into the "key" of the instance.
+
+    The object can then be added to a session, or merged, possibly
+    using the ``load=False`` flag, at which point it will look as if
+    it were loaded that way, without emitting SQL.
+
+    This is a special use case function that differs from a normal
+    call to :meth:`.Session.merge` in that a given persistent state
+    can be manufactured without any SQL calls.
+
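+    E.g., an illustrative sketch (``SomeClass`` is a hypothetical mapped
+    class with primary key attribute ``id``)::
+
+        from sqlalchemy.orm import make_transient_to_detached
+
+        obj = SomeClass(id=5)  # transient; primary key set manually
+        make_transient_to_detached(obj)
+
+        # the object may now be added to a Session without SQL being
+        # emitted; unloaded attributes will refresh on access
+        session.add(obj)
+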
+    .. seealso::
+
+        :func:`.make_transient`
+
+        :meth:`.Session.enable_relationship_loading`
+
+    """
+    state = attributes.instance_state(instance)
+    if state.session_id or state.key:
+        raise sa_exc.InvalidRequestError("Given object must be transient")
+    state.key = state.mapper._identity_key_from_state(state)
+    if state._deleted:
+        del state._deleted
+    state._commit_all(state.dict)
+    state._expire_attributes(state.dict, state.unloaded)
+
+
+def object_session(instance: object) -> Optional[Session]:
+    """Return the :class:`.Session` to which the given instance belongs.
+
+    This is essentially the same as the :attr:`.InstanceState.session`
+    accessor.  See that attribute for details.
+
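+    E.g. (``some_mapped_object`` is any mapped instance)::
+
+        from sqlalchemy.orm import object_session
+
+        sess = object_session(some_mapped_object)
+        if sess is not None:
+            sess.expire(some_mapped_object)
+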
+    """
+
+    try:
+        state = attributes.instance_state(instance)
+    except exc.NO_STATE as err:
+        raise exc.UnmappedInstanceError(instance) from err
+    else:
+        return _state_session(state)
+
+
+_new_sessionid = util.counter()
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/state.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/state.py
new file mode 100644
index 00000000..d4bbf920
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/state.py
@@ -0,0 +1,1143 @@
+# orm/state.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Defines instrumentation of instances.
+
+This module is usually not directly visible to user applications, but
+defines a large part of the ORM's interactivity.
+
+"""
+
+from __future__ import annotations
+
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Generic
+from typing import Iterable
+from typing import Optional
+from typing import Set
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+import weakref
+
+from . import base
+from . import exc as orm_exc
+from . import interfaces
+from ._typing import _O
+from ._typing import is_collection_impl
+from .base import ATTR_WAS_SET
+from .base import INIT_OK
+from .base import LoaderCallableStatus
+from .base import NEVER_SET
+from .base import NO_VALUE
+from .base import PASSIVE_NO_INITIALIZE
+from .base import PASSIVE_NO_RESULT
+from .base import PASSIVE_OFF
+from .base import SQL_OK
+from .path_registry import PathRegistry
+from .. import exc as sa_exc
+from .. import inspection
+from .. import util
+from ..util.typing import Literal
+from ..util.typing import Protocol
+
+if TYPE_CHECKING:
+    from ._typing import _IdentityKeyType
+    from ._typing import _InstanceDict
+    from ._typing import _LoaderCallable
+    from .attributes import AttributeImpl
+    from .attributes import History
+    from .base import PassiveFlag
+    from .collections import _AdaptedCollectionProtocol
+    from .identity import IdentityMap
+    from .instrumentation import ClassManager
+    from .interfaces import ORMOption
+    from .mapper import Mapper
+    from .session import Session
+    from ..engine import Row
+    from ..ext.asyncio.session import async_session as _async_provider
+    from ..ext.asyncio.session import AsyncSession
+
+if TYPE_CHECKING:
+    _sessions: weakref.WeakValueDictionary[int, Session]
+else:
+    # late-populated by session.py
+    _sessions = None
+
+
+if not TYPE_CHECKING:
+    # optionally late-provided by sqlalchemy.ext.asyncio.session
+
+    _async_provider = None  # noqa
+
+
+class _InstanceDictProto(Protocol):
+    def __call__(self) -> Optional[IdentityMap]: ...
+
+
+class _InstallLoaderCallableProto(Protocol[_O]):
+    """used at result loading time to install a _LoaderCallable callable
+    upon a specific InstanceState, which will be used to populate an
+    attribute when that attribute is accessed.
+
+    Concrete examples are per-instance deferred column loaders and
+    relationship lazy loaders.
+
+    """
+
+    def __call__(
+        self, state: InstanceState[_O], dict_: _InstanceDict, row: Row[Any]
+    ) -> None: ...
+
+
+@inspection._self_inspects
+class InstanceState(interfaces.InspectionAttrInfo, Generic[_O]):
+    """Tracks state information at the instance level.
+
+    The :class:`.InstanceState` is a key object used by the
+    SQLAlchemy ORM in order to track the state of an object;
+    it is created the moment an object is instantiated, typically
+    as a result of :term:`instrumentation` which SQLAlchemy applies
+    to the ``__init__()`` method of the class.
+
+    :class:`.InstanceState` is also a semi-public object,
+    available for runtime inspection as to the state of a
+    mapped instance, including information such as its current
+    status within a particular :class:`.Session` and details
+    about data on individual attributes.  The public API
+    in order to acquire a :class:`.InstanceState` object
+    is to use the :func:`_sa.inspect` system::
+
+        >>> from sqlalchemy import inspect
+        >>> insp = inspect(some_mapped_object)
+        >>> insp.attrs.nickname.history
+        History(added=['new nickname'], unchanged=(), deleted=['nickname'])
+
+    .. seealso::
+
+        :ref:`orm_mapper_inspection_instancestate`
+
+    """
+
+    __slots__ = (
+        "__dict__",
+        "__weakref__",
+        "class_",
+        "manager",
+        "obj",
+        "committed_state",
+        "expired_attributes",
+    )
+
+    manager: ClassManager[_O]
+    session_id: Optional[int] = None
+    key: Optional[_IdentityKeyType[_O]] = None
+    runid: Optional[int] = None
+    load_options: Tuple[ORMOption, ...] = ()
+    load_path: PathRegistry = PathRegistry.root
+    insert_order: Optional[int] = None
+    _strong_obj: Optional[object] = None
+    obj: weakref.ref[_O]
+
+    committed_state: Dict[str, Any]
+
+    modified: bool = False
+    """When ``True`` the object was modified."""
+    expired: bool = False
+    """When ``True`` the object is :term:`expired`.
+
+    .. seealso::
+
+        :ref:`session_expire`
+    """
+    _deleted: bool = False
+    _load_pending: bool = False
+    _orphaned_outside_of_session: bool = False
+    is_instance: bool = True
+    identity_token: object = None
+    _last_known_values: Optional[Dict[str, Any]] = None
+
+    _instance_dict: _InstanceDictProto
+    """A weak reference, or in the default case a plain callable, that
+    returns a reference to the current :class:`.IdentityMap`, if any.
+
+    """
+    if not TYPE_CHECKING:
+
+        def _instance_dict(self):
+            """default 'weak reference' for _instance_dict"""
+            return None
+
+    expired_attributes: Set[str]
+    """The set of keys which are 'expired' to be loaded by
+    the manager's deferred scalar loader, assuming no pending
+    changes.
+
+    See also the ``unmodified`` collection which is intersected
+    against this set when a refresh operation occurs.
+    """
+
+    callables: Dict[str, Callable[[InstanceState[_O], PassiveFlag], Any]]
+    """A namespace where a per-state loader callable can be associated.
+
+    In SQLAlchemy 1.0, this is only used for lazy loaders / deferred
+    loaders that were set up via query options.
+
+    Previously, ``callables`` was also used to indicate expired attributes
+    by storing a link to the InstanceState itself in this dictionary.
+    This role is now handled by the expired_attributes set.
+
+    """
+
+    if not TYPE_CHECKING:
+        callables = util.EMPTY_DICT
+
+    def __init__(self, obj: _O, manager: ClassManager[_O]):
+        self.class_ = obj.__class__
+        self.manager = manager
+        self.obj = weakref.ref(obj, self._cleanup)
+        self.committed_state = {}
+        self.expired_attributes = set()
+
+    @util.memoized_property
+    def attrs(self) -> util.ReadOnlyProperties[AttributeState]:
+        """Return a namespace representing each attribute on
+        the mapped object, including its current value
+        and history.
+
+        The returned object is an instance of :class:`.AttributeState`.
+        This object allows inspection of the current data
+        within an attribute as well as attribute history
+        since the last flush.
+
+        """
+        return util.ReadOnlyProperties(
+            {key: AttributeState(self, key) for key in self.manager}
+        )
+
+    @property
+    def transient(self) -> bool:
+        """Return ``True`` if the object is :term:`transient`.
+
+        .. seealso::
+
+            :ref:`session_object_states`
+
+        """
+        return self.key is None and not self._attached
+
+    @property
+    def pending(self) -> bool:
+        """Return ``True`` if the object is :term:`pending`.
+
+        .. seealso::
+
+            :ref:`session_object_states`
+
+        """
+        return self.key is None and self._attached
+
+    @property
+    def deleted(self) -> bool:
+        """Return ``True`` if the object is :term:`deleted`.
+
+        An object that is in the deleted state is guaranteed to
+        not be within the :attr:`.Session.identity_map` of its parent
+        :class:`.Session`; however if the session's transaction is rolled
+        back, the object will be restored to the persistent state and
+        the identity map.
+
+        .. note::
+
+            The :attr:`.InstanceState.deleted` attribute refers to a specific
+            state of the object that occurs between the "persistent" and
+            "detached" states; once the object is :term:`detached`, the
+            :attr:`.InstanceState.deleted` attribute **no longer returns
+            True**; in order to detect that a state was deleted, regardless
+            of whether or not the object is associated with a
+            :class:`.Session`, use the :attr:`.InstanceState.was_deleted`
+            accessor.
+
+        .. versionadded:: 1.1
+
+        .. seealso::
+
+            :ref:`session_object_states`
+
+        """
+        return self.key is not None and self._attached and self._deleted
+
+    @property
+    def was_deleted(self) -> bool:
+        """Return True if this object is or was previously in the
+        "deleted" state and has not been reverted to persistent.
+
+        This flag returns True once the object was deleted in flush.
+        When the object is expunged from the session either explicitly
+        or via transaction commit and enters the "detached" state,
+        this flag will continue to report True.
+
+        .. seealso::
+
+            :attr:`.InstanceState.deleted` - refers to the "deleted" state
+
+            :func:`.orm.util.was_deleted` - standalone function
+
+            :ref:`session_object_states`
+
+        """
+        return self._deleted
+
+    @property
+    def persistent(self) -> bool:
+        """Return ``True`` if the object is :term:`persistent`.
+
+        An object that is in the persistent state is guaranteed to
+        be within the :attr:`.Session.identity_map` of its parent
+        :class:`.Session`.
+
+        .. seealso::
+
+            :ref:`session_object_states`
+
+        """
+        return self.key is not None and self._attached and not self._deleted
+
+    @property
+    def detached(self) -> bool:
+        """Return ``True`` if the object is :term:`detached`.
+
+        .. seealso::
+
+            :ref:`session_object_states`
+
+        """
+        return self.key is not None and not self._attached
+
+    @util.non_memoized_property
+    @util.preload_module("sqlalchemy.orm.session")
+    def _attached(self) -> bool:
+        return (
+            self.session_id is not None
+            and self.session_id in util.preloaded.orm_session._sessions
+        )
+
+    def _track_last_known_value(self, key: str) -> None:
+        """Track the last known value of a particular key after expiration
+        operations.
+
+        .. versionadded:: 1.3
+
+        """
+
+        lkv = self._last_known_values
+        if lkv is None:
+            self._last_known_values = lkv = {}
+        if key not in lkv:
+            lkv[key] = NO_VALUE
+
+    @property
+    def session(self) -> Optional[Session]:
+        """Return the owning :class:`.Session` for this instance,
+        or ``None`` if none available.
+
+        Note that the result here can in some cases be *different*
+        from that of ``obj in session``; an object that's been deleted
+        will report as not ``in session``; however, if the transaction is
+        still in progress, this attribute will still refer to that session.
+        Only when the transaction is completed does the object become
+        fully detached under normal circumstances.
+
+        .. seealso::
+
+            :attr:`_orm.InstanceState.async_session`
+
+        """
+        if self.session_id:
+            try:
+                return _sessions[self.session_id]
+            except KeyError:
+                pass
+        return None
+
+    @property
+    def async_session(self) -> Optional[AsyncSession]:
+        """Return the owning :class:`_asyncio.AsyncSession` for this instance,
+        or ``None`` if none available.
+
+        This attribute is only non-None when the :mod:`sqlalchemy.ext.asyncio`
+        API is in use for this ORM object. The returned
+        :class:`_asyncio.AsyncSession` object will be a proxy for the
+        :class:`_orm.Session` object that would be returned from the
+        :attr:`_orm.InstanceState.session` attribute for this
+        :class:`_orm.InstanceState`.
+
+        .. versionadded:: 1.4.18
+
+        .. seealso::
+
+            :ref:`asyncio_toplevel`
+
+        """
+        if _async_provider is None:
+            return None
+
+        sess = self.session
+        if sess is not None:
+            return _async_provider(sess)
+        else:
+            return None
+
+    @property
+    def object(self) -> Optional[_O]:
+        """Return the mapped object represented by this
+        :class:`.InstanceState`.
+
+        Returns None if the object has been garbage collected.
+
+        """
+        return self.obj()
+
+    @property
+    def identity(self) -> Optional[Tuple[Any, ...]]:
+        """Return the mapped identity of the mapped object.
+        This is the primary key identity as persisted by the ORM
+        which can always be passed directly to
+        :meth:`_query.Query.get`.
+
+        Returns ``None`` if the object has no primary key identity.
+
+        .. note::
+            An object which is :term:`transient` or :term:`pending`
+            does **not** have a mapped identity until it is flushed,
+            even if its attributes include primary key values.
+
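+        E.g., using the :func:`_sa.inspect` system (the value shown is
+        illustrative, assuming a single-column primary key of ``5``)::
+
+            >>> from sqlalchemy import inspect
+            >>> inspect(some_mapped_object).identity
+            (5,)
+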
+        """
+        if self.key is None:
+            return None
+        else:
+            return self.key[1]
+
+    @property
+    def identity_key(self) -> Optional[_IdentityKeyType[_O]]:
+        """Return the identity key for the mapped object.
+
+        This is the key used to locate the object within
+        the :attr:`.Session.identity_map` mapping.   It contains
+        the identity as returned by :attr:`.identity` within it.
+
+
+        """
+        return self.key
+
+    @util.memoized_property
+    def parents(self) -> Dict[int, Union[Literal[False], InstanceState[Any]]]:
+        return {}
+
+    @util.memoized_property
+    def _pending_mutations(self) -> Dict[str, PendingCollection]:
+        return {}
+
+    @util.memoized_property
+    def _empty_collections(self) -> Dict[str, _AdaptedCollectionProtocol]:
+        return {}
+
+    @util.memoized_property
+    def mapper(self) -> Mapper[_O]:
+        """Return the :class:`_orm.Mapper` used for this mapped object."""
+        return self.manager.mapper
+
+    @property
+    def has_identity(self) -> bool:
+        """Return ``True`` if this object has an identity key.
+
+        This should always have the same value as the
+        expression ``state.persistent`` or ``state.detached``.
+
+        """
+        return bool(self.key)
+
+    @classmethod
+    def _detach_states(
+        self,
+        states: Iterable[InstanceState[_O]],
+        session: Session,
+        to_transient: bool = False,
+    ) -> None:
+        persistent_to_detached = (
+            session.dispatch.persistent_to_detached or None
+        )
+        deleted_to_detached = session.dispatch.deleted_to_detached or None
+        pending_to_transient = session.dispatch.pending_to_transient or None
+        persistent_to_transient = (
+            session.dispatch.persistent_to_transient or None
+        )
+
+        for state in states:
+            deleted = state._deleted
+            pending = state.key is None
+            persistent = not pending and not deleted
+
+            state.session_id = None
+
+            if to_transient and state.key:
+                del state.key
+            if persistent:
+                if to_transient:
+                    if persistent_to_transient is not None:
+                        persistent_to_transient(session, state)
+                elif persistent_to_detached is not None:
+                    persistent_to_detached(session, state)
+            elif deleted and deleted_to_detached is not None:
+                deleted_to_detached(session, state)
+            elif pending and pending_to_transient is not None:
+                pending_to_transient(session, state)
+
+            state._strong_obj = None
+
+    def _detach(self, session: Optional[Session] = None) -> None:
+        if session:
+            InstanceState._detach_states([self], session)
+        else:
+            self.session_id = self._strong_obj = None
+
+    def _dispose(self) -> None:
+        # used by the test suite, apparently
+        self._detach()
+
+    def _cleanup(self, ref: weakref.ref[_O]) -> None:
+        """Weakref callback cleanup.
+
+        This callable cleans out the state when it is being garbage
+        collected.
+
+        this _cleanup **assumes** that there are no strong refs to us!
+        Will not work otherwise!
+
+        """
+
+        # Python builtins become undefined during interpreter shutdown.
+        # Guard against exceptions during this phase, as the method cannot
+        # proceed in any case if builtins have been undefined.
+        if dict is None:
+            return
+
+        instance_dict = self._instance_dict()
+        if instance_dict is not None:
+            instance_dict._fast_discard(self)
+            del self._instance_dict
+
+            # we can't possibly be in instance_dict._modified
+            # b.c. this is weakref cleanup only, that set
+            # is strong referencing!
+            # assert self not in instance_dict._modified
+
+        self.session_id = self._strong_obj = None
+
+    @property
+    def dict(self) -> _InstanceDict:
+        """Return the instance dict used by the object.
+
+        Under normal circumstances, this is always synonymous
+        with the ``__dict__`` attribute of the mapped object,
+        unless an alternative instrumentation system has been
+        configured.
+
+        In the case that the actual object has been garbage
+        collected, this accessor returns a blank dictionary.
+
+        """
+        o = self.obj()
+        if o is not None:
+            return base.instance_dict(o)
+        else:
+            return {}
+
+    def _initialize_instance(*mixed: Any, **kwargs: Any) -> None:
+        self, instance, args = mixed[0], mixed[1], mixed[2:]  # noqa
+        manager = self.manager
+
+        manager.dispatch.init(self, args, kwargs)
+
+        try:
+            manager.original_init(*mixed[1:], **kwargs)
+        except:
+            with util.safe_reraise():
+                manager.dispatch.init_failure(self, args, kwargs)
+
+    def get_history(self, key: str, passive: PassiveFlag) -> History:
+        return self.manager[key].impl.get_history(self, self.dict, passive)
+
+    def get_impl(self, key: str) -> AttributeImpl:
+        return self.manager[key].impl
+
+    def _get_pending_mutation(self, key: str) -> PendingCollection:
+        if key not in self._pending_mutations:
+            self._pending_mutations[key] = PendingCollection()
+        return self._pending_mutations[key]
+
+    def __getstate__(self) -> Dict[str, Any]:
+        state_dict: Dict[str, Any] = {
+            "instance": self.obj(),
+            "class_": self.class_,
+            "committed_state": self.committed_state,
+            "expired_attributes": self.expired_attributes,
+        }
+        state_dict.update(
+            (k, self.__dict__[k])
+            for k in (
+                "_pending_mutations",
+                "modified",
+                "expired",
+                "callables",
+                "key",
+                "parents",
+                "load_options",
+                "class_",
+                "expired_attributes",
+                "info",
+            )
+            if k in self.__dict__
+        )
+        if self.load_path:
+            state_dict["load_path"] = self.load_path.serialize()
+
+        state_dict["manager"] = self.manager._serialize(self, state_dict)
+
+        return state_dict
+
+    def __setstate__(self, state_dict: Dict[str, Any]) -> None:
+        inst = state_dict["instance"]
+        if inst is not None:
+            self.obj = weakref.ref(inst, self._cleanup)
+            self.class_ = inst.__class__
+        else:
+            self.obj = lambda: None  # type: ignore
+            self.class_ = state_dict["class_"]
+
+        self.committed_state = state_dict.get("committed_state", {})
+        self._pending_mutations = state_dict.get("_pending_mutations", {})
+        self.parents = state_dict.get("parents", {})
+        self.modified = state_dict.get("modified", False)
+        self.expired = state_dict.get("expired", False)
+        if "info" in state_dict:
+            self.info.update(state_dict["info"])
+        if "callables" in state_dict:
+            self.callables = state_dict["callables"]
+
+            self.expired_attributes = state_dict["expired_attributes"]
+        else:
+            if "expired_attributes" in state_dict:
+                self.expired_attributes = state_dict["expired_attributes"]
+            else:
+                self.expired_attributes = set()
+
+        self.__dict__.update(
+            [
+                (k, state_dict[k])
+                for k in ("key", "load_options")
+                if k in state_dict
+            ]
+        )
+        if self.key:
+            self.identity_token = self.key[2]
+
+        if "load_path" in state_dict:
+            self.load_path = PathRegistry.deserialize(state_dict["load_path"])
+
+        state_dict["manager"](self, inst, state_dict)
+
+    def _reset(self, dict_: _InstanceDict, key: str) -> None:
+        """Remove the given attribute and any
+        callables associated with it."""
+
+        old = dict_.pop(key, None)
+        manager_impl = self.manager[key].impl
+        if old is not None and is_collection_impl(manager_impl):
+            manager_impl._invalidate_collection(old)
+        self.expired_attributes.discard(key)
+        if self.callables:
+            self.callables.pop(key, None)
+
+    def _copy_callables(self, from_: InstanceState[Any]) -> None:
+        if "callables" in from_.__dict__:
+            self.callables = dict(from_.callables)
+
+    @classmethod
+    def _instance_level_callable_processor(
+        cls, manager: ClassManager[_O], fn: _LoaderCallable, key: Any
+    ) -> _InstallLoaderCallableProto[_O]:
+        impl = manager[key].impl
+        if is_collection_impl(impl):
+            fixed_impl = impl
+
+            def _set_callable(
+                state: InstanceState[_O], dict_: _InstanceDict, row: Row[Any]
+            ) -> None:
+                if "callables" not in state.__dict__:
+                    state.callables = {}
+                old = dict_.pop(key, None)
+                if old is not None:
+                    fixed_impl._invalidate_collection(old)
+                state.callables[key] = fn
+
+        else:
+
+            def _set_callable(
+                state: InstanceState[_O], dict_: _InstanceDict, row: Row[Any]
+            ) -> None:
+                if "callables" not in state.__dict__:
+                    state.callables = {}
+                state.callables[key] = fn
+
+        return _set_callable
+
+    def _expire(
+        self, dict_: _InstanceDict, modified_set: Set[InstanceState[Any]]
+    ) -> None:
+        self.expired = True
+        if self.modified:
+            modified_set.discard(self)
+            self.committed_state.clear()
+            self.modified = False
+
+        self._strong_obj = None
+
+        if "_pending_mutations" in self.__dict__:
+            del self.__dict__["_pending_mutations"]
+
+        if "parents" in self.__dict__:
+            del self.__dict__["parents"]
+
+        self.expired_attributes.update(
+            [impl.key for impl in self.manager._loader_impls]
+        )
+
+        if self.callables:
+            # the per state loader callables we can remove here are
+            # LoadDeferredColumns, which undefers a column at the instance
+            # level that is mapped with deferred, and LoadLazyAttribute,
+            # which lazy loads a relationship at the instance level that
+            # is mapped with "noload" or perhaps "immediateload".
+            # Before 1.4, only column-based
+            # attributes could be considered to be "expired", so here they
+            # were the only ones "unexpired", meaning they were made
+            # deferred again.   For the moment, as of 1.4 we also apply
+            # the same treatment to relationships, that is, an
+            # instance-level lazy loader is reset in the same way as a
+            # column loader.
+            for k in self.expired_attributes.intersection(self.callables):
+                del self.callables[k]
+
+        for k in self.manager._collection_impl_keys.intersection(dict_):
+            collection = dict_.pop(k)
+            collection._sa_adapter.invalidated = True
+
+        if self._last_known_values:
+            self._last_known_values.update(
+                {k: dict_[k] for k in self._last_known_values if k in dict_}
+            )
+
+        for key in self.manager._all_key_set.intersection(dict_):
+            del dict_[key]
+
+        self.manager.dispatch.expire(self, None)
+
+    def _expire_attributes(
+        self,
+        dict_: _InstanceDict,
+        attribute_names: Iterable[str],
+        no_loader: bool = False,
+    ) -> None:
+        pending = self.__dict__.get("_pending_mutations", None)
+
+        callables = self.callables
+
+        for key in attribute_names:
+            impl = self.manager[key].impl
+            if impl.accepts_scalar_loader:
+                if no_loader and (impl.callable_ or key in callables):
+                    continue
+
+                self.expired_attributes.add(key)
+                if callables and key in callables:
+                    del callables[key]
+            old = dict_.pop(key, NO_VALUE)
+            if is_collection_impl(impl) and old is not NO_VALUE:
+                impl._invalidate_collection(old)
+
+            lkv = self._last_known_values
+            if lkv is not None and key in lkv and old is not NO_VALUE:
+                lkv[key] = old
+
+            self.committed_state.pop(key, None)
+            if pending:
+                pending.pop(key, None)
+
+        self.manager.dispatch.expire(self, attribute_names)
+
+    def _load_expired(
+        self, state: InstanceState[_O], passive: PassiveFlag
+    ) -> LoaderCallableStatus:
+        """__call__ allows the InstanceState to act as a deferred
+        callable for loading expired attributes, which is also
+        serializable (picklable).
+
+        """
+
+        if not passive & SQL_OK:
+            return PASSIVE_NO_RESULT
+
+        toload = self.expired_attributes.intersection(self.unmodified)
+        toload = toload.difference(
+            attr
+            for attr in toload
+            if not self.manager[attr].impl.load_on_unexpire
+        )
+
+        self.manager.expired_attribute_loader(self, toload, passive)
+
+        # if the loader failed, or this
+        # instance state didn't have an identity,
+        # the attributes still might be in the callables
+        # dict.  ensure they are removed.
+        self.expired_attributes.clear()
+
+        return ATTR_WAS_SET
+
+    @property
+    def unmodified(self) -> Set[str]:
+        """Return the set of keys which have no uncommitted changes"""
+
+        return set(self.manager).difference(self.committed_state)
+
+    def unmodified_intersection(self, keys: Iterable[str]) -> Set[str]:
+        """Return self.unmodified.intersection(keys)."""
+
+        return (
+            set(keys)
+            .intersection(self.manager)
+            .difference(self.committed_state)
+        )
+
+    @property
+    def unloaded(self) -> Set[str]:
+        """Return the set of keys which do not have a loaded value.
+
+        This includes expired attributes and any other attribute that was never
+        populated or modified.
+
+        """
+        return (
+            set(self.manager)
+            .difference(self.committed_state)
+            .difference(self.dict)
+        )
+
+    @property
+    @util.deprecated(
+        "2.0",
+        "The :attr:`.InstanceState.unloaded_expirable` attribute is "
+        "deprecated.  Please use :attr:`.InstanceState.unloaded`.",
+    )
+    def unloaded_expirable(self) -> Set[str]:
+        """Synonymous with :attr:`.InstanceState.unloaded`.
+
+        This attribute was added as an implementation-specific detail at some
+        point and should be considered to be private.
+
+        """
+        return self.unloaded
+
+    @property
+    def _unloaded_non_object(self) -> Set[str]:
+        return self.unloaded.intersection(
+            attr
+            for attr in self.manager
+            if self.manager[attr].impl.accepts_scalar_loader
+        )
+
+    def _modified_event(
+        self,
+        dict_: _InstanceDict,
+        attr: Optional[AttributeImpl],
+        previous: Any,
+        collection: bool = False,
+        is_userland: bool = False,
+    ) -> None:
+        if attr:
+            if not attr.send_modified_events:
+                return
+            if is_userland and attr.key not in dict_:
+                raise sa_exc.InvalidRequestError(
+                    "Can't flag attribute '%s' modified; it's not present in "
+                    "the object state" % attr.key
+                )
+            if attr.key not in self.committed_state or is_userland:
+                if collection:
+                    if TYPE_CHECKING:
+                        assert is_collection_impl(attr)
+                    if previous is NEVER_SET:
+                        if attr.key in dict_:
+                            previous = dict_[attr.key]
+
+                    if previous not in (None, NO_VALUE, NEVER_SET):
+                        previous = attr.copy(previous)
+                self.committed_state[attr.key] = previous
+
+            lkv = self._last_known_values
+            if lkv is not None and attr.key in lkv:
+                lkv[attr.key] = NO_VALUE
+
+        # assert self._strong_obj is None or self.modified
+
+        if (self.session_id and self._strong_obj is None) or not self.modified:
+            self.modified = True
+            instance_dict = self._instance_dict()
+            if instance_dict:
+                has_modified = bool(instance_dict._modified)
+                instance_dict._modified.add(self)
+            else:
+                has_modified = False
+
+            # only create _strong_obj link if attached
+            # to a session
+
+            inst = self.obj()
+            if self.session_id:
+                self._strong_obj = inst
+
+                # if identity map already had modified objects,
+                # assume autobegin already occurred, else check
+                # for autobegin
+                if not has_modified:
+                    # inline of autobegin, to ensure session transaction
+                    # snapshot is established
+                    try:
+                        session = _sessions[self.session_id]
+                    except KeyError:
+                        pass
+                    else:
+                        if session._transaction is None:
+                            session._autobegin_t()
+
+            if inst is None and attr:
+                raise orm_exc.ObjectDereferencedError(
+                    "Can't emit change event for attribute '%s' - "
+                    "parent object of type %s has been garbage "
+                    "collected."
+                    % (self.manager[attr.key], base.state_class_str(self))
+                )
+
+    def _commit(self, dict_: _InstanceDict, keys: Iterable[str]) -> None:
+        """Commit attributes.
+
+        This is used by a partial-attribute load operation to mark committed
+        those attributes which were refreshed from the database.
+
+        Attributes marked as "expired" can potentially remain "expired" after
+        this step if a value was not populated in state.dict.
+
+        """
+        for key in keys:
+            self.committed_state.pop(key, None)
+
+        self.expired = False
+
+        self.expired_attributes.difference_update(
+            set(keys).intersection(dict_)
+        )
+
+        # the per-key commit removes object-level callables,
+        # while commit_all does not.  It's not clear
+        # whether this behavior has a rationale; however, tests do
+        # ensure this is what it does.
+        if self.callables:
+            for key in (
+                set(self.callables).intersection(keys).intersection(dict_)
+            ):
+                del self.callables[key]
+
+    def _commit_all(
+        self, dict_: _InstanceDict, instance_dict: Optional[IdentityMap] = None
+    ) -> None:
+        """commit all attributes unconditionally.
+
+        This is used after a flush() or a full load/refresh
+        to remove all pending state from the instance.
+
+         - all attributes are marked as "committed"
+         - the "strong dirty reference" is removed
+         - the "modified" flag is set to False
+         - any "expired" markers for scalar attributes loaded are removed.
+         - lazy load callables for objects / collections *stay*
+
+        Attributes marked as "expired" can potentially remain
+        "expired" after this step if a value was not populated in state.dict.
+
+        """
+        self._commit_all_states([(self, dict_)], instance_dict)
+
+    @classmethod
+    def _commit_all_states(
+        self,
+        iter_: Iterable[Tuple[InstanceState[Any], _InstanceDict]],
+        instance_dict: Optional[IdentityMap] = None,
+    ) -> None:
+        """Mass / highly inlined version of commit_all()."""
+
+        for state, dict_ in iter_:
+            state_dict = state.__dict__
+
+            state.committed_state.clear()
+
+            if "_pending_mutations" in state_dict:
+                del state_dict["_pending_mutations"]
+
+            state.expired_attributes.difference_update(dict_)
+
+            if instance_dict and state.modified:
+                instance_dict._modified.discard(state)
+
+            state.modified = state.expired = False
+            state._strong_obj = None
+
+
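+# Editor's note: a hedged usage sketch, not part of SQLAlchemy.  The public
+# route to the InstanceState accessors defined above is the top-level
+# ``sqlalchemy.inspect()`` function; ``user`` and ``session`` below are
+# hypothetical.
+def _editor_example_inspect_state(session, user):
+    from sqlalchemy import inspect
+
+    state = inspect(user)  # InstanceState for ``user``
+    assert state.session is session  # owning Session, or None if detached
+    pk = state.identity  # primary key tuple, or None if never flushed
+    unloaded = state.unloaded  # keys with no loaded value (e.g. expired)
+    unmodified = state.unmodified  # keys with no uncommitted changes
+    return pk, unloaded, unmodified
+
+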
+class AttributeState:
+    """Provide an inspection interface corresponding
+    to a particular attribute on a particular mapped object.
+
+    The :class:`.AttributeState` object is accessed
+    via the :attr:`.InstanceState.attrs` collection
+    of a particular :class:`.InstanceState`::
+
+        from sqlalchemy import inspect
+
+        insp = inspect(some_mapped_object)
+        attr_state = insp.attrs.some_attribute
+
+    """
+
+    __slots__ = ("state", "key")
+
+    state: InstanceState[Any]
+    key: str
+
+    def __init__(self, state: InstanceState[Any], key: str):
+        self.state = state
+        self.key = key
+
+    @property
+    def loaded_value(self) -> Any:
+        """The current value of this attribute as loaded from the database.
+
+        If the value has not been loaded, or is otherwise not present
+        in the object's dictionary, returns NO_VALUE.
+
+        """
+        return self.state.dict.get(self.key, NO_VALUE)
+
+    @property
+    def value(self) -> Any:
+        """Return the value of this attribute.
+
+        This operation is equivalent to accessing the object's
+        attribute directly or via ``getattr()``, and will fire
+        off any pending loader callables if needed.
+
+        """
+        return self.state.manager[self.key].__get__(
+            self.state.obj(), self.state.class_
+        )
+
+    @property
+    def history(self) -> History:
+        """Return the current **pre-flush** change history for
+        this attribute, via the :class:`.History` interface.
+
+        This method will **not** emit loader callables if the value of the
+        attribute is unloaded.
+
+        .. note::
+
+            The attribute history system tracks changes on a **per flush
+            basis**. Each time the :class:`.Session` is flushed, the history
+            of each attribute is reset to empty.   The :class:`.Session` by
+            default autoflushes each time a :class:`_query.Query` is
+            invoked.  For options on how to control this, see
+            :ref:`session_flushing`.
+
+        .. seealso::
+
+            :meth:`.AttributeState.load_history` - retrieve history
+            using loader callables if the value is not locally present.
+
+            :func:`.attributes.get_history` - underlying function
+
+        """
+        return self.state.get_history(self.key, PASSIVE_NO_INITIALIZE)
+
+    def load_history(self) -> History:
+        """Return the current **pre-flush** change history for
+        this attribute, via the :class:`.History` interface.
+
+        This method **will** emit loader callables if the value of the
+        attribute is unloaded.
+
+        .. note::
+
+            The attribute history system tracks changes on a **per flush
+            basis**. Each time the :class:`.Session` is flushed, the history
+            of each attribute is reset to empty.   The :class:`.Session` by
+            default autoflushes each time a :class:`_query.Query` is
+            invoked.  For options on how to control this, see
+            :ref:`session_flushing`.
+
+        .. seealso::
+
+            :attr:`.AttributeState.history`
+
+            :func:`.attributes.get_history` - underlying function
+
+        """
+        return self.state.get_history(self.key, PASSIVE_OFF ^ INIT_OK)
+
+
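+# Editor's note: a hedged sketch, not part of SQLAlchemy, showing the
+# per-attribute inspection above; ``user`` is a hypothetical mapped object
+# with a ``name`` attribute.
+def _editor_example_attribute_history(user):
+    from sqlalchemy import inspect
+
+    attr_state = inspect(user).attrs.name  # an AttributeState
+    current = attr_state.value  # like getattr(); may invoke loaders
+    hist = attr_state.history  # pre-flush History; no loaders emitted
+    return current, hist.added, hist.unchanged, hist.deleted
+
+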
+class PendingCollection:
+    """A writable placeholder for an unloaded collection.
+
+    Stores items appended to and removed from a collection that has not yet
+    been loaded. When the collection is loaded, the changes stored in
+    PendingCollection are applied to it to produce the final result.
+
+    """
+
+    __slots__ = ("deleted_items", "added_items")
+
+    deleted_items: util.IdentitySet
+    added_items: util.OrderedIdentitySet
+
+    def __init__(self) -> None:
+        self.deleted_items = util.IdentitySet()
+        self.added_items = util.OrderedIdentitySet()
+
+    def merge_with_history(self, history: History) -> History:
+        return history._merge(self.added_items, self.deleted_items)
+
+    def append(self, value: Any) -> None:
+        if value in self.deleted_items:
+            self.deleted_items.remove(value)
+        else:
+            self.added_items.add(value)
+
+    def remove(self, value: Any) -> None:
+        if value in self.added_items:
+            self.added_items.remove(value)
+        else:
+            self.deleted_items.add(value)
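+
+
+def _editor_example_pending_collection():
+    """Editor's illustration, not part of SQLAlchemy: shows how appends
+    and removes against an unloaded collection are reconciled by
+    PendingCollection before the actual load takes place."""
+    a, b = object(), object()
+
+    pending = PendingCollection()
+    pending.append(a)  # queued as an addition
+    pending.remove(b)  # b was never added here, so queued as a deletion
+    pending.remove(a)  # cancels the earlier append of a
+
+    assert a not in pending.added_items
+    assert b in pending.deleted_items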
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/state_changes.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/state_changes.py
new file mode 100644
index 00000000..10e417e8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/state_changes.py
@@ -0,0 +1,198 @@
+# orm/state_changes.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""State tracking utilities used by :class:`_orm.Session`.
+
+"""
+
+from __future__ import annotations
+
+import contextlib
+from enum import Enum
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Iterator
+from typing import NoReturn
+from typing import Optional
+from typing import Tuple
+from typing import TypeVar
+from typing import Union
+
+from .. import exc as sa_exc
+from .. import util
+from ..util.typing import Literal
+
+_F = TypeVar("_F", bound=Callable[..., Any])
+
+
+class _StateChangeState(Enum):
+    pass
+
+
+class _StateChangeStates(_StateChangeState):
+    ANY = 1
+    NO_CHANGE = 2
+    CHANGE_IN_PROGRESS = 3
+
+
+class _StateChange:
+    """Supplies state assertion decorators.
+
+    The current use case is for the :class:`_orm.SessionTransaction` class. The
+    :class:`_StateChange` class itself is agnostic of the
+    :class:`_orm.SessionTransaction` class so could in theory be generalized
+    for other systems as well.
+
+    """
+
+    _next_state: _StateChangeState = _StateChangeStates.ANY
+    _state: _StateChangeState = _StateChangeStates.NO_CHANGE
+    _current_fn: Optional[Callable[..., Any]] = None
+
+    def _raise_for_prerequisite_state(
+        self, operation_name: str, state: _StateChangeState
+    ) -> NoReturn:
+        raise sa_exc.IllegalStateChangeError(
+            f"Can't run operation '{operation_name}()' when Session "
+            f"is in state {state!r}",
+            code="isce",
+        )
+
+    @classmethod
+    def declare_states(
+        cls,
+        prerequisite_states: Union[
+            Literal[_StateChangeStates.ANY], Tuple[_StateChangeState, ...]
+        ],
+        moves_to: _StateChangeState,
+    ) -> Callable[[_F], _F]:
+        """Method decorator declaring valid states.
+
+        :param prerequisite_states: sequence of acceptable prerequisite
+         states.   Can be the single constant _State.ANY to indicate no
+         prerequisite state
+
+        :param moves_to: the expected state at the end of the method, assuming
+         no exceptions raised.   Can be the constant _State.NO_CHANGE to
+         indicate state should not change at the end of the method.
+
+        """
+        assert prerequisite_states, "no prerequisite states sent"
+        has_prerequisite_states = (
+            prerequisite_states is not _StateChangeStates.ANY
+        )
+
+        prerequisite_state_collection = cast(
+            "Tuple[_StateChangeState, ...]", prerequisite_states
+        )
+        expect_state_change = moves_to is not _StateChangeStates.NO_CHANGE
+
+        @util.decorator
+        def _go(fn: _F, self: Any, *arg: Any, **kw: Any) -> Any:
+            current_state = self._state
+
+            if (
+                has_prerequisite_states
+                and current_state not in prerequisite_state_collection
+            ):
+                self._raise_for_prerequisite_state(fn.__name__, current_state)
+
+            next_state = self._next_state
+            existing_fn = self._current_fn
+            expect_state = moves_to if expect_state_change else current_state
+
+            if (
+                # destination states are restricted
+                next_state is not _StateChangeStates.ANY
+                # method seeks to change state
+                and expect_state_change
+                # destination state incorrect
+                and next_state is not expect_state
+            ):
+                if existing_fn and next_state in (
+                    _StateChangeStates.NO_CHANGE,
+                    _StateChangeStates.CHANGE_IN_PROGRESS,
+                ):
+                    raise sa_exc.IllegalStateChangeError(
+                        f"Method '{fn.__name__}()' can't be called here; "
+                        f"method '{existing_fn.__name__}()' is already "
+                        f"in progress and this would cause an unexpected "
+                        f"state change to {moves_to!r}",
+                        code="isce",
+                    )
+                else:
+                    raise sa_exc.IllegalStateChangeError(
+                        f"Cant run operation '{fn.__name__}()' here; "
+                        f"will move to state {moves_to!r} where we are "
+                        f"expecting {next_state!r}",
+                        code="isce",
+                    )
+
+            self._current_fn = fn
+            self._next_state = _StateChangeStates.CHANGE_IN_PROGRESS
+            try:
+                ret_value = fn(self, *arg, **kw)
+            except:
+                raise
+            else:
+                if self._state is expect_state:
+                    return ret_value
+
+                if self._state is current_state:
+                    raise sa_exc.IllegalStateChangeError(
+                        f"Method '{fn.__name__}()' failed to "
+                        "change state "
+                        f"to {moves_to!r} as expected",
+                        code="isce",
+                    )
+                elif existing_fn:
+                    raise sa_exc.IllegalStateChangeError(
+                        f"While method '{existing_fn.__name__}()' was "
+                        "running, "
+                        f"method '{fn.__name__}()' caused an "
+                        "unexpected "
+                        f"state change to {self._state!r}",
+                        code="isce",
+                    )
+                else:
+                    raise sa_exc.IllegalStateChangeError(
+                        f"Method '{fn.__name__}()' caused an unexpected "
+                        f"state change to {self._state!r}",
+                        code="isce",
+                    )
+
+            finally:
+                self._next_state = next_state
+                self._current_fn = existing_fn
+
+        return _go
+
+    @contextlib.contextmanager
+    def _expect_state(self, expected: _StateChangeState) -> Iterator[Any]:
+        """called within a method that changes states.
+
+        method must also use the ``@declare_states()`` decorator.
+
+        """
+        assert self._next_state is _StateChangeStates.CHANGE_IN_PROGRESS, (
+            "Unexpected call to _expect_state outside of "
+            "state-changing method"
+        )
+
+        self._next_state = expected
+        try:
+            yield
+        except:
+            raise
+        else:
+            if self._state is not expected:
+                raise sa_exc.IllegalStateChangeError(
+                    f"Unexpected state change to {self._state!r}", code="isce"
+                )
+        finally:
+            self._next_state = _StateChangeStates.CHANGE_IN_PROGRESS
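+
+
+# Editor's illustration, not part of SQLAlchemy: a toy state machine built
+# on _StateChange with hypothetical states, showing how declare_states()
+# verifies both the starting and ending state of a method.
+class _ExampleStates(_StateChangeState):
+    IDLE = 1
+    RUNNING = 2
+
+
+class _ExampleMachine(_StateChange):
+    _state = _ExampleStates.IDLE
+
+    @_StateChange.declare_states(
+        (_ExampleStates.IDLE,), _ExampleStates.RUNNING
+    )
+    def start(self) -> None:
+        # omitting this assignment would raise IllegalStateChangeError,
+        # since the decorator expects a move to RUNNING; calling start()
+        # twice raises as well, since RUNNING is not a prerequisite state
+        self._state = _ExampleStates.RUNNING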
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/strategies.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/strategies.py
new file mode 100644
index 00000000..f2d16514
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/strategies.py
@@ -0,0 +1,3473 @@
+# orm/strategies.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+"""sqlalchemy.orm.interfaces.LoaderStrategy
+   implementations, and related MapperOptions."""
+
+from __future__ import annotations
+
+import collections
+import itertools
+from typing import Any
+from typing import Dict
+from typing import Optional
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+from . import attributes
+from . import exc as orm_exc
+from . import interfaces
+from . import loading
+from . import path_registry
+from . import properties
+from . import query
+from . import relationships
+from . import unitofwork
+from . import util as orm_util
+from .base import _DEFER_FOR_STATE
+from .base import _RAISE_FOR_STATE
+from .base import _SET_DEFERRED_EXPIRED
+from .base import ATTR_WAS_SET
+from .base import LoaderCallableStatus
+from .base import PASSIVE_OFF
+from .base import PassiveFlag
+from .context import _column_descriptions
+from .context import ORMCompileState
+from .context import ORMSelectCompileState
+from .context import QueryContext
+from .interfaces import LoaderStrategy
+from .interfaces import StrategizedProperty
+from .session import _state_session
+from .state import InstanceState
+from .strategy_options import Load
+from .util import _none_only_set
+from .util import AliasedClass
+from .. import event
+from .. import exc as sa_exc
+from .. import inspect
+from .. import log
+from .. import sql
+from .. import util
+from ..sql import util as sql_util
+from ..sql import visitors
+from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
+from ..sql.selectable import Select
+from ..util.typing import Literal
+
+if TYPE_CHECKING:
+    from .mapper import Mapper
+    from .relationships import RelationshipProperty
+    from ..sql.elements import ColumnElement
+
+
+def _register_attribute(
+    prop,
+    mapper,
+    useobject,
+    compare_function=None,
+    typecallable=None,
+    callable_=None,
+    proxy_property=None,
+    active_history=False,
+    impl_class=None,
+    **kw,
+):
+    listen_hooks = []
+
+    uselist = useobject and prop.uselist
+
+    if useobject and prop.single_parent:
+        listen_hooks.append(single_parent_validator)
+
+    if prop.key in prop.parent.validators:
+        fn, opts = prop.parent.validators[prop.key]
+        listen_hooks.append(
+            lambda desc, prop: orm_util._validator_events(
+                desc, prop.key, fn, **opts
+            )
+        )
+
+    if useobject:
+        listen_hooks.append(unitofwork.track_cascade_events)
+
+    # need to assemble backref listeners
+    # after the singleparentvalidator, mapper validator
+    if useobject:
+        backref = prop.back_populates
+        if backref and prop._effective_sync_backref:
+            listen_hooks.append(
+                lambda desc, prop: attributes.backref_listeners(
+                    desc, backref, uselist
+                )
+            )
+
+    # a single MapperProperty is shared down a class inheritance
+    # hierarchy, so we set up attribute instrumentation and backref event
+    # for each mapper down the hierarchy.
+
+    # typically, "mapper" is the same as prop.parent, due to the way
+    # the configure_mappers() process runs, however this is not strongly
+    # enforced, and in the case of a second configure_mappers() run the
+    # mapper here might not be prop.parent; also, a subclass mapper may
+    # be called here before a superclass mapper.  That is, can't depend
+    # on mappers not already being set up so we have to check each one.
+
+    for m in mapper.self_and_descendants:
+        if prop is m._props.get(
+            prop.key
+        ) and not m.class_manager._attr_has_impl(prop.key):
+            desc = attributes.register_attribute_impl(
+                m.class_,
+                prop.key,
+                parent_token=prop,
+                uselist=uselist,
+                compare_function=compare_function,
+                useobject=useobject,
+                trackparent=useobject
+                and (
+                    prop.single_parent
+                    or prop.direction is interfaces.ONETOMANY
+                ),
+                typecallable=typecallable,
+                callable_=callable_,
+                active_history=active_history,
+                impl_class=impl_class,
+                send_modified_events=not useobject or not prop.viewonly,
+                doc=prop.doc,
+                **kw,
+            )
+
+            for hook in listen_hooks:
+                hook(desc, prop)
+
+
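+
+# Editor's note: the ``prop.parent.validators`` hook consulted above is
+# populated by the public ``@validates`` decorator.  A hedged sketch, not
+# part of this module, using a hypothetical ``User`` model:
+def _editor_example_validates():
+    from sqlalchemy.orm import DeclarativeBase
+    from sqlalchemy.orm import Mapped
+    from sqlalchemy.orm import mapped_column
+    from sqlalchemy.orm import validates
+
+    class Base(DeclarativeBase):
+        pass
+
+    class User(Base):
+        __tablename__ = "user_account"
+        id: Mapped[int] = mapped_column(primary_key=True)
+        email: Mapped[str]
+
+        @validates("email")
+        def _validate_email(self, key, value):
+            # ends up in User.__mapper__.validators["email"]; the event
+            # listener itself is attached by _register_attribute() above
+            if "@" not in value:
+                raise ValueError("invalid email")
+            return value
+
+    return User
+
+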
+@properties.ColumnProperty.strategy_for(instrument=False, deferred=False)
+class UninstrumentedColumnLoader(LoaderStrategy):
+    """Represent a non-instrumented MapperProperty.
+
+    The polymorphic_on argument of mapper() often results in this,
+    if the argument is against the with_polymorphic selectable.
+
+    """
+
+    __slots__ = ("columns",)
+
+    def __init__(self, parent, strategy_key):
+        super().__init__(parent, strategy_key)
+        self.columns = self.parent_property.columns
+
+    def setup_query(
+        self,
+        compile_state,
+        query_entity,
+        path,
+        loadopt,
+        adapter,
+        column_collection=None,
+        **kwargs,
+    ):
+        for c in self.columns:
+            if adapter:
+                c = adapter.columns[c]
+            compile_state._append_dedupe_col_collection(c, column_collection)
+
+    def create_row_processor(
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
+    ):
+        pass
+
+
+@log.class_logger
+@properties.ColumnProperty.strategy_for(instrument=True, deferred=False)
+class ColumnLoader(LoaderStrategy):
+    """Provide loading behavior for a :class:`.ColumnProperty`."""
+
+    __slots__ = "columns", "is_composite"
+
+    def __init__(self, parent, strategy_key):
+        super().__init__(parent, strategy_key)
+        self.columns = self.parent_property.columns
+        self.is_composite = hasattr(self.parent_property, "composite_class")
+
+    def setup_query(
+        self,
+        compile_state,
+        query_entity,
+        path,
+        loadopt,
+        adapter,
+        column_collection,
+        memoized_populators,
+        check_for_adapt=False,
+        **kwargs,
+    ):
+        for c in self.columns:
+            if adapter:
+                if check_for_adapt:
+                    c = adapter.adapt_check_present(c)
+                    if c is None:
+                        return
+                else:
+                    c = adapter.columns[c]
+
+            compile_state._append_dedupe_col_collection(c, column_collection)
+
+        fetch = self.columns[0]
+        if adapter:
+            fetch = adapter.columns[fetch]
+            if fetch is None:
+                # None happens here only for dml bulk_persistence cases
+                # when context.DMLReturningColFilter is used
+                return
+
+        memoized_populators[self.parent_property] = fetch
+
+    def init_class_attribute(self, mapper):
+        self.is_class_level = True
+        coltype = self.columns[0].type
+        # TODO: check all columns ?  check for foreign key as well?
+        active_history = (
+            self.parent_property.active_history
+            or self.columns[0].primary_key
+            or (
+                mapper.version_id_col is not None
+                and mapper._columntoproperty.get(mapper.version_id_col, None)
+                is self.parent_property
+            )
+        )
+
+        _register_attribute(
+            self.parent_property,
+            mapper,
+            useobject=False,
+            compare_function=coltype.compare_values,
+            active_history=active_history,
+        )
+
+    def create_row_processor(
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
+    ):
+        # look through list of columns represented here
+        # to see which, if any, is present in the row.
+
+        for col in self.columns:
+            if adapter:
+                col = adapter.columns[col]
+            getter = result._getter(col, False)
+            if getter:
+                populators["quick"].append((self.key, getter))
+                break
+        else:
+            populators["expire"].append((self.key, True))
+
+
+@log.class_logger
+@properties.ColumnProperty.strategy_for(query_expression=True)
+class ExpressionColumnLoader(ColumnLoader):
+    def __init__(self, parent, strategy_key):
+        super().__init__(parent, strategy_key)
+
+        # compare to the "default" expression that is mapped in
+        # the column.   If it's sql.null, we don't need to render
+        # unless an expr is passed in the options.
+        null = sql.null().label(None)
+        self._have_default_expression = any(
+            not c.compare(null) for c in self.parent_property.columns
+        )
+
+    def setup_query(
+        self,
+        compile_state,
+        query_entity,
+        path,
+        loadopt,
+        adapter,
+        column_collection,
+        memoized_populators,
+        **kwargs,
+    ):
+        columns = None
+        if loadopt and loadopt._extra_criteria:
+            columns = loadopt._extra_criteria
+
+        elif self._have_default_expression:
+            columns = self.parent_property.columns
+
+        if columns is None:
+            return
+
+        for c in columns:
+            if adapter:
+                c = adapter.columns[c]
+            compile_state._append_dedupe_col_collection(c, column_collection)
+
+        fetch = columns[0]
+        if adapter:
+            fetch = adapter.columns[fetch]
+            if fetch is None:
+                # None is not expected to be the result of any
+                # adapter implementation here; however, there may be
+                # theoretical usages of returning() with
+                # context.DMLReturningColFilter
+                return
+
+        memoized_populators[self.parent_property] = fetch
+
+    def create_row_processor(
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
+    ):
+        # look through list of columns represented here
+        # to see which, if any, is present in the row.
+        if loadopt and loadopt._extra_criteria:
+            columns = loadopt._extra_criteria
+
+            for col in columns:
+                if adapter:
+                    col = adapter.columns[col]
+                getter = result._getter(col, False)
+                if getter:
+                    populators["quick"].append((self.key, getter))
+                    break
+            else:
+                populators["expire"].append((self.key, True))
+
+    def init_class_attribute(self, mapper):
+        self.is_class_level = True
+
+        _register_attribute(
+            self.parent_property,
+            mapper,
+            useobject=False,
+            compare_function=self.columns[0].type.compare_values,
+            accepts_scalar_loader=False,
+        )
+
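+
+# Editor's note: ExpressionColumnLoader backs the public query_expression()
+# attribute and the with_expression() loader option.  A hedged sketch, not
+# part of this module, using a hypothetical ``Book`` model:
+def _editor_example_query_expression():
+    from sqlalchemy import func
+    from sqlalchemy import select
+    from sqlalchemy.orm import DeclarativeBase
+    from sqlalchemy.orm import Mapped
+    from sqlalchemy.orm import mapped_column
+    from sqlalchemy.orm import query_expression
+    from sqlalchemy.orm import with_expression
+
+    class Base(DeclarativeBase):
+        pass
+
+    class Book(Base):
+        __tablename__ = "book"
+        id: Mapped[int] = mapped_column(primary_key=True)
+        title: Mapped[str]
+        # ad-hoc SQL expression; remains None unless populated per query
+        title_upper: Mapped[str] = query_expression()
+
+    # populate the expression for this statement only
+    return select(Book).options(
+        with_expression(Book.title_upper, func.upper(Book.title))
+    )
+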
+
+@log.class_logger
+@properties.ColumnProperty.strategy_for(deferred=True, instrument=True)
+@properties.ColumnProperty.strategy_for(
+    deferred=True, instrument=True, raiseload=True
+)
+@properties.ColumnProperty.strategy_for(do_nothing=True)
+class DeferredColumnLoader(LoaderStrategy):
+    """Provide loading behavior for a deferred :class:`.ColumnProperty`."""
+
+    __slots__ = "columns", "group", "raiseload"
+
+    def __init__(self, parent, strategy_key):
+        super().__init__(parent, strategy_key)
+        if hasattr(self.parent_property, "composite_class"):
+            raise NotImplementedError(
+                "Deferred loading for composite types not implemented yet"
+            )
+        self.raiseload = self.strategy_opts.get("raiseload", False)
+        self.columns = self.parent_property.columns
+        self.group = self.parent_property.group
+
+    def create_row_processor(
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
+    ):
+        # for a DeferredColumnLoader, this method is only used during a
+        # "row processor only" query; see test_deferred.py ->
+        # tests with "rowproc_only" in their name.  As of the 1.0 series,
+        # loading._instance_processor doesn't use a "row processing" function
+        # to populate columns, instead it uses data in the "populators"
+        # dictionary.  Normally, the DeferredColumnLoader.setup_query()
+        # sets up that data in the "memoized_populators" dictionary
+        # and "create_row_processor()" here is never invoked.
+
+        if (
+            context.refresh_state
+            and context.query._compile_options._only_load_props
+            and self.key in context.query._compile_options._only_load_props
+        ):
+            self.parent_property._get_strategy(
+                (("deferred", False), ("instrument", True))
+            ).create_row_processor(
+                context,
+                query_entity,
+                path,
+                loadopt,
+                mapper,
+                result,
+                adapter,
+                populators,
+            )
+
+        elif not self.is_class_level:
+            if self.raiseload:
+                set_deferred_for_local_state = (
+                    self.parent_property._raise_column_loader
+                )
+            else:
+                set_deferred_for_local_state = (
+                    self.parent_property._deferred_column_loader
+                )
+            populators["new"].append((self.key, set_deferred_for_local_state))
+        else:
+            populators["expire"].append((self.key, False))
+
+    def init_class_attribute(self, mapper):
+        self.is_class_level = True
+
+        _register_attribute(
+            self.parent_property,
+            mapper,
+            useobject=False,
+            compare_function=self.columns[0].type.compare_values,
+            callable_=self._load_for_state,
+            load_on_unexpire=False,
+        )
+
+    def setup_query(
+        self,
+        compile_state,
+        query_entity,
+        path,
+        loadopt,
+        adapter,
+        column_collection,
+        memoized_populators,
+        only_load_props=None,
+        **kw,
+    ):
+        if (
+            (
+                compile_state.compile_options._render_for_subquery
+                and self.parent_property._renders_in_subqueries
+            )
+            or (
+                loadopt
+                and set(self.columns).intersection(
+                    self.parent._should_undefer_in_wildcard
+                )
+            )
+            or (
+                loadopt
+                and self.group
+                and loadopt.local_opts.get(
+                    "undefer_group_%s" % self.group, False
+                )
+            )
+            or (only_load_props and self.key in only_load_props)
+        ):
+            self.parent_property._get_strategy(
+                (("deferred", False), ("instrument", True))
+            ).setup_query(
+                compile_state,
+                query_entity,
+                path,
+                loadopt,
+                adapter,
+                column_collection,
+                memoized_populators,
+                **kw,
+            )
+        elif self.is_class_level:
+            memoized_populators[self.parent_property] = _SET_DEFERRED_EXPIRED
+        elif not self.raiseload:
+            memoized_populators[self.parent_property] = _DEFER_FOR_STATE
+        else:
+            memoized_populators[self.parent_property] = _RAISE_FOR_STATE
+
+    def _load_for_state(self, state, passive):
+        if not state.key:
+            return LoaderCallableStatus.ATTR_EMPTY
+
+        if not passive & PassiveFlag.SQL_OK:
+            return LoaderCallableStatus.PASSIVE_NO_RESULT
+
+        localparent = state.manager.mapper
+
+        if self.group:
+            toload = [
+                p.key
+                for p in localparent.iterate_properties
+                if isinstance(p, StrategizedProperty)
+                and isinstance(p.strategy, DeferredColumnLoader)
+                and p.group == self.group
+            ]
+        else:
+            toload = [self.key]
+
+        # narrow the keys down to just those which have no history
+        group = [k for k in toload if k in state.unmodified]
+
+        session = _state_session(state)
+        if session is None:
+            raise orm_exc.DetachedInstanceError(
+                "Parent instance %s is not bound to a Session; "
+                "deferred load operation of attribute '%s' cannot proceed"
+                % (orm_util.state_str(state), self.key)
+            )
+
+        if self.raiseload:
+            self._invoke_raise_load(state, passive, "raise")
+
+        loading.load_scalar_attributes(
+            state.mapper, state, set(group), PASSIVE_OFF
+        )
+
+        return LoaderCallableStatus.ATTR_WAS_SET
+
+    def _invoke_raise_load(self, state, passive, lazy):
+        raise sa_exc.InvalidRequestError(
+            "'%s' is not available due to raiseload=True" % (self,)
+        )
+
+
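+
+# Editor's note: DeferredColumnLoader backs columns mapped with
+# ``mapped_column(deferred=True)`` / ``deferred()`` and the ``undefer()``
+# option.  A hedged sketch, not part of this module, using a hypothetical
+# ``Document`` model:
+def _editor_example_deferred():
+    from sqlalchemy import select
+    from sqlalchemy.orm import DeclarativeBase
+    from sqlalchemy.orm import Mapped
+    from sqlalchemy.orm import mapped_column
+    from sqlalchemy.orm import undefer
+
+    class Base(DeclarativeBase):
+        pass
+
+    class Document(Base):
+        __tablename__ = "document"
+        id: Mapped[int] = mapped_column(primary_key=True)
+        title: Mapped[str]
+        # omitted from the initial SELECT; first access triggers
+        # _load_for_state() above to emit the deferred load
+        body: Mapped[str] = mapped_column(deferred=True)
+
+    # opt back in up front for a single query
+    return select(Document).options(undefer(Document.body))
+
+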
+class LoadDeferredColumns:
+    """serializable loader object used by DeferredColumnLoader"""
+
+    def __init__(self, key: str, raiseload: bool = False):
+        self.key = key
+        self.raiseload = raiseload
+
+    def __call__(self, state, passive=attributes.PASSIVE_OFF):
+        key = self.key
+
+        localparent = state.manager.mapper
+        prop = localparent._props[key]
+        if self.raiseload:
+            strategy_key = (
+                ("deferred", True),
+                ("instrument", True),
+                ("raiseload", True),
+            )
+        else:
+            strategy_key = (("deferred", True), ("instrument", True))
+        strategy = prop._get_strategy(strategy_key)
+        return strategy._load_for_state(state, passive)
+
+
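+# Editor's note: LoadDeferredColumns is the pickle-friendly per-instance
+# callable installed for deferred columns; with raiseload=True it routes to
+# the raising strategy variant.  A hedged sketch of the public option that
+# sets this up, taking a hypothetical ``Document`` mapped class with a
+# deferred ``body`` column:
+def _editor_example_defer_raiseload(Document):
+    from sqlalchemy import select
+    from sqlalchemy.orm import defer
+
+    # attribute access on loaded Documents will then raise rather than
+    # emitting a deferred-load SELECT
+    return select(Document).options(defer(Document.body, raiseload=True))
+
+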
+class AbstractRelationshipLoader(LoaderStrategy):
+    """LoaderStratgies which deal with related objects."""
+
+    __slots__ = "mapper", "target", "uselist", "entity"
+
+    def __init__(self, parent, strategy_key):
+        super().__init__(parent, strategy_key)
+        self.mapper = self.parent_property.mapper
+        self.entity = self.parent_property.entity
+        self.target = self.parent_property.target
+        self.uselist = self.parent_property.uselist
+
+    def _immediateload_create_row_processor(
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
+    ):
+        return self.parent_property._get_strategy(
+            (("lazy", "immediate"),)
+        ).create_row_processor(
+            context,
+            query_entity,
+            path,
+            loadopt,
+            mapper,
+            result,
+            adapter,
+            populators,
+        )
+
+
+@log.class_logger
+@relationships.RelationshipProperty.strategy_for(do_nothing=True)
+class DoNothingLoader(LoaderStrategy):
+    """Relationship loader that makes no change to the object's state.
+
+    Compared to NoLoader, this loader does not initialize the
+    collection/attribute to empty/none; the usual default LazyLoader will
+    take effect.
+
+    """
+
+
+@log.class_logger
+@relationships.RelationshipProperty.strategy_for(lazy="noload")
+@relationships.RelationshipProperty.strategy_for(lazy=None)
+class NoLoader(AbstractRelationshipLoader):
+    """Provide loading behavior for a :class:`.Relationship`
+    with "lazy=None".
+
+    """
+
+    __slots__ = ()
+
+    def init_class_attribute(self, mapper):
+        self.is_class_level = True
+
+        _register_attribute(
+            self.parent_property,
+            mapper,
+            useobject=True,
+            typecallable=self.parent_property.collection_class,
+        )
+
+    def create_row_processor(
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
+    ):
+        def invoke_no_load(state, dict_, row):
+            if self.uselist:
+                attributes.init_state_collection(state, dict_, self.key)
+            else:
+                dict_[self.key] = None
+
+        populators["new"].append((self.key, invoke_no_load))
+
+
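+
+# Editor's note: the ``lazy=`` argument of relationship() selects among the
+# strategies in this module ("select" -> LazyLoader below, "noload" ->
+# NoLoader above, "raise" / "raise_on_sql" -> LazyLoader in raise mode).  A
+# hedged sketch, not part of this module, with hypothetical models:
+def _editor_example_lazy_options():
+    from sqlalchemy import ForeignKey
+    from sqlalchemy.orm import DeclarativeBase
+    from sqlalchemy.orm import Mapped
+    from sqlalchemy.orm import mapped_column
+    from sqlalchemy.orm import relationship
+
+    class Base(DeclarativeBase):
+        pass
+
+    class Parent(Base):
+        __tablename__ = "parent"
+        id: Mapped[int] = mapped_column(primary_key=True)
+        # loaded on first access via LazyLoader._load_for_state()
+        children: Mapped[list["Child"]] = relationship(
+            lazy="select", back_populates="parent"
+        )
+
+    class Child(Base):
+        __tablename__ = "child"
+        id: Mapped[int] = mapped_column(primary_key=True)
+        parent_id: Mapped[int] = mapped_column(ForeignKey("parent.id"))
+        # access raises InvalidRequestError instead of emitting lazy SQL
+        parent: Mapped["Parent"] = relationship(
+            lazy="raise", back_populates="children"
+        )
+
+    return Parent, Child
+
+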
+@log.class_logger
+@relationships.RelationshipProperty.strategy_for(lazy=True)
+@relationships.RelationshipProperty.strategy_for(lazy="select")
+@relationships.RelationshipProperty.strategy_for(lazy="raise")
+@relationships.RelationshipProperty.strategy_for(lazy="raise_on_sql")
+@relationships.RelationshipProperty.strategy_for(lazy="baked_select")
+class LazyLoader(
+    AbstractRelationshipLoader, util.MemoizedSlots, log.Identified
+):
+    """Provide loading behavior for a :class:`.Relationship`
+    with "lazy=True", that is loads when first accessed.
+
+    """
+
+    __slots__ = (
+        "_lazywhere",
+        "_rev_lazywhere",
+        "_lazyload_reverse_option",
+        "_order_by",
+        "use_get",
+        "is_aliased_class",
+        "_bind_to_col",
+        "_equated_columns",
+        "_rev_bind_to_col",
+        "_rev_equated_columns",
+        "_simple_lazy_clause",
+        "_raise_always",
+        "_raise_on_sql",
+    )
+
+    _lazywhere: ColumnElement[bool]
+    _bind_to_col: Dict[str, ColumnElement[Any]]
+    _rev_lazywhere: ColumnElement[bool]
+    _rev_bind_to_col: Dict[str, ColumnElement[Any]]
+
+    parent_property: RelationshipProperty[Any]
+
+    def __init__(
+        self, parent: RelationshipProperty[Any], strategy_key: Tuple[Any, ...]
+    ):
+        super().__init__(parent, strategy_key)
+        self._raise_always = self.strategy_opts["lazy"] == "raise"
+        self._raise_on_sql = self.strategy_opts["lazy"] == "raise_on_sql"
+
+        self.is_aliased_class = inspect(self.entity).is_aliased_class
+
+        join_condition = self.parent_property._join_condition
+        (
+            self._lazywhere,
+            self._bind_to_col,
+            self._equated_columns,
+        ) = join_condition.create_lazy_clause()
+
+        (
+            self._rev_lazywhere,
+            self._rev_bind_to_col,
+            self._rev_equated_columns,
+        ) = join_condition.create_lazy_clause(reverse_direction=True)
+
+        if self.parent_property.order_by:
+            self._order_by = [
+                sql_util._deep_annotate(elem, {"_orm_adapt": True})
+                for elem in util.to_list(self.parent_property.order_by)
+            ]
+        else:
+            self._order_by = None
+
+        self.logger.info("%s lazy loading clause %s", self, self._lazywhere)
+
+        # determine if our "lazywhere" clause is the same as the mapper's
+        # get() clause.  then we can just use mapper.get()
+        #
+        # TODO: the "not self.uselist" can be taken out entirely; a m2o
+        # load that populates for a list (very unusual, but is possible with
+        # the API) can still set for "None" and the attribute system will
+        # populate as an empty list.
+        self.use_get = (
+            not self.is_aliased_class
+            and not self.uselist
+            and self.entity._get_clause[0].compare(
+                self._lazywhere,
+                use_proxies=True,
+                compare_keys=False,
+                equivalents=self.mapper._equivalent_columns,
+            )
+        )
+
+        if self.use_get:
+            for col in list(self._equated_columns):
+                if col in self.mapper._equivalent_columns:
+                    for c in self.mapper._equivalent_columns[col]:
+                        self._equated_columns[c] = self._equated_columns[col]
+
+            self.logger.info(
+                "%s will use Session.get() to optimize instance loads", self
+            )
+
+    def init_class_attribute(self, mapper):
+        self.is_class_level = True
+
+        _legacy_inactive_history_style = (
+            self.parent_property._legacy_inactive_history_style
+        )
+
+        if self.parent_property.active_history:
+            active_history = True
+            _deferred_history = False
+
+        elif (
+            self.parent_property.direction is not interfaces.MANYTOONE
+            or not self.use_get
+        ):
+            if _legacy_inactive_history_style:
+                active_history = True
+                _deferred_history = False
+            else:
+                active_history = False
+                _deferred_history = True
+        else:
+            active_history = _deferred_history = False
+
+        _register_attribute(
+            self.parent_property,
+            mapper,
+            useobject=True,
+            callable_=self._load_for_state,
+            typecallable=self.parent_property.collection_class,
+            active_history=active_history,
+            _deferred_history=_deferred_history,
+        )
+
+    def _memoized_attr__simple_lazy_clause(self):
+        lazywhere = sql_util._deep_annotate(
+            self._lazywhere, {"_orm_adapt": True}
+        )
+
+        criterion, bind_to_col = (lazywhere, self._bind_to_col)
+
+        params = []
+
+        def visit_bindparam(bindparam):
+            bindparam.unique = False
+
+        visitors.traverse(criterion, {}, {"bindparam": visit_bindparam})
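+
+        # second pass (visit_bindparam is deliberately redefined below):
+        # record which bind parameters correspond to parent columns versus
+        # those carrying literal values, so _generate_lazy_clause() can fill
+        # them in per-state later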
+
+        def visit_bindparam(bindparam):
+            if bindparam._identifying_key in bind_to_col:
+                params.append(
+                    (
+                        bindparam.key,
+                        bind_to_col[bindparam._identifying_key],
+                        None,
+                    )
+                )
+            elif bindparam.callable is None:
+                params.append((bindparam.key, None, bindparam.value))
+
+        criterion = visitors.cloned_traverse(
+            criterion, {}, {"bindparam": visit_bindparam}
+        )
+
+        return criterion, params
+
+    def _generate_lazy_clause(self, state, passive):
+        criterion, param_keys = self._simple_lazy_clause
+
+        if state is None:
+            return sql_util.adapt_criterion_to_null(
+                criterion, [key for key, ident, value in param_keys]
+            )
+
+        mapper = self.parent_property.parent
+
+        o = state.obj()  # strong ref
+        dict_ = attributes.instance_dict(o)
+
+        if passive & PassiveFlag.INIT_OK:
+            passive ^= PassiveFlag.INIT_OK
+
+        params = {}
+        for key, ident, value in param_keys:
+            if ident is not None:
+                if passive and passive & PassiveFlag.LOAD_AGAINST_COMMITTED:
+                    value = mapper._get_committed_state_attr_by_column(
+                        state, dict_, ident, passive
+                    )
+                else:
+                    value = mapper._get_state_attr_by_column(
+                        state, dict_, ident, passive
+                    )
+
+            params[key] = value
+
+        return criterion, params
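+
+    # illustrative note: for a lazy clause of "users.id = :param_1",
+    # _generate_lazy_clause() returns that criterion together with a params
+    # dict such as {"param_1": 5}, the value pulled from the parent state's
+    # current (or committed) column value.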
+
+    def _invoke_raise_load(self, state, passive, lazy):
+        raise sa_exc.InvalidRequestError(
+            "'%s' is not available due to lazy='%s'" % (self, lazy)
+        )
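+
+    # illustrative note: with lazy="raise" this surfaces to the user as,
+    # e.g.,
+    #     InvalidRequestError: 'Parent.children' is not available due to
+    #     lazy='raise'
+    # (assuming a Parent.children relationship).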
+
+    def _load_for_state(
+        self,
+        state,
+        passive,
+        loadopt=None,
+        extra_criteria=(),
+        extra_options=(),
+        alternate_effective_path=None,
+        execution_options=util.EMPTY_DICT,
+    ):
+        if not state.key and (
+            (
+                not self.parent_property.load_on_pending
+                and not state._load_pending
+            )
+            or not state.session_id
+        ):
+            return LoaderCallableStatus.ATTR_EMPTY
+
+        pending = not state.key
+        primary_key_identity = None
+
+        use_get = self.use_get and (not loadopt or not loadopt._extra_criteria)
+
+        if (not passive & PassiveFlag.SQL_OK and not use_get) or (
+            not passive & attributes.NON_PERSISTENT_OK and pending
+        ):
+            return LoaderCallableStatus.PASSIVE_NO_RESULT
+
+        if (
+            # we were given lazy="raise"
+            self._raise_always
+            # the no_raise history-related flag was not passed
+            and not passive & PassiveFlag.NO_RAISE
+            and (
+                # if we are use_get and related_object_ok is disabled,
+                # which means we are at most looking in the identity map
+                # for history purposes or otherwise returning
+                # PASSIVE_NO_RESULT, don't raise.  This is also a
+                # history-related flag
+                not use_get
+                or passive & PassiveFlag.RELATED_OBJECT_OK
+            )
+        ):
+            self._invoke_raise_load(state, passive, "raise")
+
+        session = _state_session(state)
+        if not session:
+            if passive & PassiveFlag.NO_RAISE:
+                return LoaderCallableStatus.PASSIVE_NO_RESULT
+
+            raise orm_exc.DetachedInstanceError(
+                "Parent instance %s is not bound to a Session; "
+                "lazy load operation of attribute '%s' cannot proceed"
+                % (orm_util.state_str(state), self.key)
+            )
+
+        # if we have a simple primary key load, check the
+        # identity map without generating a Query at all
+        if use_get:
+            primary_key_identity = self._get_ident_for_use_get(
+                session, state, passive
+            )
+            if LoaderCallableStatus.PASSIVE_NO_RESULT in primary_key_identity:
+                return LoaderCallableStatus.PASSIVE_NO_RESULT
+            elif LoaderCallableStatus.NEVER_SET in primary_key_identity:
+                return LoaderCallableStatus.NEVER_SET
+
+            # test for None alone in primary_key_identity based on
+            # allow_partial_pks preference.   PASSIVE_NO_RESULT and NEVER_SET
+            # have already been tested above
+            if not self.mapper.allow_partial_pks:
+                if _none_only_set.intersection(primary_key_identity):
+                    return None
+            else:
+                if _none_only_set.issuperset(primary_key_identity):
+                    return None
+
+            if (
+                self.key in state.dict
+                and not passive & PassiveFlag.DEFERRED_HISTORY_LOAD
+            ):
+                return LoaderCallableStatus.ATTR_WAS_SET
+
+            # look for this identity in the identity map.  Delegate to the
+            # Query class in use, as it may have special rules for how it
+            # does this, including how it decides what the correct
+            # identity_token would be for this identity.
+
+            instance = session._identity_lookup(
+                self.entity,
+                primary_key_identity,
+                passive=passive,
+                lazy_loaded_from=state,
+            )
+
+            if instance is not None:
+                if instance is LoaderCallableStatus.PASSIVE_CLASS_MISMATCH:
+                    return None
+                else:
+                    return instance
+            elif (
+                not passive & PassiveFlag.SQL_OK
+                or not passive & PassiveFlag.RELATED_OBJECT_OK
+            ):
+                return LoaderCallableStatus.PASSIVE_NO_RESULT
+
+        return self._emit_lazyload(
+            session,
+            state,
+            primary_key_identity,
+            passive,
+            loadopt,
+            extra_criteria,
+            extra_options,
+            alternate_effective_path,
+            execution_options,
+        )
+
+    def _get_ident_for_use_get(self, session, state, passive):
+        instance_mapper = state.manager.mapper
+
+        if passive & PassiveFlag.LOAD_AGAINST_COMMITTED:
+            get_attr = instance_mapper._get_committed_state_attr_by_column
+        else:
+            get_attr = instance_mapper._get_state_attr_by_column
+
+        dict_ = state.dict
+
+        return [
+            get_attr(state, dict_, self._equated_columns[pk], passive=passive)
+            for pk in self.mapper.primary_key
+        ]
+
+    @util.preload_module("sqlalchemy.orm.strategy_options")
+    def _emit_lazyload(
+        self,
+        session,
+        state,
+        primary_key_identity,
+        passive,
+        loadopt,
+        extra_criteria,
+        extra_options,
+        alternate_effective_path,
+        execution_options,
+    ):
+        strategy_options = util.preloaded.orm_strategy_options
+
+        clauseelement = self.entity.__clause_element__()
+        stmt = Select._create_raw_select(
+            _raw_columns=[clauseelement],
+            _propagate_attrs=clauseelement._propagate_attrs,
+            _label_style=LABEL_STYLE_TABLENAME_PLUS_COL,
+            _compile_options=ORMCompileState.default_compile_options,
+        )
+        load_options = QueryContext.default_load_options
+
+        load_options += {
+            "_invoke_all_eagers": False,
+            "_lazy_loaded_from": state,
+        }
+
+        if self.parent_property.secondary is not None:
+            stmt = stmt.select_from(
+                self.mapper, self.parent_property.secondary
+            )
+
+        pending = not state.key
+
+        # don't autoflush on pending
+        if pending or passive & attributes.NO_AUTOFLUSH:
+            stmt._execution_options = util.immutabledict({"autoflush": False})
+
+        use_get = self.use_get
+
+        if state.load_options or (loadopt and loadopt._extra_criteria):
+            if alternate_effective_path is None:
+                effective_path = state.load_path[self.parent_property]
+            else:
+                effective_path = alternate_effective_path[self.parent_property]
+
+            opts = state.load_options
+
+            if loadopt and loadopt._extra_criteria:
+                use_get = False
+                opts += (
+                    orm_util.LoaderCriteriaOption(self.entity, extra_criteria),
+                )
+
+            stmt._with_options = opts
+        elif alternate_effective_path is None:
+            # this path is used if there are not already any options
+            # in the query, but an event may want to add them
+            effective_path = state.mapper._path_registry[self.parent_property]
+        else:
+            # added by immediateloader
+            effective_path = alternate_effective_path[self.parent_property]
+
+        if extra_options:
+            stmt._with_options += extra_options
+
+        stmt._compile_options += {"_current_path": effective_path}
+
+        if use_get:
+            if self._raise_on_sql and not passive & PassiveFlag.NO_RAISE:
+                self._invoke_raise_load(state, passive, "raise_on_sql")
+
+            return loading.load_on_pk_identity(
+                session,
+                stmt,
+                primary_key_identity,
+                load_options=load_options,
+                execution_options=execution_options,
+            )
+
+        if self._order_by:
+            stmt._order_by_clauses = self._order_by
+
+        def _lazyload_reverse(compile_context):
+            for rev in self.parent_property._reverse_property:
+                # reverse props that are MANYTOONE are loading *this*
+                # object from get(), so don't need to eager out to those.
+                if (
+                    rev.direction is interfaces.MANYTOONE
+                    and rev._use_get
+                    and not isinstance(rev.strategy, LazyLoader)
+                ):
+                    strategy_options.Load._construct_for_existing_path(
+                        compile_context.compile_options._current_path[
+                            rev.parent
+                        ]
+                    ).lazyload(rev).process_compile_state(compile_context)
+
+        stmt._with_context_options += (
+            (_lazyload_reverse, self.parent_property),
+        )
+
+        lazy_clause, params = self._generate_lazy_clause(state, passive)
+
+        if execution_options:
+            execution_options = util.EMPTY_DICT.merge_with(
+                execution_options,
+                {
+                    "_sa_orm_load_options": load_options,
+                },
+            )
+        else:
+            execution_options = {
+                "_sa_orm_load_options": load_options,
+            }
+
+        if (
+            self.key in state.dict
+            and not passive & PassiveFlag.DEFERRED_HISTORY_LOAD
+        ):
+            return LoaderCallableStatus.ATTR_WAS_SET
+
+        if pending:
+            if util.has_intersection(orm_util._none_set, params.values()):
+                return None
+
+        elif util.has_intersection(orm_util._never_set, params.values()):
+            return None
+
+        if self._raise_on_sql and not passive & PassiveFlag.NO_RAISE:
+            self._invoke_raise_load(state, passive, "raise_on_sql")
+
+        stmt._where_criteria = (lazy_clause,)
+
+        result = session.execute(
+            stmt, params, execution_options=execution_options
+        )
+
+        result = result.unique().scalars().all()
+
+        if self.uselist:
+            return result
+        else:
+            result_count = len(result)
+            if result_count:
+                if result_count > 1:
+                    util.warn(
+                        "Multiple rows returned with "
+                        "uselist=False for lazily-loaded attribute '%s' "
+                        % self.parent_property
+                    )
+
+                return result[0]
+            else:
+                return None
+
+    def create_row_processor(
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
+    ):
+        key = self.key
+
+        if (
+            context.load_options._is_user_refresh
+            and context.query._compile_options._only_load_props
+            and self.key in context.query._compile_options._only_load_props
+        ):
+            return self._immediateload_create_row_processor(
+                context,
+                query_entity,
+                path,
+                loadopt,
+                mapper,
+                result,
+                adapter,
+                populators,
+            )
+
+        if not self.is_class_level or (loadopt and loadopt._extra_criteria):
+            # we are not the primary manager for this attribute on this
+            # class - set up a per-instance lazyloader, which will override
+            # the class-level behavior.  this currently only happens when
+            # using a "lazyload" option on a "no load" attribute - "eager"
+            # attributes always have a class-level lazyloader installed.
+            set_lazy_callable = (
+                InstanceState._instance_level_callable_processor
+            )(
+                mapper.class_manager,
+                LoadLazyAttribute(
+                    key,
+                    self,
+                    loadopt,
+                    (
+                        loadopt._generate_extra_criteria(context)
+                        if loadopt._extra_criteria
+                        else None
+                    ),
+                ),
+                key,
+            )
+
+            populators["new"].append((self.key, set_lazy_callable))
+        elif context.populate_existing or mapper.always_refresh:
+
+            def reset_for_lazy_callable(state, dict_, row):
+                # we are the primary manager for this attribute on this
+                # class - reset its per-instance attribute state, so that
+                # the class-level lazy loader is executed when next
+                # referenced on this instance.  this is needed in
+                # populate_existing() types of scenarios to reset any
+                # existing state.
+                state._reset(dict_, key)
+
+            populators["new"].append((self.key, reset_for_lazy_callable))
+
+
+class LoadLazyAttribute:
+    """semi-serializable loader object used by LazyLoader
+
+    Historically, this object would be carried along with instances that
+    needed to run lazyloaders, so it had to be serializable to support
+    cached instances.
+
+    This is no longer a general requirement, and the case where this object
+    is used is exactly the case where we can't really serialize easily,
+    which is when extra criteria in the loader option are present.
+
+    We can't reliably serialize that as it refers to mapped entities and
+    AliasedClass objects that are local to the current process, which would
+    need to be matched up on deserialization, e.g. via the
+    sqlalchemy.ext.serializer approach.
+
+    """
+
+    def __init__(self, key, initiating_strategy, loadopt, extra_criteria):
+        self.key = key
+        self.strategy_key = initiating_strategy.strategy_key
+        self.loadopt = loadopt
+        self.extra_criteria = extra_criteria
+
+    def __getstate__(self):
+        if self.extra_criteria is not None:
+            util.warn(
+                "Can't reliably serialize a lazyload() option that "
+                "contains additional criteria; please use eager loading "
+                "for this case"
+            )
+        return {
+            "key": self.key,
+            "strategy_key": self.strategy_key,
+            "loadopt": self.loadopt,
+            "extra_criteria": (),
+        }
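+
+    # illustrative note: pickling an instance whose attribute state carries
+    # this callable will therefore warn and drop the extra criteria; on
+    # unpickle, __call__ simply resolves the strategy again via strategy_key.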
+
+    def __call__(self, state, passive=attributes.PASSIVE_OFF):
+        key = self.key
+        instance_mapper = state.manager.mapper
+        prop = instance_mapper._props[key]
+        strategy = prop._strategies[self.strategy_key]
+
+        return strategy._load_for_state(
+            state,
+            passive,
+            loadopt=self.loadopt,
+            extra_criteria=self.extra_criteria,
+        )
+
+
+class PostLoader(AbstractRelationshipLoader):
+    """A relationship loader that emits a second SELECT statement."""
+
+    __slots__ = ()
+
+    def _setup_for_recursion(self, context, path, loadopt, join_depth=None):
+        effective_path = (
+            context.compile_state.current_path or orm_util.PathRegistry.root
+        ) + path
+
+        top_level_context = context._get_top_level_context()
+        execution_options = util.immutabledict(
+            {"sa_top_level_orm_context": top_level_context}
+        )
+
+        if loadopt:
+            recursion_depth = loadopt.local_opts.get("recursion_depth", None)
+            unlimited_recursion = recursion_depth == -1
+        else:
+            recursion_depth = None
+            unlimited_recursion = False
+
+        if recursion_depth is not None:
+            if not self.parent_property._is_self_referential:
+                raise sa_exc.InvalidRequestError(
+                    f"recursion_depth option on relationship "
+                    f"{self.parent_property} not valid for "
+                    "non-self-referential relationship"
+                )
+            recursion_depth = context.execution_options.get(
+                f"_recursion_depth_{id(self)}", recursion_depth
+            )
+
+            if not unlimited_recursion and recursion_depth < 0:
+                return (
+                    effective_path,
+                    False,
+                    execution_options,
+                    recursion_depth,
+                )
+
+            if not unlimited_recursion:
+                execution_options = execution_options.union(
+                    {
+                        f"_recursion_depth_{id(self)}": recursion_depth - 1,
+                    }
+                )
+
+        if loading.PostLoad.path_exists(
+            context, effective_path, self.parent_property
+        ):
+            return effective_path, False, execution_options, recursion_depth
+
+        path_w_prop = path[self.parent_property]
+        effective_path_w_prop = effective_path[self.parent_property]
+
+        if not path_w_prop.contains(context.attributes, "loader"):
+            if join_depth:
+                if effective_path_w_prop.length / 2 > join_depth:
+                    return (
+                        effective_path,
+                        False,
+                        execution_options,
+                        recursion_depth,
+                    )
+            elif effective_path_w_prop.contains_mapper(self.mapper):
+                return (
+                    effective_path,
+                    False,
+                    execution_options,
+                    recursion_depth,
+                )
+
+        return effective_path, True, execution_options, recursion_depth
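+
+    # illustrative usage (assuming a self-referential Node mapping): the
+    # recursion_depth handled above originates from loader options such as
+    #
+    #     select(Node).options(selectinload(Node.children, recursion_depth=2))
+    #
+    # where -1 requests unlimited recursion; the per-level countdown is
+    # threaded through execution_options under "_recursion_depth_<id>".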
+
+
+@relationships.RelationshipProperty.strategy_for(lazy="immediate")
+class ImmediateLoader(PostLoader):
+    __slots__ = ("join_depth",)
+
+    def __init__(self, parent, strategy_key):
+        super().__init__(parent, strategy_key)
+        self.join_depth = self.parent_property.join_depth
+
+    def init_class_attribute(self, mapper):
+        self.parent_property._get_strategy(
+            (("lazy", "select"),)
+        ).init_class_attribute(mapper)
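+
+    # illustrative usage: this strategy is selected via
+    # relationship(..., lazy="immediate") or the immediateload() option;
+    # each parent instance loaded triggers a load of the related attribute
+    # as a post-load step, reusing the lazy loader machinery above.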
+
+    def create_row_processor(
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
+    ):
+        if not context.compile_state.compile_options._enable_eagerloads:
+            return
+
+        (
+            effective_path,
+            run_loader,
+            execution_options,
+            recursion_depth,
+        ) = self._setup_for_recursion(context, path, loadopt, self.join_depth)
+
+        if not run_loader:
+            # this will not emit SQL and will only emit for a many-to-one
+            # "use get" load.   the "_RELATED" part means it may return the
+            # instance even if it's expired, since this is a
+            # mutually-recursive load operation.
+            flags = attributes.PASSIVE_NO_FETCH_RELATED | PassiveFlag.NO_RAISE
+        else:
+            flags = attributes.PASSIVE_OFF | PassiveFlag.NO_RAISE
+
+        loading.PostLoad.callable_for_path(
+            context,
+            effective_path,
+            self.parent,
+            self.parent_property,
+            self._load_for_path,
+            loadopt,
+            flags,
+            recursion_depth,
+            execution_options,
+        )
+
+    def _load_for_path(
+        self,
+        context,
+        path,
+        states,
+        load_only,
+        loadopt,
+        flags,
+        recursion_depth,
+        execution_options,
+    ):
+        if recursion_depth:
+            new_opt = Load(loadopt.path.entity)
+            new_opt.context = (
+                loadopt,
+                loadopt._recurse(),
+            )
+            alternate_effective_path = path._truncate_recursive()
+            extra_options = (new_opt,)
+        else:
+            new_opt = None
+            alternate_effective_path = path
+            extra_options = ()
+
+        key = self.key
+        lazyloader = self.parent_property._get_strategy((("lazy", "select"),))
+        for state, overwrite in states:
+            dict_ = state.dict
+
+            if overwrite or key not in dict_:
+                value = lazyloader._load_for_state(
+                    state,
+                    flags,
+                    extra_options=extra_options,
+                    alternate_effective_path=alternate_effective_path,
+                    execution_options=execution_options,
+                )
+                if value not in (
+                    ATTR_WAS_SET,
+                    LoaderCallableStatus.PASSIVE_NO_RESULT,
+                ):
+                    state.get_impl(key).set_committed_value(
+                        state, dict_, value
+                    )
+
+
+@log.class_logger
+@relationships.RelationshipProperty.strategy_for(lazy="subquery")
+class SubqueryLoader(PostLoader):
+    __slots__ = ("join_depth",)
+
+    def __init__(self, parent, strategy_key):
+        super().__init__(parent, strategy_key)
+        self.join_depth = self.parent_property.join_depth
+
+    def init_class_attribute(self, mapper):
+        self.parent_property._get_strategy(
+            (("lazy", "select"),)
+        ).init_class_attribute(mapper)
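+
+    # illustrative sketch (assuming a User.addresses relationship): for
+    #
+    #     session.query(User).options(subqueryload(User.addresses)).all()
+    #
+    # the original query is restated as a subquery of the parent's local
+    # columns, joined to the related table in a second SELECT; the methods
+    # below build that second query from the original one.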
+
+    def _get_leftmost(
+        self,
+        orig_query_entity_index,
+        subq_path,
+        current_compile_state,
+        is_root,
+    ):
+        given_subq_path = subq_path
+        subq_path = subq_path.path
+        subq_mapper = orm_util._class_to_mapper(subq_path[0])
+
+        # determine attributes of the leftmost mapper
+        if (
+            self.parent.isa(subq_mapper)
+            and self.parent_property is subq_path[1]
+        ):
+            leftmost_mapper, leftmost_prop = self.parent, self.parent_property
+        else:
+            leftmost_mapper, leftmost_prop = subq_mapper, subq_path[1]
+
+        if is_root:
+            # the subq_path is also coming from cached state, so when we
+            # start building up this path, it has to also be converted to
+            # be in terms of the current state.  this is for the specific
+            # case where the entity is an AliasedClass against a subquery
+            # that's not otherwise going to adapt
+            new_subq_path = current_compile_state._entities[
+                orig_query_entity_index
+            ].entity_zero._path_registry[leftmost_prop]
+            additional = len(subq_path) - len(new_subq_path)
+            if additional:
+                new_subq_path += path_registry.PathRegistry.coerce(
+                    subq_path[-additional:]
+                )
+        else:
+            new_subq_path = given_subq_path
+
+        leftmost_cols = leftmost_prop.local_columns
+
+        leftmost_attr = [
+            getattr(
+                new_subq_path.path[0].entity,
+                leftmost_mapper._columntoproperty[c].key,
+            )
+            for c in leftmost_cols
+        ]
+
+        return leftmost_mapper, leftmost_attr, leftmost_prop, new_subq_path
+
+    def _generate_from_original_query(
+        self,
+        orig_compile_state,
+        orig_query,
+        leftmost_mapper,
+        leftmost_attr,
+        leftmost_relationship,
+        orig_entity,
+    ):
+        # reformat the original query
+        # to look only for significant columns
+        q = orig_query._clone().correlate(None)
+
+        # LEGACY: make a Query back from the select() !!
+        # This suits at least two legacy cases:
+        # 1. applications which expect before_compile() to be called
+        #    below when we run .subquery() on this query (Keystone)
+        # 2. applications which are doing subqueryload with complex
+        #    from_self() queries, as query.subquery() / .statement
+        #    has to do the full compile context for multiply-nested
+        #    from_self() (Neutron) - see test_subqload_from_self
+        #    for demo.
+        q2 = query.Query.__new__(query.Query)
+        q2.__dict__.update(q.__dict__)
+        q = q2
+
+        # set the query's "FROM" list explicitly to what the
+        # FROM list would be in any case, as we will be limiting
+        # the columns in the SELECT list which may no longer include
+        # all entities mentioned in things like WHERE, JOIN, etc.
+        if not q._from_obj:
+            q._enable_assertions = False
+            q.select_from.non_generative(
+                q,
+                *{
+                    ent["entity"]
+                    for ent in _column_descriptions(
+                        orig_query, compile_state=orig_compile_state
+                    )
+                    if ent["entity"] is not None
+                },
+            )
+
+        # select from the identity columns of the outer (specifically, these
+        # are the 'local_cols' of the property).  This will remove other
+        # columns from the query that might suggest the right entity which is
+        # why we do set select_from above.   The attributes we have are
+        # coerced and adapted using the original query's adapter, which is
+        # needed only for the case of adapting a subclass column to
+        # that of a polymorphic selectable, e.g. we have
+        # Engineer.primary_language and the entity is Person.  All other
+        # adaptations, e.g. from_self, select_entity_from(), will occur
+        # within the new query when it compiles, as the compile_state we are
+        # using here is only a partial one.  If the subqueryload is from a
+        # with_polymorphic() or other aliased() object, left_attr will already
+        # be the correct attributes so no adaptation is needed.
+        target_cols = orig_compile_state._adapt_col_list(
+            [
+                sql.coercions.expect(sql.roles.ColumnsClauseRole, o)
+                for o in leftmost_attr
+            ],
+            orig_compile_state._get_current_adapter(),
+        )
+        q._raw_columns = target_cols
+
+        distinct_target_key = leftmost_relationship.distinct_target_key
+
+        if distinct_target_key is True:
+            q._distinct = True
+        elif distinct_target_key is None:
+            # if target_cols refer to a non-primary key or only
+            # part of a composite primary key, set the q as distinct
+            for t in {c.table for c in target_cols}:
+                if not set(target_cols).issuperset(t.primary_key):
+                    q._distinct = True
+                    break
+
+        # don't need ORDER BY if no limit/offset
+        if not q._has_row_limiting_clause:
+            q._order_by_clauses = ()
+
+        if q._distinct is True and q._order_by_clauses:
+            # the logic to automatically add the order by columns to the query
+            # when distinct is True is deprecated in the query
+            to_add = sql_util.expand_column_list_from_order_by(
+                target_cols, q._order_by_clauses
+            )
+            if to_add:
+                q._set_entities(target_cols + to_add)
+
+        # the original query now becomes a subquery
+        # which we'll join onto.
+        # LEGACY: as "q" is a Query, the before_compile() event is invoked
+        # here.
+        embed_q = q.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL).subquery()
+        left_alias = orm_util.AliasedClass(
+            leftmost_mapper, embed_q, use_mapper_path=True
+        )
+        return left_alias
+
+    def _prep_for_joins(self, left_alias, subq_path):
+        # figure out what's being joined.  a.k.a. the fun part
+        to_join = []
+        pairs = list(subq_path.pairs())
+
+        for i, (mapper, prop) in enumerate(pairs):
+            if i > 0:
+                # look at the previous mapper in the chain -
+                # if it is as or more specific than this prop's
+                # mapper, use that instead.
+                # note we have an assumption here that
+                # the non-first element is always going to be a mapper,
+                # not an AliasedClass
+
+                prev_mapper = pairs[i - 1][1].mapper
+                to_append = prev_mapper if prev_mapper.isa(mapper) else mapper
+            else:
+                to_append = mapper
+
+            to_join.append((to_append, prop.key))
+
+        # determine the immediate parent class we are joining from,
+        # which needs to be aliased.
+
+        if len(to_join) < 2:
+            # in the case of a one level eager load, this is the
+            # leftmost "left_alias".
+            parent_alias = left_alias
+        else:
+            info = inspect(to_join[-1][0])
+            if info.is_aliased_class:
+                parent_alias = info.entity
+            else:
+                # alias a plain mapper as we may be
+                # joining multiple times
+                parent_alias = orm_util.AliasedClass(
+                    info.entity, use_mapper_path=True
+                )
+
+        local_cols = self.parent_property.local_columns
+
+        local_attr = [
+            getattr(parent_alias, self.parent._columntoproperty[c].key)
+            for c in local_cols
+        ]
+        return to_join, local_attr, parent_alias
+
+    def _apply_joins(
+        self, q, to_join, left_alias, parent_alias, effective_entity
+    ):
+        ltj = len(to_join)
+        if ltj == 1:
+            to_join = [
+                getattr(left_alias, to_join[0][1]).of_type(effective_entity)
+            ]
+        elif ltj == 2:
+            to_join = [
+                getattr(left_alias, to_join[0][1]).of_type(parent_alias),
+                getattr(parent_alias, to_join[-1][1]).of_type(
+                    effective_entity
+                ),
+            ]
+        elif ltj > 2:
+            middle = [
+                (
+                    (
+                        orm_util.AliasedClass(item[0])
+                        if not inspect(item[0]).is_aliased_class
+                        else item[0].entity
+                    ),
+                    item[1],
+                )
+                for item in to_join[1:-1]
+            ]
+            inner = []
+
+            while middle:
+                item = middle.pop(0)
+                attr = getattr(item[0], item[1])
+                if middle:
+                    attr = attr.of_type(middle[0][0])
+                else:
+                    attr = attr.of_type(parent_alias)
+
+                inner.append(attr)
+
+            to_join = (
+                [getattr(left_alias, to_join[0][1]).of_type(inner[0].parent)]
+                + inner
+                + [
+                    getattr(parent_alias, to_join[-1][1]).of_type(
+                        effective_entity
+                    )
+                ]
+            )
+
+        for attr in to_join:
+            q = q.join(attr)
+
+        return q
+
+    def _setup_options(
+        self,
+        context,
+        q,
+        subq_path,
+        rewritten_path,
+        orig_query,
+        effective_entity,
+        loadopt,
+    ):
+        # note that because the subqueryload object
+        # does not re-use the cached query, instead always making
+        # use of the current invoked query, while we have two queries
+        # here (orig and context.query), they are both non-cached
+        # queries and we can transfer the options as is without
+        # adjusting for new criteria.   Some work on #6881 / #6889
+        # brought this into question.
+        new_options = orig_query._with_options
+
+        if loadopt and loadopt._extra_criteria:
+            new_options += (
+                orm_util.LoaderCriteriaOption(
+                    self.entity,
+                    loadopt._generate_extra_criteria(context),
+                ),
+            )
+
+        # propagate loader options etc. to the new query.
+        # these will fire relative to subq_path.
+        q = q._with_current_path(rewritten_path)
+        q = q.options(*new_options)
+
+        return q
+
+    def _setup_outermost_orderby(self, q):
+        if self.parent_property.order_by:
+
+            def _setup_outermost_orderby(compile_context):
+                compile_context.eager_order_by += tuple(
+                    util.to_list(self.parent_property.order_by)
+                )
+
+            q = q._add_context_option(
+                _setup_outermost_orderby, self.parent_property
+            )
+
+        return q
+
+    class _SubqCollections:
+        """Given a :class:`_query.Query` used to emit the "subquery load",
+        provide a load interface that executes the query at the
+        first moment a value is needed.
+
+        """
+
+        __slots__ = (
+            "session",
+            "execution_options",
+            "load_options",
+            "params",
+            "subq",
+            "_data",
+        )
+
+        def __init__(self, context, subq):
+            # avoid creating a reference cycle: don't store the context
+            # itself, even though that would be preferable
+            self.session = context.session
+            self.execution_options = context.execution_options
+            self.load_options = context.load_options
+            self.params = context.params or {}
+            self.subq = subq
+            self._data = None
+
+        def get(self, key, default):
+            if self._data is None:
+                self._load()
+            return self._data.get(key, default)
+
+        def _load(self):
+            self._data = collections.defaultdict(list)
+
+            q = self.subq
+            assert q.session is None
+
+            q = q.with_session(self.session)
+
+            if self.load_options._populate_existing:
+                q = q.populate_existing()
+            # to work with baked query, the parameters may have been
+            # updated since this query was created, so take these into account
+
+            rows = list(q.params(self.params))
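+            # each row is (related_entity, *parent_local_cols); group the
+            # related entities under their parent-key tuple so the row
+            # processors can look collections up by key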
+            for k, v in itertools.groupby(rows, lambda x: x[1:]):
+                self._data[k].extend(vv[0] for vv in v)
+
+        def loader(self, state, dict_, row):
+            if self._data is None:
+                self._load()
+
+    def _setup_query_from_rowproc(
+        self,
+        context,
+        query_entity,
+        path,
+        entity,
+        loadopt,
+        adapter,
+    ):
+        compile_state = context.compile_state
+        if (
+            not compile_state.compile_options._enable_eagerloads
+            or compile_state.compile_options._for_refresh_state
+        ):
+            return
+
+        orig_query_entity_index = compile_state._entities.index(query_entity)
+        context.loaders_require_buffering = True
+
+        path = path[self.parent_property]
+
+        # build up a path indicating the path from the leftmost
+        # entity to the thing we're subquery loading.
+        with_poly_entity = path.get(
+            compile_state.attributes, "path_with_polymorphic", None
+        )
+        if with_poly_entity is not None:
+            effective_entity = with_poly_entity
+        else:
+            effective_entity = self.entity
+
+        subq_path, rewritten_path = context.query._execution_options.get(
+            ("subquery_paths", None),
+            (orm_util.PathRegistry.root, orm_util.PathRegistry.root),
+        )
+        is_root = subq_path is orm_util.PathRegistry.root
+        subq_path = subq_path + path
+        rewritten_path = rewritten_path + path
+
+        # use the current query being invoked, not the compile state
+        # one.  this is so that we get the current parameters.  however,
+        # it means we can't use the existing compile state, we have to make
+        # a new one.    other approaches include possibly using the
+        # compiled query but swapping the params, seems only marginally
+        # less time spent but more complicated
+        orig_query = context.query._execution_options.get(
+            ("orig_query", SubqueryLoader), context.query
+        )
+
+        # make a new compile_state for the query that's probably cached, but
+        # we're sort of undoing a bit of that caching :(
+        compile_state_cls = ORMCompileState._get_plugin_class_for_plugin(
+            orig_query, "orm"
+        )
+
+        if orig_query._is_lambda_element:
+            if context.load_options._lazy_loaded_from is None:
+                util.warn(
+                    'subqueryloader for "%s" must invoke lambda callable '
+                    "at %r in "
+                    "order to produce a new query, decreasing the efficiency "
+                    "of caching for this statement.  Consider using "
+                    "selectinload() for more effective full-lambda caching"
+                    % (self, orig_query)
+                )
+            orig_query = orig_query._resolved
+
+        # this is the more "quick" version; however, it's not clear how
+        # much of this we need.    in particular I can't get a test to
+        # fail if the "set_base_alias" is missing, and I'm not sure why
+        # that is.
+        orig_compile_state = compile_state_cls._create_entities_collection(
+            orig_query, legacy=False
+        )
+
+        (
+            leftmost_mapper,
+            leftmost_attr,
+            leftmost_relationship,
+            rewritten_path,
+        ) = self._get_leftmost(
+            orig_query_entity_index,
+            rewritten_path,
+            orig_compile_state,
+            is_root,
+        )
+
+        # generate a new Query from the original, then
+        # produce a subquery from it.
+        left_alias = self._generate_from_original_query(
+            orig_compile_state,
+            orig_query,
+            leftmost_mapper,
+            leftmost_attr,
+            leftmost_relationship,
+            entity,
+        )
+
+        # generate another Query that will join the
+        # left alias to the target relationships.
+        # basically doing a longhand
+        # "from_self()".  (from_self() itself not quite industrial
+        # strength enough for all contingencies...but very close)
+
+        q = query.Query(effective_entity)
+
+        q._execution_options = context.query._execution_options.merge_with(
+            context.execution_options,
+            {
+                ("orig_query", SubqueryLoader): orig_query,
+                ("subquery_paths", None): (subq_path, rewritten_path),
+            },
+        )
+
+        q = q._set_enable_single_crit(False)
+        to_join, local_attr, parent_alias = self._prep_for_joins(
+            left_alias, subq_path
+        )
+
+        q = q.add_columns(*local_attr)
+        q = self._apply_joins(
+            q, to_join, left_alias, parent_alias, effective_entity
+        )
+
+        q = self._setup_options(
+            context,
+            q,
+            subq_path,
+            rewritten_path,
+            orig_query,
+            effective_entity,
+            loadopt,
+        )
+        q = self._setup_outermost_orderby(q)
+
+        return q
+
+    def create_row_processor(
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
+    ):
+        if (
+            loadopt
+            and context.compile_state.statement is not None
+            and context.compile_state.statement.is_dml
+        ):
+            util.warn_deprecated(
+                "The subqueryload loader option is not compatible with DML "
+                "statements such as INSERT, UPDATE.  Only SELECT may be used."
+                "This warning will become an exception in a future release.",
+                "2.0",
+            )
+
+        if context.refresh_state:
+            return self._immediateload_create_row_processor(
+                context,
+                query_entity,
+                path,
+                loadopt,
+                mapper,
+                result,
+                adapter,
+                populators,
+            )
+
+        _, run_loader, _, _ = self._setup_for_recursion(
+            context, path, loadopt, self.join_depth
+        )
+        if not run_loader:
+            return
+
+        if not isinstance(context.compile_state, ORMSelectCompileState):
+            # issue 7505 - subqueryload() in 1.3 and previous would silently
+            # degrade for from_statement() without warning. this behavior
+            # is restored here
+            return
+
+        if not self.parent.class_manager[self.key].impl.supports_population:
+            raise sa_exc.InvalidRequestError(
+                "'%s' does not support object "
+                "population - eager loading cannot be applied." % self
+            )
+
+        # a little dance here as the "path" is still something that only
+        # semi-tracks the exact series of things we are loading, still not
+        # telling us about with_polymorphic() and stuff like that when it's
+        # at the root.  the initial MapperEntity is more accurate for this
+        # case.
+        if len(path) == 1:
+            if not orm_util._entity_isa(query_entity.entity_zero, self.parent):
+                return
+        elif not orm_util._entity_isa(path[-1], self.parent):
+            return
+
+        subq = self._setup_query_from_rowproc(
+            context,
+            query_entity,
+            path,
+            path[-1],
+            loadopt,
+            adapter,
+        )
+
+        if subq is None:
+            return
+
+        assert subq.session is None
+
+        path = path[self.parent_property]
+
+        local_cols = self.parent_property.local_columns
+
+        # cache the loaded collections in the context
+        # so that inheriting mappers don't re-load when they
+        # call upon create_row_processor again
+        collections = path.get(context.attributes, "collections")
+        if collections is None:
+            collections = self._SubqCollections(context, subq)
+            path.set(context.attributes, "collections", collections)
+
+        if adapter:
+            local_cols = [adapter.columns[c] for c in local_cols]
+
+        if self.uselist:
+            self._create_collection_loader(
+                context, result, collections, local_cols, populators
+            )
+        else:
+            self._create_scalar_loader(
+                context, result, collections, local_cols, populators
+            )
+
+    def _create_collection_loader(
+        self, context, result, collections, local_cols, populators
+    ):
+        tuple_getter = result._tuple_getter(local_cols)
+
+        def load_collection_from_subq(state, dict_, row):
+            collection = collections.get(tuple_getter(row), ())
+            state.get_impl(self.key).set_committed_value(
+                state, dict_, collection
+            )
+
+        def load_collection_from_subq_existing_row(state, dict_, row):
+            if self.key not in dict_:
+                load_collection_from_subq(state, dict_, row)
+
+        populators["new"].append((self.key, load_collection_from_subq))
+        populators["existing"].append(
+            (self.key, load_collection_from_subq_existing_row)
+        )
+
+        if context.invoke_all_eagers:
+            populators["eager"].append((self.key, collections.loader))
+
+    def _create_scalar_loader(
+        self, context, result, collections, local_cols, populators
+    ):
+        tuple_getter = result._tuple_getter(local_cols)
+
+        def load_scalar_from_subq(state, dict_, row):
+            collection = collections.get(tuple_getter(row), (None,))
+            if len(collection) > 1:
+                util.warn(
+                    "Multiple rows returned with "
+                    "uselist=False for eagerly-loaded attribute '%s' " % self
+                )
+
+            scalar = collection[0]
+            state.get_impl(self.key).set_committed_value(state, dict_, scalar)
+
+        def load_scalar_from_subq_existing_row(state, dict_, row):
+            if self.key not in dict_:
+                load_scalar_from_subq(state, dict_, row)
+
+        populators["new"].append((self.key, load_scalar_from_subq))
+        populators["existing"].append(
+            (self.key, load_scalar_from_subq_existing_row)
+        )
+        if context.invoke_all_eagers:
+            populators["eager"].append((self.key, collections.loader))
+
+
+@log.class_logger
+@relationships.RelationshipProperty.strategy_for(lazy="joined")
+@relationships.RelationshipProperty.strategy_for(lazy=False)
+class JoinedLoader(AbstractRelationshipLoader):
+    """Provide loading behavior for a :class:`.Relationship`
+    using joined eager loading.
+
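+    E.g. (illustrative), ``select(Parent).options(joinedload(Parent.children))``
+    or ``relationship(..., lazy="joined")`` renders the related rows in the
+    same SELECT statement via a LEFT OUTER JOIN, or an INNER JOIN when
+    ``innerjoin=True`` is configured.
+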
+    """
+
+    __slots__ = "join_depth"
+
+    def __init__(self, parent, strategy_key):
+        super().__init__(parent, strategy_key)
+        self.join_depth = self.parent_property.join_depth
+
+    def init_class_attribute(self, mapper):
+        self.parent_property._get_strategy(
+            (("lazy", "select"),)
+        ).init_class_attribute(mapper)
+
+    def setup_query(
+        self,
+        compile_state,
+        query_entity,
+        path,
+        loadopt,
+        adapter,
+        column_collection=None,
+        parentmapper=None,
+        chained_from_outerjoin=False,
+        **kwargs,
+    ):
+        """Add a left outer join to the statement that's being constructed."""
+
+        if not compile_state.compile_options._enable_eagerloads:
+            return
+        elif (
+            loadopt
+            and compile_state.statement is not None
+            and compile_state.statement.is_dml
+        ):
+            util.warn_deprecated(
+                "The joinedload loader option is not compatible with DML "
+                "statements such as INSERT, UPDATE.  Only SELECT may be used."
+                "This warning will become an exception in a future release.",
+                "2.0",
+            )
+        elif self.uselist:
+            compile_state.multi_row_eager_loaders = True
+
+        path = path[self.parent_property]
+
+        with_polymorphic = None
+
+        user_defined_adapter = (
+            self._init_user_defined_eager_proc(
+                loadopt, compile_state, compile_state.attributes
+            )
+            if loadopt
+            else False
+        )
+
+        if user_defined_adapter is not False:
+            # set up an adapter but don't create any JOIN; assume it's
+            # already in the query
+            (
+                clauses,
+                adapter,
+                add_to_collection,
+            ) = self._setup_query_on_user_defined_adapter(
+                compile_state,
+                query_entity,
+                path,
+                adapter,
+                user_defined_adapter,
+            )
+
+            # don't do "wrap" for multi-row, we want to wrap
+            # limited/distinct SELECT,
+            # because we want to put the JOIN on the outside.
+
+        else:
+            # if not via query option, check for
+            # a cycle
+            if not path.contains(compile_state.attributes, "loader"):
+                if self.join_depth:
+                    if path.length / 2 > self.join_depth:
+                        return
+                elif path.contains_mapper(self.mapper):
+                    return
+
+            # add the JOIN and create an adapter
+            (
+                clauses,
+                adapter,
+                add_to_collection,
+                chained_from_outerjoin,
+            ) = self._generate_row_adapter(
+                compile_state,
+                query_entity,
+                path,
+                loadopt,
+                adapter,
+                column_collection,
+                parentmapper,
+                chained_from_outerjoin,
+            )
+
+            # for multi-row, we want to wrap limited/distinct SELECT,
+            # because we want to put the JOIN on the outside.
+            compile_state.eager_adding_joins = True
+
+        with_poly_entity = path.get(
+            compile_state.attributes, "path_with_polymorphic", None
+        )
+        if with_poly_entity is not None:
+            with_polymorphic = inspect(
+                with_poly_entity
+            ).with_polymorphic_mappers
+        else:
+            with_polymorphic = None
+
+        path = path[self.entity]
+
+        loading._setup_entity_query(
+            compile_state,
+            self.mapper,
+            query_entity,
+            path,
+            clauses,
+            add_to_collection,
+            with_polymorphic=with_polymorphic,
+            parentmapper=self.mapper,
+            chained_from_outerjoin=chained_from_outerjoin,
+        )
+
+        has_nones = util.NONE_SET.intersection(compile_state.secondary_columns)
+
+        if has_nones:
+            if with_poly_entity is not None:
+                raise sa_exc.InvalidRequestError(
+                    "Detected unaliased columns when generating joined "
+                    "load.  Make sure to use aliased=True or flat=True "
+                    "when using joined loading with with_polymorphic()."
+                )
+            else:
+                compile_state.secondary_columns = [
+                    c for c in compile_state.secondary_columns if c is not None
+                ]
+
+    def _init_user_defined_eager_proc(
+        self, loadopt, compile_state, target_attributes
+    ):
+        # check if the opt applies at all
+        if "eager_from_alias" not in loadopt.local_opts:
+            # nope
+            return False
+
+        path = loadopt.path.parent
+
+        # the option applies.  check if the "user_defined_eager_row_processor"
+        # has been built up.
+        adapter = path.get(
+            compile_state.attributes, "user_defined_eager_row_processor", False
+        )
+        if adapter is not False:
+            # just return it
+            return adapter
+
+        # otherwise figure it out.
+        alias = loadopt.local_opts["eager_from_alias"]
+        root_mapper, prop = path[-2:]
+
+        if alias is not None:
+            if isinstance(alias, str):
+                alias = prop.target.alias(alias)
+            adapter = orm_util.ORMAdapter(
+                orm_util._TraceAdaptRole.JOINEDLOAD_USER_DEFINED_ALIAS,
+                prop.mapper,
+                selectable=alias,
+                equivalents=prop.mapper._equivalent_columns,
+                limit_on_entity=False,
+            )
+        else:
+            if path.contains(
+                compile_state.attributes, "path_with_polymorphic"
+            ):
+                with_poly_entity = path.get(
+                    compile_state.attributes, "path_with_polymorphic"
+                )
+                adapter = orm_util.ORMAdapter(
+                    orm_util._TraceAdaptRole.JOINEDLOAD_PATH_WITH_POLYMORPHIC,
+                    with_poly_entity,
+                    equivalents=prop.mapper._equivalent_columns,
+                )
+            else:
+                adapter = compile_state._polymorphic_adapters.get(
+                    prop.mapper, None
+                )
+        path.set(
+            target_attributes,
+            "user_defined_eager_row_processor",
+            adapter,
+        )
+
+        return adapter
+
+    def _setup_query_on_user_defined_adapter(
+        self, context, entity, path, adapter, user_defined_adapter
+    ):
+        # apply some more wrapping to the "user defined adapter"
+        # if we are setting up the query for SQL rendering.
+        adapter = entity._get_entity_clauses(context)
+
+        if adapter and user_defined_adapter:
+            user_defined_adapter = user_defined_adapter.wrap(adapter)
+            path.set(
+                context.attributes,
+                "user_defined_eager_row_processor",
+                user_defined_adapter,
+            )
+        elif adapter:
+            user_defined_adapter = adapter
+            path.set(
+                context.attributes,
+                "user_defined_eager_row_processor",
+                user_defined_adapter,
+            )
+
+        add_to_collection = context.primary_columns
+        return user_defined_adapter, adapter, add_to_collection
+
+    def _generate_row_adapter(
+        self,
+        compile_state,
+        entity,
+        path,
+        loadopt,
+        adapter,
+        column_collection,
+        parentmapper,
+        chained_from_outerjoin,
+    ):
+        with_poly_entity = path.get(
+            compile_state.attributes, "path_with_polymorphic", None
+        )
+        if with_poly_entity:
+            to_adapt = with_poly_entity
+        else:
+            insp = inspect(self.entity)
+            if insp.is_aliased_class:
+                alt_selectable = insp.selectable
+            else:
+                alt_selectable = None
+
+            to_adapt = orm_util.AliasedClass(
+                self.mapper,
+                alias=(
+                    alt_selectable._anonymous_fromclause(flat=True)
+                    if alt_selectable is not None
+                    else None
+                ),
+                flat=True,
+                use_mapper_path=True,
+            )
+
+        to_adapt_insp = inspect(to_adapt)
+
+        clauses = to_adapt_insp._memo(
+            ("joinedloader_ormadapter", self),
+            orm_util.ORMAdapter,
+            orm_util._TraceAdaptRole.JOINEDLOAD_MEMOIZED_ADAPTER,
+            to_adapt_insp,
+            equivalents=self.mapper._equivalent_columns,
+            adapt_required=True,
+            allow_label_resolve=False,
+            anonymize_labels=True,
+        )
+
+        assert clauses.is_aliased_class
+
+        innerjoin = (
+            loadopt.local_opts.get("innerjoin", self.parent_property.innerjoin)
+            if loadopt is not None
+            else self.parent_property.innerjoin
+        )
+
+        if not innerjoin:
+            # if this is an outer join, all non-nested eager joins from
+            # this path must also be outer joins
+            chained_from_outerjoin = True
+
+        compile_state.create_eager_joins.append(
+            (
+                self._create_eager_join,
+                entity,
+                path,
+                adapter,
+                parentmapper,
+                clauses,
+                innerjoin,
+                chained_from_outerjoin,
+                loadopt._extra_criteria if loadopt else (),
+            )
+        )
+
+        add_to_collection = compile_state.secondary_columns
+        path.set(compile_state.attributes, "eager_row_processor", clauses)
+
+        return clauses, adapter, add_to_collection, chained_from_outerjoin
+
+    def _create_eager_join(
+        self,
+        compile_state,
+        query_entity,
+        path,
+        adapter,
+        parentmapper,
+        clauses,
+        innerjoin,
+        chained_from_outerjoin,
+        extra_criteria,
+    ):
+        if parentmapper is None:
+            localparent = query_entity.mapper
+        else:
+            localparent = parentmapper
+
+        # whether or not the Query will wrap the selectable in a subquery,
+        # and then attach eager load joins to that (i.e., in the case of
+        # LIMIT/OFFSET etc.)
+        should_nest_selectable = (
+            compile_state.multi_row_eager_loaders
+            and compile_state._should_nest_selectable
+        )
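+        # illustrative sketch of the nesting case: for a query like
+        #   select(User).options(joinedload(User.addresses)).limit(10)
+        # the LIMIT must apply to User rows alone, so the statement renders
+        # along the lines of
+        #   SELECT ... FROM (SELECT ... FROM users LIMIT 10) AS anon_1
+        #   LEFT OUTER JOIN addresses ON ...
+        # with the eager join attached outside of the subquery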
+
+        query_entity_key = None
+
+        if (
+            query_entity not in compile_state.eager_joins
+            and not should_nest_selectable
+            and compile_state.from_clauses
+        ):
+            indexes = sql_util.find_left_clause_that_matches_given(
+                compile_state.from_clauses, query_entity.selectable
+            )
+
+            if len(indexes) > 1:
+                # this case is reproducible with query.join(); it's not
+                # currently reproducible for the joined eager load case
+                raise sa_exc.InvalidRequestError(
+                    "Can't identify which query entity to join the eager "
+                    "load from.  Please use an exact match when "
+                    "specifying the join path."
+                )
+
+            if indexes:
+                clause = compile_state.from_clauses[indexes[0]]
+                # join to an existing FROM clause on the query.
+                # key it to its list index in the eager_joins dict.
+                # Query._compile_context will adapt as needed and
+                # append to the FROM clause of the select().
+                query_entity_key, default_towrap = indexes[0], clause
+
+        if query_entity_key is None:
+            query_entity_key, default_towrap = (
+                query_entity,
+                query_entity.selectable,
+            )
+
+        towrap = compile_state.eager_joins.setdefault(
+            query_entity_key, default_towrap
+        )
+
+        if adapter:
+            if getattr(adapter, "is_aliased_class", False):
+                # joining from an adapted entity.  The adapted entity
+                # might be a "with_polymorphic", so resolve that to our
+                # specific mapper's entity before looking for our attribute
+                # name on it.
+                efm = adapter.aliased_insp._entity_for_mapper(
+                    localparent
+                    if localparent.isa(self.parent)
+                    else self.parent
+                )
+
+                # look for our attribute on the adapted entity, else fall back
+                # to our straight property
+                onclause = getattr(efm.entity, self.key, self.parent_property)
+            else:
+                onclause = getattr(
+                    orm_util.AliasedClass(
+                        self.parent, adapter.selectable, use_mapper_path=True
+                    ),
+                    self.key,
+                    self.parent_property,
+                )
+
+        else:
+            onclause = self.parent_property
+
+        assert clauses.is_aliased_class
+
+        attach_on_outside = (
+            not chained_from_outerjoin
+            or not innerjoin
+            or innerjoin == "unnested"
+            or query_entity.entity_zero.represents_outer_join
+        )
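+        # e.g. with innerjoin="unnested", if the join we are attaching to
+        # is itself an OUTER join, we attach on the outside as a further
+        # OUTER join, rendering "a LEFT OUTER JOIN b LEFT OUTER JOIN c"
+        # rather than the right-nested "a LEFT OUTER JOIN (b JOIN c)"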
+
+        extra_join_criteria = extra_criteria
+        additional_entity_criteria = compile_state.global_attributes.get(
+            ("additional_entity_criteria", self.mapper), ()
+        )
+        if additional_entity_criteria:
+            extra_join_criteria += tuple(
+                ae._resolve_where_criteria(self.mapper)
+                for ae in additional_entity_criteria
+                if ae.propagate_to_loaders
+            )
+
+        if attach_on_outside:
+            # this is the "classic" eager join case.
+            eagerjoin = orm_util._ORMJoin(
+                towrap,
+                clauses.aliased_insp,
+                onclause,
+                isouter=not innerjoin
+                or query_entity.entity_zero.represents_outer_join
+                or (chained_from_outerjoin and isinstance(towrap, sql.Join)),
+                _left_memo=self.parent,
+                _right_memo=path[self.mapper],
+                _extra_criteria=extra_join_criteria,
+            )
+        else:
+            # all other cases are innerjoin=='nested' approach
+            eagerjoin = self._splice_nested_inner_join(
+                path, path[-2], towrap, clauses, onclause, extra_join_criteria
+            )
+
+        compile_state.eager_joins[query_entity_key] = eagerjoin
+
+        # send a hint to the Query as to where it may "splice" this join
+        eagerjoin.stop_on = query_entity.selectable
+
+        if not parentmapper:
+            # for the parent clause that is the non-eager end of the join,
+            # ensure all the parent cols in the primaryjoin are actually in
+            # the columns clause (i.e. are not deferred), so that aliasing
+            # applied by the Query propagates those columns outward.  This
+            # has the effect of "undefering" those columns.
+            for col in sql_util._find_columns(
+                self.parent_property.primaryjoin
+            ):
+                if localparent.persist_selectable.c.contains_column(col):
+                    if adapter:
+                        col = adapter.columns[col]
+                    compile_state._append_dedupe_col_collection(
+                        col, compile_state.primary_columns
+                    )
+
+        if self.parent_property.order_by:
+            compile_state.eager_order_by += tuple(
+                (eagerjoin._target_adapter.copy_and_process)(
+                    util.to_list(self.parent_property.order_by)
+                )
+            )
+
+    def _splice_nested_inner_join(
+        self,
+        path,
+        entity_we_want_to_splice_onto,
+        join_obj,
+        clauses,
+        onclause,
+        extra_criteria,
+        entity_inside_join_structure: Union[
+            Mapper, None, Literal[False]
+        ] = False,
+        detected_existing_path: Optional[path_registry.PathRegistry] = None,
+    ):
+        # recursive fn to splice a nested join into an existing one.
+        # entity_inside_join_structure=False means this is the outermost call,
+        # and it should return a value.  entity_inside_join_structure=<mapper>
+        # indicates we've descended into a join and are looking at a FROM
+        # clause representing this mapper; if this is not
+        # entity_we_want_to_splice_onto then return None to end the recursive
+        # branch
+
+        assert entity_we_want_to_splice_onto is path[-2]
+
+        if entity_inside_join_structure is False:
+            assert isinstance(join_obj, orm_util._ORMJoin)
+
+        if isinstance(join_obj, sql.selectable.FromGrouping):
+            # FromGrouping - continue descending into the structure
+            return self._splice_nested_inner_join(
+                path,
+                entity_we_want_to_splice_onto,
+                join_obj.element,
+                clauses,
+                onclause,
+                extra_criteria,
+                entity_inside_join_structure,
+            )
+        elif isinstance(join_obj, orm_util._ORMJoin):
+            # _ORMJoin - continue descending into the structure
+
+            join_right_path = join_obj._right_memo
+
+            # see if right side of join is viable
+            target_join = self._splice_nested_inner_join(
+                path,
+                entity_we_want_to_splice_onto,
+                join_obj.right,
+                clauses,
+                onclause,
+                extra_criteria,
+                entity_inside_join_structure=(
+                    join_right_path[-1].mapper
+                    if join_right_path is not None
+                    else None
+                ),
+            )
+
+            if target_join is not None:
+                # for a right splice, attempt to flatten out
+                # "a JOIN b JOIN c JOIN ..." to avoid needless
+                # parenthesized nesting
+                if not join_obj.isouter and not target_join.isouter:
+                    eagerjoin = join_obj._splice_into_center(target_join)
+                else:
+                    eagerjoin = orm_util._ORMJoin(
+                        join_obj.left,
+                        target_join,
+                        join_obj.onclause,
+                        isouter=join_obj.isouter,
+                        _left_memo=join_obj._left_memo,
+                    )
+
+                eagerjoin._target_adapter = target_join._target_adapter
+                return eagerjoin
+
+            else:
+                # see if left side of join is viable
+                target_join = self._splice_nested_inner_join(
+                    path,
+                    entity_we_want_to_splice_onto,
+                    join_obj.left,
+                    clauses,
+                    onclause,
+                    extra_criteria,
+                    entity_inside_join_structure=join_obj._left_memo,
+                    detected_existing_path=join_right_path,
+                )
+
+                if target_join is not None:
+                    eagerjoin = orm_util._ORMJoin(
+                        target_join,
+                        join_obj.right,
+                        join_obj.onclause,
+                        isouter=join_obj.isouter,
+                        _right_memo=join_obj._right_memo,
+                    )
+                    eagerjoin._target_adapter = target_join._target_adapter
+                    return eagerjoin
+
+            # neither side is viable; return None, or fail if this was
+            # the topmost call
+            if entity_inside_join_structure is False:
+                assert (
+                    False
+                ), "assertion failed attempting to produce joined eager loads"
+            return None
+
+        # reached an endpoint (e.g. a table that's mapped, or an alias of that
+        # table).  determine if we can use this endpoint to splice onto
+
+        # is this the entity we want to splice onto in the first place?
+        if not entity_we_want_to_splice_onto.isa(entity_inside_join_structure):
+            return None
+
+        # path check.  if we know the path by which this join endpoint got
+        # here, compare it to the path we are trying to satisfy and see if
+        # we're in the wrong place.  This is specifically for when our
+        # entity may appear more than once in the path, issue #11449,
+        # updated in issue #11965.
+        if detected_existing_path and len(detected_existing_path) > 2:
+            # this assertion is currently based on how this call is made,
+            # where given a join_obj, the recursive calls will pass either
+            # entity_inside_join_structure=join_obj._left_memo or
+            # entity_inside_join_structure=join_obj._right_memo.mapper
+            assert detected_existing_path[-3] is entity_inside_join_structure
+
+            # from that, see if the path we are targeting matches the
+            # "existing" path of this join all the way up to the midpoint
+            # of this join object (e.g. the relationship).
+            # if not, then this is not our target
+            #
+            # a condition where this check is false looks like:
+            #
+            # desired splice:         Node->kind->Kind
+            # path of desired splice: NodeGroup->nodes->Node->kind
+            # path we've located:     NodeGroup->nodes->Node->common_node->Node
+            #
+            # above, because we want to splice kind->Kind onto
+            # NodeGroup->nodes->Node, this is not our path because it actually
+            # goes more steps than we want into self-referential
+            # ->common_node->Node
+            #
+            # a condition where this check is true looks like:
+            #
+            # desired splice:         B->c2s->C2
+            # path of desired splice: A->bs->B->c2s
+            # path we've located:     A->bs->B->c1s->C1
+            #
+            # above, we want to splice c2s->C2 onto B, and the located path
+            # shows that the join ends with B->c1s->C1.  so we will
+            # add another join onto that, which would create a "branch" that
+            # we might represent in a pseudopath as:
+            #
+            # B->c1s->C1
+            #  ->c2s->C2
+            #
+            # i.e. A JOIN B ON <bs> JOIN C1 ON <c1s>
+            #                       JOIN C2 ON <c2s>
+            #
+
+            if detected_existing_path[0:-2] != path.path[0:-1]:
+                return None
+
+        return orm_util._ORMJoin(
+            join_obj,
+            clauses.aliased_insp,
+            onclause,
+            isouter=False,
+            _left_memo=entity_inside_join_structure,
+            _right_memo=path[path[-1].mapper],
+            _extra_criteria=extra_criteria,
+        )
+
+    def _create_eager_adapter(self, context, result, adapter, path, loadopt):
+        compile_state = context.compile_state
+
+        user_defined_adapter = (
+            self._init_user_defined_eager_proc(
+                loadopt, compile_state, context.attributes
+            )
+            if loadopt
+            else False
+        )
+
+        if user_defined_adapter is not False:
+            decorator = user_defined_adapter
+            # user defined eagerloads are part of the "primary"
+            # portion of the load.
+            # the adapters applied to the Query should be honored.
+            if compile_state.compound_eager_adapter and decorator:
+                decorator = decorator.wrap(
+                    compile_state.compound_eager_adapter
+                )
+            elif compile_state.compound_eager_adapter:
+                decorator = compile_state.compound_eager_adapter
+        else:
+            decorator = path.get(
+                compile_state.attributes, "eager_row_processor"
+            )
+            if decorator is None:
+                return False
+
+        if self.mapper._result_has_identity_key(result, decorator):
+            return decorator
+        else:
+            # no identity key - don't return a row
+            # processor; this will cause a degrade to lazy loading
+            return False
+
+    def create_row_processor(
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
+    ):
+
+        if not context.compile_state.compile_options._enable_eagerloads:
+            return
+
+        if not self.parent.class_manager[self.key].impl.supports_population:
+            raise sa_exc.InvalidRequestError(
+                "'%s' does not support object "
+                "population - eager loading cannot be applied." % self
+            )
+
+        if self.uselist:
+            context.loaders_require_uniquing = True
+
+        our_path = path[self.parent_property]
+
+        eager_adapter = self._create_eager_adapter(
+            context, result, adapter, our_path, loadopt
+        )
+
+        if eager_adapter is not False:
+            key = self.key
+
+            _instance = loading._instance_processor(
+                query_entity,
+                self.mapper,
+                context,
+                result,
+                our_path[self.entity],
+                eager_adapter,
+            )
+
+            if not self.uselist:
+                self._create_scalar_loader(context, key, _instance, populators)
+            else:
+                self._create_collection_loader(
+                    context, key, _instance, populators
+                )
+        else:
+            self.parent_property._get_strategy(
+                (("lazy", "select"),)
+            ).create_row_processor(
+                context,
+                query_entity,
+                path,
+                loadopt,
+                mapper,
+                result,
+                adapter,
+                populators,
+            )
+
+    def _create_collection_loader(self, context, key, _instance, populators):
+        def load_collection_from_joined_new_row(state, dict_, row):
+            # note this must unconditionally clear out any existing collection.
+            # an existing collection would be present only in the case of
+            # populate_existing().
+            collection = attributes.init_state_collection(state, dict_, key)
+            result_list = util.UniqueAppender(
+                collection, "append_without_event"
+            )
+            context.attributes[(state, key)] = result_list
+            inst = _instance(row)
+            if inst is not None:
+                result_list.append(inst)
+
+        def load_collection_from_joined_existing_row(state, dict_, row):
+            if (state, key) in context.attributes:
+                result_list = context.attributes[(state, key)]
+            else:
+                # appender_key can be absent from context.attributes
+                # with isnew=False when self-referential eager loading
+                # is used; the same instance may be present in two
+                # distinct sets of result columns
+                collection = attributes.init_state_collection(
+                    state, dict_, key
+                )
+                result_list = util.UniqueAppender(
+                    collection, "append_without_event"
+                )
+                context.attributes[(state, key)] = result_list
+            inst = _instance(row)
+            if inst is not None:
+                result_list.append(inst)
+
+        def load_collection_from_joined_exec(state, dict_, row):
+            _instance(row)
+
+        populators["new"].append(
+            (self.key, load_collection_from_joined_new_row)
+        )
+        populators["existing"].append(
+            (self.key, load_collection_from_joined_existing_row)
+        )
+        if context.invoke_all_eagers:
+            populators["eager"].append(
+                (self.key, load_collection_from_joined_exec)
+            )
+
+    def _create_scalar_loader(self, context, key, _instance, populators):
+        def load_scalar_from_joined_new_row(state, dict_, row):
+            # set a scalar object instance directly on the parent
+            # object, bypassing InstrumentedAttribute event handlers.
+            dict_[key] = _instance(row)
+
+        def load_scalar_from_joined_existing_row(state, dict_, row):
+            # call _instance on the row, even though the object has
+            # been created, so that we further descend into properties
+            existing = _instance(row)
+
+            # conflicting value already loaded, this shouldn't happen
+            if key in dict_:
+                if existing is not dict_[key]:
+                    util.warn(
+                        "Multiple rows returned with "
+                        "uselist=False for eagerly-loaded attribute '%s' "
+                        % self
+                    )
+            else:
+                # this case occurs when one row has multiple loads of the
+                # same entity (e.g. via aliasing), and one load has an
+                # attribute that the other doesn't.
+                dict_[key] = existing
+
+        def load_scalar_from_joined_exec(state, dict_, row):
+            _instance(row)
+
+        populators["new"].append((self.key, load_scalar_from_joined_new_row))
+        populators["existing"].append(
+            (self.key, load_scalar_from_joined_existing_row)
+        )
+        if context.invoke_all_eagers:
+            populators["eager"].append(
+                (self.key, load_scalar_from_joined_exec)
+            )
+
+
+@log.class_logger
+@relationships.RelationshipProperty.strategy_for(lazy="selectin")
+class SelectInLoader(PostLoader, util.MemoizedSlots):
+    __slots__ = (
+        "join_depth",
+        "omit_join",
+        "_parent_alias",
+        "_query_info",
+        "_fallback_query_info",
+    )
+
+    query_info = collections.namedtuple(
+        "queryinfo",
+        [
+            "load_only_child",
+            "load_with_join",
+            "in_expr",
+            "pk_cols",
+            "zero_idx",
+            "child_lookup_cols",
+        ],
+    )
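+    # rough summary of the query_info fields:
+    #   load_only_child - the many-to-one "omit join" case; rows are
+    #       loaded keyed by the related (child) primary key
+    #   load_with_join - the SELECT joins from an aliased parent entity
+    #   in_expr / pk_cols / zero_idx - how the IN criteria is constructed;
+    #       zero_idx is True when the key is a single column, in which case
+    #       parameter values are scalars rather than tuples
+    #   child_lookup_cols - parent-side columns used to derive child
+    #       primary key values, for the load_only_child case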
+
+    _chunksize = 500
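+    # e.g. 1200 parent rows produce three IN-based SELECT statements with
+    # 500, 500 and 200 parameters, respectively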
+
+    def __init__(self, parent, strategy_key):
+        super().__init__(parent, strategy_key)
+        self.join_depth = self.parent_property.join_depth
+        is_m2o = self.parent_property.direction is interfaces.MANYTOONE
+
+        if self.parent_property.omit_join is not None:
+            self.omit_join = self.parent_property.omit_join
+        else:
+            lazyloader = self.parent_property._get_strategy(
+                (("lazy", "select"),)
+            )
+            if is_m2o:
+                self.omit_join = lazyloader.use_get
+            else:
+                self.omit_join = self.parent._get_clause[0].compare(
+                    lazyloader._rev_lazywhere,
+                    use_proxies=True,
+                    compare_keys=False,
+                    equivalents=self.parent._equivalent_columns,
+                )
+
+        if self.omit_join:
+            if is_m2o:
+                self._query_info = self._init_for_omit_join_m2o()
+                self._fallback_query_info = self._init_for_join()
+            else:
+                self._query_info = self._init_for_omit_join()
+        else:
+            self._query_info = self._init_for_join()
+
+    def _init_for_omit_join(self):
+        pk_to_fk = dict(
+            self.parent_property._join_condition.local_remote_pairs
+        )
+        pk_to_fk.update(
+            (equiv, pk_to_fk[k])
+            for k in list(pk_to_fk)
+            for equiv in self.parent._equivalent_columns.get(k, ())
+        )
+
+        pk_cols = fk_cols = [
+            pk_to_fk[col] for col in self.parent.primary_key if col in pk_to_fk
+        ]
+        if len(fk_cols) > 1:
+            in_expr = sql.tuple_(*fk_cols)
+            zero_idx = False
+        else:
+            in_expr = fk_cols[0]
+            zero_idx = True
+
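+        # illustrative: a single FK column yields criteria like
+        # "addresses.user_id IN (?, ?, ...)"; a composite key yields
+        # "(x, y) IN ((?, ?), (?, ?), ...)" via tuple_()
+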
+        return self.query_info(False, False, in_expr, pk_cols, zero_idx, None)
+
+    def _init_for_omit_join_m2o(self):
+        pk_cols = self.mapper.primary_key
+        if len(pk_cols) > 1:
+            in_expr = sql.tuple_(*pk_cols)
+            zero_idx = False
+        else:
+            in_expr = pk_cols[0]
+            zero_idx = True
+
+        lazyloader = self.parent_property._get_strategy((("lazy", "select"),))
+        lookup_cols = [lazyloader._equated_columns[pk] for pk in pk_cols]
+
+        return self.query_info(
+            True, False, in_expr, pk_cols, zero_idx, lookup_cols
+        )
+
+    def _init_for_join(self):
+        self._parent_alias = AliasedClass(self.parent.class_)
+        pa_insp = inspect(self._parent_alias)
+        pk_cols = [
+            pa_insp._adapt_element(col) for col in self.parent.primary_key
+        ]
+        if len(pk_cols) > 1:
+            in_expr = sql.tuple_(*pk_cols)
+            zero_idx = False
+        else:
+            in_expr = pk_cols[0]
+            zero_idx = True
+        return self.query_info(False, True, in_expr, pk_cols, zero_idx, None)
+
+    def init_class_attribute(self, mapper):
+        self.parent_property._get_strategy(
+            (("lazy", "select"),)
+        ).init_class_attribute(mapper)
+
+    def create_row_processor(
+        self,
+        context,
+        query_entity,
+        path,
+        loadopt,
+        mapper,
+        result,
+        adapter,
+        populators,
+    ):
+        if context.refresh_state:
+            return self._immediateload_create_row_processor(
+                context,
+                query_entity,
+                path,
+                loadopt,
+                mapper,
+                result,
+                adapter,
+                populators,
+            )
+
+        (
+            effective_path,
+            run_loader,
+            execution_options,
+            recursion_depth,
+        ) = self._setup_for_recursion(
+            context, path, loadopt, join_depth=self.join_depth
+        )
+
+        if not run_loader:
+            return
+
+        if not context.compile_state.compile_options._enable_eagerloads:
+            return
+
+        if not self.parent.class_manager[self.key].impl.supports_population:
+            raise sa_exc.InvalidRequestError(
+                "'%s' does not support object "
+                "population - eager loading cannot be applied." % self
+            )
+
+        # a little dance here, as the "path" only semi-tracks the exact
+        # series of things we are loading; it does not tell us about
+        # with_polymorphic() and the like when it's at the root.  the
+        # initial MapperEntity is more accurate for this case.
+        if len(path) == 1:
+            if not orm_util._entity_isa(query_entity.entity_zero, self.parent):
+                return
+        elif not orm_util._entity_isa(path[-1], self.parent):
+            return
+
+        selectin_path = effective_path
+
+        path_w_prop = path[self.parent_property]
+
+        # build up a path indicating the path from the leftmost
+        # entity to the thing we're selectin loading.
+        with_poly_entity = path_w_prop.get(
+            context.attributes, "path_with_polymorphic", None
+        )
+        if with_poly_entity is not None:
+            effective_entity = inspect(with_poly_entity)
+        else:
+            effective_entity = self.entity
+
+        loading.PostLoad.callable_for_path(
+            context,
+            selectin_path,
+            self.parent,
+            self.parent_property,
+            self._load_for_path,
+            effective_entity,
+            loadopt,
+            recursion_depth,
+            execution_options,
+        )
+
+    def _load_for_path(
+        self,
+        context,
+        path,
+        states,
+        load_only,
+        effective_entity,
+        loadopt,
+        recursion_depth,
+        execution_options,
+    ):
+        if load_only and self.key not in load_only:
+            return
+
+        query_info = self._query_info
+
+        if query_info.load_only_child:
+            our_states = collections.defaultdict(list)
+            none_states = []
+
+            mapper = self.parent
+
+            for state, overwrite in states:
+                state_dict = state.dict
+                related_ident = tuple(
+                    mapper._get_state_attr_by_column(
+                        state,
+                        state_dict,
+                        lk,
+                        passive=attributes.PASSIVE_NO_FETCH,
+                    )
+                    for lk in query_info.child_lookup_cols
+                )
+                # if the loaded parent objects do not have the foreign key
+                # to the related item loaded, then degrade into the joined
+                # version of selectinload
+                if LoaderCallableStatus.PASSIVE_NO_RESULT in related_ident:
+                    query_info = self._fallback_query_info
+                    break
+
+                # organize states into lists keyed to particular foreign
+                # key values.
+                if None not in related_ident:
+                    our_states[related_ident].append(
+                        (state, state_dict, overwrite)
+                    )
+                else:
+                    # FK values that include None go into their own
+                    # collection, to be populated with the empty value later
+                    none_states.append((state, state_dict, overwrite))
+
+        # note the above conditional may have changed query_info
+        if not query_info.load_only_child:
+            our_states = [
+                (state.key[1], state, state.dict, overwrite)
+                for state, overwrite in states
+            ]
+
+        pk_cols = query_info.pk_cols
+        in_expr = query_info.in_expr
+
+        if not query_info.load_with_join:
+            # in "omit join" mode, the primary key column and the
+            # "in" expression are in terms of the related entity.  So
+            # if the related entity is polymorphic or otherwise aliased,
+            # we need to adapt our "pk_cols" and "in_expr" to that
+            # entity.   in non-"omit join" mode, these are against the
+            # parent entity and do not need adaption.
+            if effective_entity.is_aliased_class:
+                pk_cols = [
+                    effective_entity._adapt_element(col) for col in pk_cols
+                ]
+                in_expr = effective_entity._adapt_element(in_expr)
+
+        bundle_ent = orm_util.Bundle("pk", *pk_cols)
+        bundle_sql = bundle_ent.__clause_element__()
+
+        entity_sql = effective_entity.__clause_element__()
+        q = Select._create_raw_select(
+            _raw_columns=[bundle_sql, entity_sql],
+            _label_style=LABEL_STYLE_TABLENAME_PLUS_COL,
+            _compile_options=ORMCompileState.default_compile_options,
+            _propagate_attrs={
+                "compile_state_plugin": "orm",
+                "plugin_subject": effective_entity,
+            },
+        )
+
+        if not query_info.load_with_join:
+            # the Bundle we have in the "omit_join" case is against raw, non
+            # annotated columns, so to ensure the Query knows its primary
+            # entity, we add it explicitly.  If we made the Bundle against
+            # annotated columns, we hit a performance issue in this specific
+            # case, which is detailed in issue #4347.
+            q = q.select_from(effective_entity)
+        else:
+            # in the non-omit_join case, the Bundle is against the annotated/
+            # mapped column of the parent entity, but the #4347 issue does not
+            # occur in this case.
+            q = q.select_from(self._parent_alias).join(
+                getattr(self._parent_alias, self.parent_property.key).of_type(
+                    effective_entity
+                )
+            )
+
+        q = q.filter(in_expr.in_(sql.bindparam("primary_keys")))
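+        # "primary_keys" is an expanding IN parameter; a list of key values
+        # is passed per chunk at execution time, rendering e.g.
+        # "WHERE addresses.user_id IN (?, ?, ?)"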
+
+        # a test which exercises what these comments talk about is
+        # test_selectin_relations.py -> test_twolevel_selectin_w_polymorphic
+        #
+        # effective_entity above is given to us in terms of the cached
+        # statement, namely this one:
+        orig_query = context.compile_state.select_statement
+
+        # the actual statement that was requested is this one:
+        #  context_query = context.user_passed_query
+        #
+        # that's not the cached one, however.  So while it is of the identical
+        # structure, if it has entities like AliasedInsp, which we get from
+        # aliased() or with_polymorphic(), the AliasedInsp will likely be a
+        # different object identity each time, and will not match up
+        # hashing-wise to the corresponding AliasedInsp that's in the
+        # cached query, meaning it won't match on paths and loader lookups
+        # and loaders like this one will be skipped if it is used in options.
+        #
+        # as it turns out, standard loader options like selectinload(),
+        # lazyload() that have a path need
+        # to come from the cached query so that the AliasedInsp etc. objects
+        # that are in the query line up with the object that's in the path
+        # of the strategy object.  however, for other options like
+        # with_loader_criteria(), which doesn't have a path (it has a fixed
+        # entity) and needs access to the latest closure state in order to
+        # be correct, we need to use the uncached one.
+        #
+        # as of #8399 we let the loader option itself figure out what it
+        # wants to do given cached and uncached version of itself.
+
+        effective_path = path[self.parent_property]
+
+        if orig_query is context.user_passed_query:
+            new_options = orig_query._with_options
+        else:
+            cached_options = orig_query._with_options
+            uncached_options = context.user_passed_query._with_options
+
+            # propagate compile state options from the original query,
+            # updating their "extra_criteria" as necessary.
+            # note this will create a different cache key than the
+            # "orig" options if extra_criteria is present, because the copy
+            # of extra_criteria will have different bound parameters than
+            # those of the QueryableAttribute in the path
+            new_options = [
+                orig_opt._adapt_cached_option_to_uncached_option(
+                    context, uncached_opt
+                )
+                for orig_opt, uncached_opt in zip(
+                    cached_options, uncached_options
+                )
+            ]
+
+        if loadopt and loadopt._extra_criteria:
+            new_options += (
+                orm_util.LoaderCriteriaOption(
+                    effective_entity,
+                    loadopt._generate_extra_criteria(context),
+                ),
+            )
+
+        if recursion_depth is not None:
+            effective_path = effective_path._truncate_recursive()
+
+        q = q.options(*new_options)
+
+        q = q._update_compile_options({"_current_path": effective_path})
+        if context.populate_existing:
+            q = q.execution_options(populate_existing=True)
+
+        if self.parent_property.order_by:
+            if not query_info.load_with_join:
+                eager_order_by = self.parent_property.order_by
+                if effective_entity.is_aliased_class:
+                    eager_order_by = [
+                        effective_entity._adapt_element(elem)
+                        for elem in eager_order_by
+                    ]
+                q = q.order_by(*eager_order_by)
+            else:
+
+                def _setup_outermost_orderby(compile_context):
+                    compile_context.eager_order_by += tuple(
+                        util.to_list(self.parent_property.order_by)
+                    )
+
+                q = q._add_context_option(
+                    _setup_outermost_orderby, self.parent_property
+                )
+
+        if query_info.load_only_child:
+            self._load_via_child(
+                our_states,
+                none_states,
+                query_info,
+                q,
+                context,
+                execution_options,
+            )
+        else:
+            self._load_via_parent(
+                our_states, query_info, q, context, execution_options
+            )
+
+    def _load_via_child(
+        self,
+        our_states,
+        none_states,
+        query_info,
+        q,
+        context,
+        execution_options,
+    ):
+        uselist = self.uselist
+
+        # this sort is really for the benefit of the unit tests
+        our_keys = sorted(our_states)
+        while our_keys:
+            chunk = our_keys[0 : self._chunksize]
+            our_keys = our_keys[self._chunksize :]
+            data = {
+                k: v
+                for k, v in context.session.execute(
+                    q,
+                    params={
+                        "primary_keys": [
+                            key[0] if query_info.zero_idx else key
+                            for key in chunk
+                        ]
+                    },
+                    execution_options=execution_options,
+                ).unique()
+            }
+
+            for key in chunk:
+                # given a real foreign key and no concurrent changes to the
+                # DB while running this method, "key" is always present in
+                # data.  However, for primaryjoins without real foreign
+                # keys, a non-None key value may still refer to no related
+                # object.
+                related_obj = data.get(key, None)
+                for state, dict_, overwrite in our_states[key]:
+                    if not overwrite and self.key in dict_:
+                        continue
+
+                    state.get_impl(self.key).set_committed_value(
+                        state,
+                        dict_,
+                        related_obj if not uselist else [related_obj],
+                    )
+        # populate none states with empty value / collection
+        for state, dict_, overwrite in none_states:
+            if not overwrite and self.key in dict_:
+                continue
+
+            # note it's OK if this is a uselist=True attribute, the empty
+            # collection will be populated
+            state.get_impl(self.key).set_committed_value(state, dict_, None)
+
+    def _load_via_parent(
+        self, our_states, query_info, q, context, execution_options
+    ):
+        uselist = self.uselist
+        _empty_result = () if uselist else None
+
+        while our_states:
+            chunk = our_states[0 : self._chunksize]
+            our_states = our_states[self._chunksize :]
+
+            primary_keys = [
+                key[0] if query_info.zero_idx else key
+                for key, state, state_dict, overwrite in chunk
+            ]
+
+            data = collections.defaultdict(list)
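+            # result rows are (pk, entity) pairs via the Bundle; group the
+            # entities into lists keyed by parent primary key, e.g.
+            # [(1, a1), (1, a2), (2, a3)] -> {1: [a1, a2], 2: [a3]}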
+            for k, v in itertools.groupby(
+                context.session.execute(
+                    q,
+                    params={"primary_keys": primary_keys},
+                    execution_options=execution_options,
+                ).unique(),
+                lambda x: x[0],
+            ):
+                data[k].extend(vv[1] for vv in v)
+
+            for key, state, state_dict, overwrite in chunk:
+                if not overwrite and self.key in state_dict:
+                    continue
+
+                collection = data.get(key, _empty_result)
+
+                if not uselist and collection:
+                    if len(collection) > 1:
+                        util.warn(
+                            "Multiple rows returned with "
+                            "uselist=False for eagerly-loaded "
+                            "attribute '%s' " % self
+                        )
+                    state.get_impl(self.key).set_committed_value(
+                        state, state_dict, collection[0]
+                    )
+                else:
+                    # note that setting an empty tuple on a uselist=False
+                    # attribute sets the value to None
+                    state.get_impl(self.key).set_committed_value(
+                        state, state_dict, collection
+                    )
+
+
+def single_parent_validator(desc, prop):
+    def _do_check(state, value, oldvalue, initiator):
+        if value is not None and initiator.key == prop.key:
+            hasparent = initiator.hasparent(attributes.instance_state(value))
+            if hasparent and oldvalue is not value:
+                raise sa_exc.InvalidRequestError(
+                    "Instance %s is already associated with an instance "
+                    "of %s via its %s attribute, and is only allowed a "
+                    "single parent."
+                    % (orm_util.instance_str(value), state.class_, prop),
+                    code="bbf1",
+                )
+        return value
+
+    def append(state, value, initiator):
+        return _do_check(state, value, None, initiator)
+
+    def set_(state, value, oldvalue, initiator):
+        return _do_check(state, value, oldvalue, initiator)
+
+    event.listen(
+        desc, "append", append, raw=True, retval=True, active_history=True
+    )
+    event.listen(desc, "set", set_, raw=True, retval=True, active_history=True)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/strategy_options.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/strategy_options.py
new file mode 100644
index 00000000..f4f292ee
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/strategy_options.py
@@ -0,0 +1,2550 @@
+# orm/strategy_options.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+"""
+
+"""
+
+from __future__ import annotations
+
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import Iterable
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import TypeVar
+from typing import Union
+
+from . import util as orm_util
+from ._typing import insp_is_aliased_class
+from ._typing import insp_is_attribute
+from ._typing import insp_is_mapper
+from ._typing import insp_is_mapper_property
+from .attributes import QueryableAttribute
+from .base import InspectionAttr
+from .interfaces import LoaderOption
+from .path_registry import _DEFAULT_TOKEN
+from .path_registry import _StrPathToken
+from .path_registry import _WILDCARD_TOKEN
+from .path_registry import AbstractEntityRegistry
+from .path_registry import path_is_property
+from .path_registry import PathRegistry
+from .path_registry import TokenRegistry
+from .util import _orm_full_deannotate
+from .util import AliasedInsp
+from .. import exc as sa_exc
+from .. import inspect
+from .. import util
+from ..sql import and_
+from ..sql import cache_key
+from ..sql import coercions
+from ..sql import roles
+from ..sql import traversals
+from ..sql import visitors
+from ..sql.base import _generative
+from ..util.typing import Final
+from ..util.typing import Literal
+from ..util.typing import Self
+
+_RELATIONSHIP_TOKEN: Final[Literal["relationship"]] = "relationship"
+_COLUMN_TOKEN: Final[Literal["column"]] = "column"
+
+_FN = TypeVar("_FN", bound="Callable[..., Any]")
+
+if typing.TYPE_CHECKING:
+    from ._typing import _EntityType
+    from ._typing import _InternalEntityType
+    from .context import _MapperEntity
+    from .context import ORMCompileState
+    from .context import QueryContext
+    from .interfaces import _StrategyKey
+    from .interfaces import MapperProperty
+    from .interfaces import ORMOption
+    from .mapper import Mapper
+    from .path_registry import _PathRepresentation
+    from ..sql._typing import _ColumnExpressionArgument
+    from ..sql._typing import _FromClauseArgument
+    from ..sql.cache_key import _CacheKeyTraversalType
+    from ..sql.cache_key import CacheKey
+
+
+_AttrType = Union[Literal["*"], "QueryableAttribute[Any]"]
+
+_WildcardKeyType = Literal["relationship", "column"]
+_StrategySpec = Dict[str, Any]
+_OptsType = Dict[str, Any]
+_AttrGroupType = Tuple[_AttrType, ...]
+
+
+class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
+    __slots__ = ("propagate_to_loaders",)
+
+    _is_strategy_option = True
+    propagate_to_loaders: bool
+
+    def contains_eager(
+        self,
+        attr: _AttrType,
+        alias: Optional[_FromClauseArgument] = None,
+        _is_chain: bool = False,
+        _propagate_to_loaders: bool = False,
+    ) -> Self:
+        r"""Indicate that the given attribute should be eagerly loaded from
+        columns stated manually in the query.
+
+        This function is part of the :class:`_orm.Load` interface and supports
+        both method-chained and standalone operation.
+
+        The option is used in conjunction with an explicit join that loads
+        the desired rows, i.e.::
+
+            sess.query(Order).join(Order.user).options(contains_eager(Order.user))
+
+        The above query would join from the ``Order`` entity to its related
+        ``User`` entity, and the returned ``Order`` objects would have the
+        ``Order.user`` attribute pre-populated.
+
+        It may also be used for customizing the entries in an eagerly loaded
+        collection; queries will normally want to use the
+        :ref:`orm_queryguide_populate_existing` execution option assuming the
+        primary collection of parent objects may already have been loaded::
+
+            sess.query(User).join(User.addresses).filter(
+                Address.email_address.like("%@aol.com")
+            ).options(contains_eager(User.addresses)).populate_existing()
+
+        See the section :ref:`contains_eager` for complete usage details.
+
+        .. seealso::
+
+            :ref:`loading_toplevel`
+
+            :ref:`contains_eager`
+
+        """
+        if alias is not None:
+            if not isinstance(alias, str):
+                coerced_alias = coercions.expect(roles.FromClauseRole, alias)
+            else:
+                util.warn_deprecated(
+                    "Passing a string name for the 'alias' argument to "
+                    "'contains_eager()` is deprecated, and will not work in a "
+                    "future release.  Please use a sqlalchemy.alias() or "
+                    "sqlalchemy.orm.aliased() construct.",
+                    version="1.4",
+                )
+                coerced_alias = alias
+
+        elif getattr(attr, "_of_type", None):
+            assert isinstance(attr, QueryableAttribute)
+            ot: Optional[_InternalEntityType[Any]] = inspect(attr._of_type)
+            assert ot is not None
+            coerced_alias = ot.selectable
+        else:
+            coerced_alias = None
+
+        cloned = self._set_relationship_strategy(
+            attr,
+            {"lazy": "joined"},
+            propagate_to_loaders=_propagate_to_loaders,
+            opts={"eager_from_alias": coerced_alias},
+            _reconcile_to_other=True if _is_chain else None,
+        )
+        return cloned
+
+    def load_only(self, *attrs: _AttrType, raiseload: bool = False) -> Self:
+        r"""Indicate that for a particular entity, only the given list
+        of column-based attribute names should be loaded; all others will be
+        deferred.
+
+        This function is part of the :class:`_orm.Load` interface and supports
+        both method-chained and standalone operation.
+
+        Example - given a class ``User``, load only the ``name`` and
+        ``fullname`` attributes::
+
+            session.query(User).options(load_only(User.name, User.fullname))
+
+        Example - given a relationship ``User.addresses -> Address``, specify
+        subquery loading for the ``User.addresses`` collection, but on each
+        ``Address`` object load only the ``email_address`` attribute::
+
+            session.query(User).options(
+                subqueryload(User.addresses).load_only(Address.email_address)
+            )
+
+        For a statement that has multiple entities, the lead entity can be
+        specifically referred to using the :class:`_orm.Load` constructor::
+
+            stmt = (
+                select(User, Address)
+                .join(User.addresses)
+                .options(
+                    Load(User).load_only(User.name, User.fullname),
+                    Load(Address).load_only(Address.email_address),
+                )
+            )
+
+        When used together with the
+        :ref:`populate_existing <orm_queryguide_populate_existing>`
+        execution option, only the attributes listed will be refreshed.
+
+        :param \*attrs: Attributes to be loaded, all others will be deferred.
+
+        :param raiseload: raise :class:`.InvalidRequestError` rather than
+         lazy loading a value when a deferred attribute is accessed. Used
+         to prevent unwanted SQL from being emitted.
+
+         .. versionadded:: 2.0
+
+        .. seealso::
+
+            :ref:`orm_queryguide_column_deferral` - in the
+            :ref:`queryguide_toplevel`
+
+        """
+        cloned = self._set_column_strategy(
+            attrs,
+            {"deferred": False, "instrument": True},
+        )
+
+        wildcard_strategy = {"deferred": True, "instrument": True}
+        if raiseload:
+            wildcard_strategy["raiseload"] = True
+
+        cloned = cloned._set_column_strategy(
+            ("*",),
+            wildcard_strategy,
+        )
+        return cloned
+
+    def joinedload(
+        self,
+        attr: _AttrType,
+        innerjoin: Optional[bool] = None,
+    ) -> Self:
+        """Indicate that the given attribute should be loaded using joined
+        eager loading.
+
+        This function is part of the :class:`_orm.Load` interface and supports
+        both method-chained and standalone operation.
+
+        examples::
+
+            # joined-load the "orders" collection on "User"
+            select(User).options(joinedload(User.orders))
+
+            # joined-load Order.items and then Item.keywords
+            select(Order).options(joinedload(Order.items).joinedload(Item.keywords))
+
+            # lazily load Order.items, but when Items are loaded,
+            # joined-load the keywords collection
+            select(Order).options(lazyload(Order.items).joinedload(Item.keywords))
+
+        :param innerjoin: if ``True``, indicates that the joined eager load
+         should use an inner join instead of the default of left outer join::
+
+            select(Order).options(joinedload(Order.user, innerjoin=True))
+
+        In order to chain multiple eager joins together where some may be
+        OUTER and others INNER, right-nested joins are used to link them::
+
+            select(A).options(
+                joinedload(A.bs, innerjoin=False).joinedload(B.cs, innerjoin=True)
+            )
+
+        The above query, linking A.bs via "outer" join and B.cs via "inner"
+        join would render the joins as "a LEFT OUTER JOIN (b JOIN c)". When
+        using older versions of SQLite (< 3.7.16), this form of JOIN is
+        translated to use full subqueries as this syntax is otherwise not
+        directly supported.
+
+        The ``innerjoin`` flag can also be stated with the term ``"unnested"``.
+        This indicates that an INNER JOIN should be used, *unless* the join
+        is linked to a LEFT OUTER JOIN to the left, in which case it
+        will render as LEFT OUTER JOIN.  For example, supposing ``A.bs``
+        is an outerjoin::
+
+            select(A).options(joinedload(A.bs).joinedload(B.cs, innerjoin="unnested"))
+
+        The above join will render as "a LEFT OUTER JOIN b LEFT OUTER JOIN c",
+        rather than as "a LEFT OUTER JOIN (b JOIN c)".
+
+        .. note:: The "unnested" flag does **not** affect the JOIN rendered
+            from a many-to-many association table, e.g. a table configured as
+            :paramref:`_orm.relationship.secondary`, to the target table; for
+            correctness of results, these joins are always INNER and are
+            therefore right-nested if linked to an OUTER join.
+
+        .. note::
+
+            The joins produced by :func:`_orm.joinedload` are **anonymously
+            aliased**. The criteria by which the join proceeds cannot be
+            modified, nor can the ORM-enabled :class:`_sql.Select` or legacy
+            :class:`_query.Query` refer to these joins in any way, including
+            ordering. See :ref:`zen_of_eager_loading` for further detail.
+
+            To produce a specific SQL JOIN which is explicitly available, use
+            :meth:`_sql.Select.join` and :meth:`_query.Query.join`. To combine
+            explicit JOINs with eager loading of collections, use
+            :func:`_orm.contains_eager`; see :ref:`contains_eager`.
+
+        .. seealso::
+
+            :ref:`loading_toplevel`
+
+            :ref:`joined_eager_loading`
+
+        """  # noqa: E501
+        loader = self._set_relationship_strategy(
+            attr,
+            {"lazy": "joined"},
+            opts=(
+                {"innerjoin": innerjoin}
+                if innerjoin is not None
+                else util.EMPTY_DICT
+            ),
+        )
+        return loader
+
+    def subqueryload(self, attr: _AttrType) -> Self:
+        """Indicate that the given attribute should be loaded using
+        subquery eager loading.
+
+        This function is part of the :class:`_orm.Load` interface and supports
+        both method-chained and standalone operation.
+
+        examples::
+
+            # subquery-load the "orders" collection on "User"
+            select(User).options(subqueryload(User.orders))
+
+            # subquery-load Order.items and then Item.keywords
+            select(Order).options(
+                subqueryload(Order.items).subqueryload(Item.keywords)
+            )
+
+            # lazily load Order.items, but when Items are loaded,
+            # subquery-load the keywords collection
+            select(Order).options(lazyload(Order.items).subqueryload(Item.keywords))
+
+        .. seealso::
+
+            :ref:`loading_toplevel`
+
+            :ref:`subquery_eager_loading`
+
+        """
+        return self._set_relationship_strategy(attr, {"lazy": "subquery"})
+
+    def selectinload(
+        self,
+        attr: _AttrType,
+        recursion_depth: Optional[int] = None,
+    ) -> Self:
+        """Indicate that the given attribute should be loaded using
+        SELECT IN eager loading.
+
+        This function is part of the :class:`_orm.Load` interface and supports
+        both method-chained and standalone operation.
+
+        examples::
+
+            # selectin-load the "orders" collection on "User"
+            select(User).options(selectinload(User.orders))
+
+            # selectin-load Order.items and then Item.keywords
+            select(Order).options(
+                selectinload(Order.items).selectinload(Item.keywords)
+            )
+
+            # lazily load Order.items, but when Items are loaded,
+            # selectin-load the keywords collection
+            select(Order).options(lazyload(Order.items).selectinload(Item.keywords))
+
+        :param recursion_depth: optional int; when set to a positive integer
+         in conjunction with a self-referential relationship,
+         indicates "selectin" loading will continue that many levels deep
+         automatically until no items are found.
+
+         .. note:: The :paramref:`_orm.selectinload.recursion_depth` option
+            currently supports only self-referential relationships.  There
+            is not yet an option to automatically traverse recursive structures
+            with more than one relationship involved.
+
+            Additionally, the :paramref:`_orm.selectinload.recursion_depth`
+            parameter is new and experimental and should be treated as "alpha"
+            status for the 2.0 series.
+
+         .. versionadded:: 2.0 added
+            :paramref:`_orm.selectinload.recursion_depth`
+
+
+        .. seealso::
+
+            :ref:`loading_toplevel`
+
+            :ref:`selectin_eager_loading`
+
+        """
+        return self._set_relationship_strategy(
+            attr,
+            {"lazy": "selectin"},
+            opts={"recursion_depth": recursion_depth},
+        )
+
+    def lazyload(self, attr: _AttrType) -> Self:
+        """Indicate that the given attribute should be loaded using "lazy"
+        loading.
+
+        This function is part of the :class:`_orm.Load` interface and supports
+        both method-chained and standalone operation.
+
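+        e.g., using the hypothetical ``Order``/``Item`` mappings from the
+        examples above::
+
+            # set Order.items to load lazily upon first access, overriding
+            # any eager loading configured on the relationship
+            select(Order).options(lazyload(Order.items))
+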
+        .. seealso::
+
+            :ref:`loading_toplevel`
+
+            :ref:`lazy_loading`
+
+        """
+        return self._set_relationship_strategy(attr, {"lazy": "select"})
+
+    def immediateload(
+        self,
+        attr: _AttrType,
+        recursion_depth: Optional[int] = None,
+    ) -> Self:
+        """Indicate that the given attribute should be loaded using
+        an immediate load with a per-attribute SELECT statement.
+
+        The load is achieved using the "lazyloader" strategy and does not
+        fire off any additional eager loaders.
+
+        The :func:`.immediateload` option is superseded in general
+        by the :func:`.selectinload` option, which performs the same task
+        more efficiently by emitting a SELECT for all loaded objects.
+
+        This function is part of the :class:`_orm.Load` interface and supports
+        both method-chained and standalone operation.
+
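+        e.g., a minimal sketch using the hypothetical ``User`` mapping from
+        the examples above::
+
+            # emit an additional SELECT for the orders collection of each
+            # User as that User is loaded
+            select(User).options(immediateload(User.orders))
+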
+        :param recursion_depth: optional int; when set to a positive integer
+         in conjunction with a self-referential relationship,
+         indicates the immediate loading will continue that many levels
+         deep automatically until no items are found.
+
+         .. note:: The :paramref:`_orm.immediateload.recursion_depth` option
+            currently supports only self-referential relationships.  There
+            is not yet an option to automatically traverse recursive
+            structures with more than one relationship involved.
+
+         .. warning:: This parameter is new and experimental and should be
+            treated as "alpha" status.
+
+         .. versionadded:: 2.0 added
+            :paramref:`_orm.immediateload.recursion_depth`
+
+
+        .. seealso::
+
+            :ref:`loading_toplevel`
+
+            :ref:`selectin_eager_loading`
+
+        """
+        loader = self._set_relationship_strategy(
+            attr,
+            {"lazy": "immediate"},
+            opts={"recursion_depth": recursion_depth},
+        )
+        return loader
+
+    def noload(self, attr: _AttrType) -> Self:
+        """Indicate that the given relationship attribute should remain
+        unloaded.
+
+        When accessed, the relationship attribute will return ``None``
+        without producing any loading effect.
+
+        This function is part of the :class:`_orm.Load` interface and supports
+        both method-chained and standalone operation.
+
+        :func:`_orm.noload` applies to :func:`_orm.relationship` attributes
+        only.
+
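+        e.g., a minimal sketch using the hypothetical ``User`` mapping from
+        the examples above::
+
+            # leave User.orders unloaded; no SQL is emitted for it
+            select(User).options(noload(User.orders))
+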
+        .. legacy:: The :func:`_orm.noload` option is **legacy**.  It
+           forces collections to be empty, which invariably leads to
+           non-intuitive and difficult-to-predict results.  There are no
+           legitimate uses for this option in modern SQLAlchemy.
+
+        .. seealso::
+
+            :ref:`loading_toplevel`
+
+        """
+
+        return self._set_relationship_strategy(attr, {"lazy": "noload"})
+
+    def raiseload(self, attr: _AttrType, sql_only: bool = False) -> Self:
+        """Indicate that the given attribute should raise an error if accessed.
+
+        A relationship attribute configured with :func:`_orm.raiseload` will
+        raise an :exc:`~sqlalchemy.exc.InvalidRequestError` upon access. This
+        is typically useful when an application wants to ensure that all
+        relationship attributes accessed in a particular context have already
+        been loaded via eager loading.  Instead of having to read through SQL
+        logs to ensure lazy loads aren't occurring, this strategy will cause
+        them to raise immediately.
+
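+        e.g., a minimal sketch using the hypothetical ``User`` mapping from
+        the examples above::
+
+            # raise on any attempt to lazy load User.orders
+            select(User).options(raiseload(User.orders))
+
+            # raise only if the lazy load would actually emit SQL
+            select(User).options(raiseload(User.orders, sql_only=True))
+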
+        :func:`_orm.raiseload` applies to :func:`_orm.relationship` attributes
+        only. In order to apply raise-on-SQL behavior to a column-based
+        attribute, use the :paramref:`.orm.defer.raiseload` parameter on the
+        :func:`.defer` loader option.
+
+        :param sql_only: if True, raise only if the lazy load would emit SQL,
+         but not if it is only checking the identity map, or determining that
+         the related value should just be None due to missing keys. When False,
+         the strategy will raise for all varieties of relationship loading.
+
+        This function is part of the :class:`_orm.Load` interface and supports
+        both method-chained and standalone operation.
+
+        .. seealso::
+
+            :ref:`loading_toplevel`
+
+            :ref:`prevent_lazy_with_raiseload`
+
+            :ref:`orm_queryguide_deferred_raiseload`
+
+        """
+
+        return self._set_relationship_strategy(
+            attr, {"lazy": "raise_on_sql" if sql_only else "raise"}
+        )
+
+    def defaultload(self, attr: _AttrType) -> Self:
+        """Indicate an attribute should load using its predefined loader style.
+
+        The behavior of this loading option is to not change the current
+        loading style of the attribute, meaning that the previously configured
+        one is used or, if no previous style was selected, the default
+        loading will be used.
+
+        This method is used to link to other loader options further into
+        a chain of attributes without altering the loader style of the links
+        along the chain.  For example, to set joined eager loading for an
+        element of an element::
+
+            session.query(MyClass).options(
+                defaultload(MyClass.someattribute).joinedload(
+                    MyOtherClass.someotherattribute
+                )
+            )
+
+        :func:`.defaultload` is also useful for setting column-level options
+        on a related class, namely :func:`.defer` and :func:`.undefer`::
+
+            session.scalars(
+                select(MyClass).options(
+                    defaultload(MyClass.someattribute)
+                    .defer(MyOtherClass.some_column)
+                    .undefer(MyOtherClass.some_other_column)
+                )
+            )
+
+        .. seealso::
+
+            :ref:`orm_queryguide_relationship_sub_options`
+
+            :meth:`_orm.Load.options`
+
+        """
+        return self._set_relationship_strategy(attr, None)
+
+    def defer(self, key: _AttrType, raiseload: bool = False) -> Self:
+        r"""Indicate that the given column-oriented attribute should be
+        deferred, e.g. not loaded until accessed.
+
+        This function is part of the :class:`_orm.Load` interface and supports
+        both method-chained and standalone operation.
+
+        e.g.::
+
+            from sqlalchemy.orm import defer
+
+            session.query(MyClass).options(
+                defer(MyClass.attribute_one), defer(MyClass.attribute_two)
+            )
+
+        To specify a deferred load of an attribute on a related class,
+        the path can be specified one token at a time, specifying the loading
+        style for each link along the chain.  To leave the loading style
+        for a link unchanged, use :func:`_orm.defaultload`::
+
+            session.query(MyClass).options(
+                defaultload(MyClass.someattr).defer(RelatedClass.some_column)
+            )
+
+        Multiple deferral options related to a relationship can be bundled
+        at once using :meth:`_orm.Load.options`::
+
+
+            select(MyClass).options(
+                defaultload(MyClass.someattr).options(
+                    defer(RelatedClass.some_column),
+                    defer(RelatedClass.some_other_column),
+                    defer(RelatedClass.another_column),
+                )
+            )
+
+        :param key: Attribute to be deferred.
+
+        :param raiseload: raise :class:`.InvalidRequestError` rather than
+         lazy loading a value when the deferred attribute is accessed. Used
+         to prevent unwanted SQL from being emitted.
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :ref:`orm_queryguide_column_deferral` - in the
+            :ref:`queryguide_toplevel`
+
+            :func:`_orm.load_only`
+
+            :func:`_orm.undefer`
+
+        """
+        strategy = {"deferred": True, "instrument": True}
+        if raiseload:
+            strategy["raiseload"] = True
+        return self._set_column_strategy((key,), strategy)
+
+    def undefer(self, key: _AttrType) -> Self:
+        r"""Indicate that the given column-oriented attribute should be
+        undeferred, e.g. specified within the SELECT statement of the entity
+        as a whole.
+
+        The column being undeferred is typically set up on the mapping as a
+        :func:`.deferred` attribute.
+
+        This function is part of the :class:`_orm.Load` interface and supports
+        both method-chained and standalone operation.
+
+        Examples::
+
+            # undefer two columns
+            session.query(MyClass).options(
+                undefer(MyClass.col1), undefer(MyClass.col2)
+            )
+
+            # undefer all columns specific to a single class using Load + *
+            session.query(MyClass, MyOtherClass).options(Load(MyClass).undefer("*"))
+
+            # undefer a column on a related object
+            select(MyClass).options(defaultload(MyClass.items).undefer(Item.text))
+
+        :param key: Attribute to be undeferred.
+
+        .. seealso::
+
+            :ref:`orm_queryguide_column_deferral` - in the
+            :ref:`queryguide_toplevel`
+
+            :func:`_orm.defer`
+
+            :func:`_orm.undefer_group`
+
+        """  # noqa: E501
+        return self._set_column_strategy(
+            (key,), {"deferred": False, "instrument": True}
+        )
+
+    def undefer_group(self, name: str) -> Self:
+        """Indicate that columns within the given deferred group name should be
+        undeferred.
+
+        The columns being undeferred are set up on the mapping as
+        :func:`.deferred` attributes and include a "group" name.
+
+        E.g.::
+
+            session.query(MyClass).options(undefer_group("large_attrs"))
+
+        To undefer a group of attributes on a related entity, the path can be
+        spelled out using relationship loader options, such as
+        :func:`_orm.defaultload`::
+
+            select(MyClass).options(
+                defaultload("someattr").undefer_group("large_attrs")
+            )
+
+        .. seealso::
+
+            :ref:`orm_queryguide_column_deferral` - in the
+            :ref:`queryguide_toplevel`
+
+            :func:`_orm.defer`
+
+            :func:`_orm.undefer`
+
+        """
+        return self._set_column_strategy(
+            (_WILDCARD_TOKEN,), None, {f"undefer_group_{name}": True}
+        )
+
+    def with_expression(
+        self,
+        key: _AttrType,
+        expression: _ColumnExpressionArgument[Any],
+    ) -> Self:
+        r"""Apply an ad-hoc SQL expression to a "deferred expression"
+        attribute.
+
+        This option is used in conjunction with the
+        :func:`_orm.query_expression` mapper-level construct that indicates an
+        attribute which should be the target of an ad-hoc SQL expression.
+
+        E.g.::
+
+            stmt = select(SomeClass).options(
+                with_expression(SomeClass.x_y_expr, SomeClass.x + SomeClass.y)
+            )
+
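+        where ``SomeClass.x_y_expr`` would be established in the mapping
+        with :func:`_orm.query_expression`, e.g. (a minimal sketch)::
+
+            class SomeClass(Base):
+                # ...
+
+                x_y_expr = query_expression()
+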
+        .. versionadded:: 1.2
+
+        :param key: Attribute to be populated.
+
+        :param expression: SQL expression to be applied to the attribute.
+
+        .. seealso::
+
+            :ref:`orm_queryguide_with_expression` - background and usage
+            examples
+
+        """
+
+        expression = _orm_full_deannotate(
+            coercions.expect(roles.LabeledColumnExprRole, expression)
+        )
+
+        return self._set_column_strategy(
+            (key,), {"query_expression": True}, extra_criteria=(expression,)
+        )
+
+    def selectin_polymorphic(self, classes: Iterable[Type[Any]]) -> Self:
+        """Indicate an eager load should take place for all attributes
+        specific to a subclass.
+
+        This uses an additional SELECT with IN against all matched primary
+        key values, and is the per-query analogue to the ``"selectin"``
+        setting on the :paramref:`.mapper.polymorphic_load` parameter.
+
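+        e.g., assuming a hypothetical ``Employee`` hierarchy with
+        ``Manager`` and ``Engineer`` subclasses::
+
+            # emit an additional SELECT ... WHERE id IN (...) per subclass
+            select(Employee).options(
+                selectin_polymorphic(Employee, [Manager, Engineer])
+            )
+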
+        .. versionadded:: 1.2
+
+        .. seealso::
+
+            :ref:`polymorphic_selectin`
+
+        """
+        self = self._set_class_strategy(
+            {"selectinload_polymorphic": True},
+            opts={
+                "entities": tuple(
+                    sorted((inspect(cls) for cls in classes), key=id)
+                )
+            },
+        )
+        return self
+
+    @overload
+    def _coerce_strat(self, strategy: _StrategySpec) -> _StrategyKey: ...
+
+    @overload
+    def _coerce_strat(self, strategy: Literal[None]) -> None: ...
+
+    def _coerce_strat(
+        self, strategy: Optional[_StrategySpec]
+    ) -> Optional[_StrategyKey]:
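+        # normalize a strategy spec dict such as {"lazy": "joined"} into a
+        # hashable, order-independent key such as (("lazy", "joined"),)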
+        if strategy is not None:
+            strategy_key = tuple(sorted(strategy.items()))
+        else:
+            strategy_key = None
+        return strategy_key
+
+    @_generative
+    def _set_relationship_strategy(
+        self,
+        attr: _AttrType,
+        strategy: Optional[_StrategySpec],
+        propagate_to_loaders: bool = True,
+        opts: Optional[_OptsType] = None,
+        _reconcile_to_other: Optional[bool] = None,
+    ) -> Self:
+        strategy_key = self._coerce_strat(strategy)
+
+        self._clone_for_bind_strategy(
+            (attr,),
+            strategy_key,
+            _RELATIONSHIP_TOKEN,
+            opts=opts,
+            propagate_to_loaders=propagate_to_loaders,
+            reconcile_to_other=_reconcile_to_other,
+        )
+        return self
+
+    @_generative
+    def _set_column_strategy(
+        self,
+        attrs: Tuple[_AttrType, ...],
+        strategy: Optional[_StrategySpec],
+        opts: Optional[_OptsType] = None,
+        extra_criteria: Optional[Tuple[Any, ...]] = None,
+    ) -> Self:
+        strategy_key = self._coerce_strat(strategy)
+
+        self._clone_for_bind_strategy(
+            attrs,
+            strategy_key,
+            _COLUMN_TOKEN,
+            opts=opts,
+            attr_group=attrs,
+            extra_criteria=extra_criteria,
+        )
+        return self
+
+    @_generative
+    def _set_generic_strategy(
+        self,
+        attrs: Tuple[_AttrType, ...],
+        strategy: _StrategySpec,
+        _reconcile_to_other: Optional[bool] = None,
+    ) -> Self:
+        strategy_key = self._coerce_strat(strategy)
+        self._clone_for_bind_strategy(
+            attrs,
+            strategy_key,
+            None,
+            propagate_to_loaders=True,
+            reconcile_to_other=_reconcile_to_other,
+        )
+        return self
+
+    @_generative
+    def _set_class_strategy(
+        self, strategy: _StrategySpec, opts: _OptsType
+    ) -> Self:
+        strategy_key = self._coerce_strat(strategy)
+
+        self._clone_for_bind_strategy(None, strategy_key, None, opts=opts)
+        return self
+
+    def _apply_to_parent(self, parent: Load) -> None:
+        """apply this :class:`_orm._AbstractLoad` object as a sub-option o
+        a :class:`_orm.Load` object.
+
+        Implementation is provided by subclasses.
+
+        """
+        raise NotImplementedError()
+
+    def options(self, *opts: _AbstractLoad) -> Self:
+        r"""Apply a series of options as sub-options to this
+        :class:`_orm._AbstractLoad` object.
+
+        Implementation is provided by subclasses.
+
+        """
+        raise NotImplementedError()
+
+    def _clone_for_bind_strategy(
+        self,
+        attrs: Optional[Tuple[_AttrType, ...]],
+        strategy: Optional[_StrategyKey],
+        wildcard_key: Optional[_WildcardKeyType],
+        opts: Optional[_OptsType] = None,
+        attr_group: Optional[_AttrGroupType] = None,
+        propagate_to_loaders: bool = True,
+        reconcile_to_other: Optional[bool] = None,
+        extra_criteria: Optional[Tuple[Any, ...]] = None,
+    ) -> Self:
+        raise NotImplementedError()
+
+    def process_compile_state_replaced_entities(
+        self,
+        compile_state: ORMCompileState,
+        mapper_entities: Sequence[_MapperEntity],
+    ) -> None:
+        if not compile_state.compile_options._enable_eagerloads:
+            return
+
+        # process is being run here so that the options given are validated
+        # against what the lead entities were, as well as to accommodate
+        # the entities having been replaced with equivalents
+        self._process(
+            compile_state,
+            mapper_entities,
+            not bool(compile_state.current_path),
+        )
+
+    def process_compile_state(self, compile_state: ORMCompileState) -> None:
+        if not compile_state.compile_options._enable_eagerloads:
+            return
+
+        self._process(
+            compile_state,
+            compile_state._lead_mapper_entities,
+            not bool(compile_state.current_path)
+            and not compile_state.compile_options._for_refresh_state,
+        )
+
+    def _process(
+        self,
+        compile_state: ORMCompileState,
+        mapper_entities: Sequence[_MapperEntity],
+        raiseerr: bool,
+    ) -> None:
+        """implemented by subclasses"""
+        raise NotImplementedError()
+
+    @classmethod
+    def _chop_path(
+        cls,
+        to_chop: _PathRepresentation,
+        path: PathRegistry,
+        debug: bool = False,
+    ) -> Optional[_PathRepresentation]:
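+        """chop off the front of ``to_chop`` that is already covered by
+        ``path``, returning the remaining tail, or None if the two paths
+        do not correspond.
+
+        A leading wildcard or default token in ``to_chop`` is returned
+        unchanged.
+
+        """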
+        i = -1
+
+        for i, (c_token, p_token) in enumerate(
+            zip(to_chop, path.natural_path)
+        ):
+            if isinstance(c_token, str):
+                if i == 0 and (
+                    c_token.endswith(f":{_DEFAULT_TOKEN}")
+                    or c_token.endswith(f":{_WILDCARD_TOKEN}")
+                ):
+                    return to_chop
+                elif (
+                    c_token != f"{_RELATIONSHIP_TOKEN}:{_WILDCARD_TOKEN}"
+                    and c_token != p_token.key  # type: ignore
+                ):
+                    return None
+
+            if c_token is p_token:
+                continue
+            elif (
+                isinstance(c_token, InspectionAttr)
+                and insp_is_mapper(c_token)
+                and insp_is_mapper(p_token)
+                and c_token.isa(p_token)
+            ):
+                continue
+
+            else:
+                return None
+        return to_chop[i + 1 :]
+
+
+class Load(_AbstractLoad):
+    """Represents loader options which modify the state of a
+    ORM-enabled :class:`_sql.Select` or a legacy :class:`_query.Query` in
+    order to affect how various mapped attributes are loaded.
+
+    The :class:`_orm.Load` object is in most cases used implicitly behind the
+    scenes when one makes use of a query option like :func:`_orm.joinedload`,
+    :func:`_orm.defer`, or similar.   It typically is not instantiated
+    directly except in some very specific cases.
+
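+    e.g., to scope an option to just one entity in a multi-entity query,
+    using hypothetical ``Book`` / ``User`` mappings::
+
+        session.execute(
+            select(Book, User).options(Load(Book).load_only(Book.title))
+        )
+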
+    .. seealso::
+
+        :ref:`orm_queryguide_relationship_per_entity_wildcard` - illustrates an
+        example where direct use of :class:`_orm.Load` may be useful
+
+    """
+
+    __slots__ = (
+        "path",
+        "context",
+        "additional_source_entities",
+    )
+
+    _traverse_internals = [
+        ("path", visitors.ExtendedInternalTraversal.dp_has_cache_key),
+        (
+            "context",
+            visitors.InternalTraversal.dp_has_cache_key_list,
+        ),
+        ("propagate_to_loaders", visitors.InternalTraversal.dp_boolean),
+        (
+            "additional_source_entities",
+            visitors.InternalTraversal.dp_has_cache_key_list,
+        ),
+    ]
+    _cache_key_traversal = None
+
+    path: PathRegistry
+    context: Tuple[_LoadElement, ...]
+    additional_source_entities: Tuple[_InternalEntityType[Any], ...]
+
+    def __init__(self, entity: _EntityType[Any]):
+        insp = cast("Union[Mapper[Any], AliasedInsp[Any]]", inspect(entity))
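+        # attribute access for its side effect: triggers any pending
+        # configuration steps for the entity (e.g. mapper configuration)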
+        insp._post_inspect
+
+        self.path = insp._path_registry
+        self.context = ()
+        self.propagate_to_loaders = False
+        self.additional_source_entities = ()
+
+    def __str__(self) -> str:
+        return f"Load({self.path[0]})"
+
+    @classmethod
+    def _construct_for_existing_path(
+        cls, path: AbstractEntityRegistry
+    ) -> Load:
+        load = cls.__new__(cls)
+        load.path = path
+        load.context = ()
+        load.propagate_to_loaders = False
+        load.additional_source_entities = ()
+        return load
+
+    def _adapt_cached_option_to_uncached_option(
+        self, context: QueryContext, uncached_opt: ORMOption
+    ) -> ORMOption:
+        if uncached_opt is self:
+            return self
+        return self._adjust_for_extra_criteria(context)
+
+    def _prepend_path(self, path: PathRegistry) -> Load:
+        cloned = self._clone()
+        cloned.context = tuple(
+            element._prepend_path(path) for element in self.context
+        )
+        return cloned
+
+    def _adjust_for_extra_criteria(self, context: QueryContext) -> Load:
+        """Apply the current bound parameters in a QueryContext to all
+        occurrences "extra_criteria" stored within this ``Load`` object,
+        returning a new instance of this ``Load`` object.
+
+        """
+
+        # avoid generating cache keys for the queries if we don't
+        # actually have any extra_criteria options, which is the
+        # common case
+        for value in self.context:
+            if value._extra_criteria:
+                break
+        else:
+            return self
+
+        replacement_cache_key = context.user_passed_query._generate_cache_key()
+
+        if replacement_cache_key is None:
+            return self
+
+        orig_query = context.compile_state.select_statement
+        orig_cache_key = orig_query._generate_cache_key()
+        assert orig_cache_key is not None
+
+        def process(
+            opt: _LoadElement,
+            replacement_cache_key: CacheKey,
+            orig_cache_key: CacheKey,
+        ) -> _LoadElement:
+            cloned_opt = opt._clone()
+
+            cloned_opt._extra_criteria = tuple(
+                replacement_cache_key._apply_params_to_element(
+                    orig_cache_key, crit
+                )
+                for crit in cloned_opt._extra_criteria
+            )
+
+            return cloned_opt
+
+        cloned = self._clone()
+        cloned.context = tuple(
+            (
+                process(value, replacement_cache_key, orig_cache_key)
+                if value._extra_criteria
+                else value
+            )
+            for value in self.context
+        )
+        return cloned
+
+    def _reconcile_query_entities_with_us(self, mapper_entities, raiseerr):
+        """called at process time to allow adjustment of the root
+        entity inside of _LoadElement objects.
+
+        """
+        path = self.path
+
+        ezero = None
+        for ent in mapper_entities:
+            ezero = ent.entity_zero
+            if ezero and orm_util._entity_corresponds_to(
+                # technically this can be a token also, but this is
+                # safe to pass to _entity_corresponds_to()
+                ezero,
+                cast("_InternalEntityType[Any]", path[0]),
+            ):
+                return ezero
+
+        return None
+
+    def _process(
+        self,
+        compile_state: ORMCompileState,
+        mapper_entities: Sequence[_MapperEntity],
+        raiseerr: bool,
+    ) -> None:
+        reconciled_lead_entity = self._reconcile_query_entities_with_us(
+            mapper_entities, raiseerr
+        )
+
+        # if the context has a current path, this is a lazy load
+        has_current_path = bool(compile_state.compile_options._current_path)
+
+        for loader in self.context:
+            # issue #11292
+            # historically, propagate_to_loaders was only considered at
+            # object loading time, whether or not to carry along options
+            # onto an object's loaded state where it would be used by lazyload.
+            # however, the defaultload() option needs to propagate in case
+            # its sub-options propagate_to_loaders, but its sub-options
+            # that don't propagate should not be applied for lazy loaders.
+            # so we check again
+            if has_current_path and not loader.propagate_to_loaders:
+                continue
+            loader.process_compile_state(
+                self,
+                compile_state,
+                mapper_entities,
+                reconciled_lead_entity,
+                raiseerr,
+            )
+
+    def _apply_to_parent(self, parent: Load) -> None:
+        """apply this :class:`_orm.Load` object as a sub-option of another
+        :class:`_orm.Load` object.
+
+        This method is used by the :meth:`_orm.Load.options` method.
+
+        """
+        cloned = self._generate()
+
+        assert cloned.propagate_to_loaders == self.propagate_to_loaders
+
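+        # the lead entity of this sub-option's path must correspond to the
+        # terminal entity of the parent's path (or one of the parent's
+        # additional source entities); otherwise it "does not link"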
+        if not any(
+            orm_util._entity_corresponds_to_use_path_impl(
+                elem, cloned.path.odd_element(0)
+            )
+            for elem in (parent.path.odd_element(-1),)
+            + parent.additional_source_entities
+        ):
+            if len(cloned.path) > 1:
+                attrname = cloned.path[1]
+                parent_entity = cloned.path[0]
+            else:
+                attrname = cloned.path[0]
+                parent_entity = cloned.path[0]
+            _raise_for_does_not_link(parent.path, attrname, parent_entity)
+
+        cloned.path = PathRegistry.coerce(parent.path[0:-1] + cloned.path[:])
+
+        if self.context:
+            cloned.context = tuple(
+                value._prepend_path_from(parent) for value in self.context
+            )
+
+        if cloned.context:
+            parent.context += cloned.context
+            parent.additional_source_entities += (
+                cloned.additional_source_entities
+            )
+
+    @_generative
+    def options(self, *opts: _AbstractLoad) -> Self:
+        r"""Apply a series of options as sub-options to this
+        :class:`_orm.Load`
+        object.
+
+        E.g.::
+
+            query = session.query(Author)
+            query = query.options(
+                joinedload(Author.book).options(
+                    load_only(Book.summary, Book.excerpt),
+                    joinedload(Book.citations).options(joinedload(Citation.author)),
+                )
+            )
+
+        :param \*opts: A series of loader option objects (ultimately
+         :class:`_orm.Load` objects) which should be applied to the path
+         specified by this :class:`_orm.Load` object.
+
+        .. versionadded:: 1.3.6
+
+        .. seealso::
+
+            :func:`.defaultload`
+
+            :ref:`orm_queryguide_relationship_sub_options`
+
+        """
+        for opt in opts:
+            try:
+                opt._apply_to_parent(self)
+            except AttributeError as ae:
+                if not isinstance(opt, _AbstractLoad):
+                    raise sa_exc.ArgumentError(
+                        f"Loader option {opt} is not compatible with the "
+                        "Load.options() method."
+                    ) from ae
+                else:
+                    raise
+        return self
+
+    def _clone_for_bind_strategy(
+        self,
+        attrs: Optional[Tuple[_AttrType, ...]],
+        strategy: Optional[_StrategyKey],
+        wildcard_key: Optional[_WildcardKeyType],
+        opts: Optional[_OptsType] = None,
+        attr_group: Optional[_AttrGroupType] = None,
+        propagate_to_loaders: bool = True,
+        reconcile_to_other: Optional[bool] = None,
+        extra_criteria: Optional[Tuple[Any, ...]] = None,
+    ) -> Self:
+        # for individual strategy that needs to propagate, set the whole
+        # Load container to also propagate, so that it shows up in
+        # InstanceState.load_options
+        if propagate_to_loaders:
+            self.propagate_to_loaders = True
+
+        if self.path.is_token:
+            raise sa_exc.ArgumentError(
+                "Wildcard token cannot be followed by another entity"
+            )
+
+        elif path_is_property(self.path):
+            # re-use the lookup which will raise a nicely formatted
+            # LoaderStrategyException
+            if strategy:
+                self.path.prop._strategy_lookup(self.path.prop, strategy[0])
+            else:
+                raise sa_exc.ArgumentError(
+                    f"Mapped attribute '{self.path.prop}' does not "
+                    "refer to a mapped entity"
+                )
+
+        if attrs is None:
+            load_element = _ClassStrategyLoad.create(
+                self.path,
+                None,
+                strategy,
+                wildcard_key,
+                opts,
+                propagate_to_loaders,
+                attr_group=attr_group,
+                reconcile_to_other=reconcile_to_other,
+                extra_criteria=extra_criteria,
+            )
+            if load_element:
+                self.context += (load_element,)
+                assert opts is not None
+                self.additional_source_entities += cast(
+                    "Tuple[_InternalEntityType[Any]]", opts["entities"]
+                )
+
+        else:
+            for attr in attrs:
+                if isinstance(attr, str):
+                    load_element = _TokenStrategyLoad.create(
+                        self.path,
+                        attr,
+                        strategy,
+                        wildcard_key,
+                        opts,
+                        propagate_to_loaders,
+                        attr_group=attr_group,
+                        reconcile_to_other=reconcile_to_other,
+                        extra_criteria=extra_criteria,
+                    )
+                else:
+                    load_element = _AttributeStrategyLoad.create(
+                        self.path,
+                        attr,
+                        strategy,
+                        wildcard_key,
+                        opts,
+                        propagate_to_loaders,
+                        attr_group=attr_group,
+                        reconcile_to_other=reconcile_to_other,
+                        extra_criteria=extra_criteria,
+                    )
+
+                if load_element:
+                    # for relationship options, update self.path on this Load
+                    # object with the latest path.
+                    if wildcard_key is _RELATIONSHIP_TOKEN:
+                        self.path = load_element.path
+                    self.context += (load_element,)
+
+                    # this seems to be effective for selectinloader,
+                    # giving the extra match to one more level deep,
+                    # but does not work for immediateloader, which still
+                    # must add additional options at load time
+                    if load_element.local_opts.get("recursion_depth", False):
+                        r1 = load_element._recurse()
+                        self.context += (r1,)
+
+        return self
+
+    def __getstate__(self):
+        d = self._shallow_to_dict()
+        d["path"] = self.path.serialize()
+        return d
+
+    def __setstate__(self, state):
+        state["path"] = PathRegistry.deserialize(state["path"])
+        self._shallow_from_dict(state)
+
+
+class _WildcardLoad(_AbstractLoad):
+    """represent a standalone '*' load operation"""
+
+    __slots__ = ("strategy", "path", "local_opts")
+
+    _traverse_internals = [
+        ("strategy", visitors.ExtendedInternalTraversal.dp_plain_obj),
+        ("path", visitors.ExtendedInternalTraversal.dp_plain_obj),
+        (
+            "local_opts",
+            visitors.ExtendedInternalTraversal.dp_string_multi_dict,
+        ),
+    ]
+    cache_key_traversal: _CacheKeyTraversalType = None
+
+    strategy: Optional[Tuple[Any, ...]]
+    local_opts: _OptsType
+    path: Union[Tuple[()], Tuple[str]]
+    propagate_to_loaders = False
+
+    def __init__(self) -> None:
+        self.path = ()
+        self.strategy = None
+        self.local_opts = util.EMPTY_DICT
+
+    def _clone_for_bind_strategy(
+        self,
+        attrs,
+        strategy,
+        wildcard_key,
+        opts=None,
+        attr_group=None,
+        propagate_to_loaders=True,
+        reconcile_to_other=None,
+        extra_criteria=None,
+    ):
+        assert attrs is not None
+        attr = attrs[0]
+        assert (
+            wildcard_key
+            and isinstance(attr, str)
+            and attr in (_WILDCARD_TOKEN, _DEFAULT_TOKEN)
+        )
+
+        attr = f"{wildcard_key}:{attr}"
+
+        self.strategy = strategy
+        self.path = (attr,)
+        if opts:
+            self.local_opts = util.immutabledict(opts)
+
+        assert extra_criteria is None
+
+    def options(self, *opts: _AbstractLoad) -> Self:
+        raise NotImplementedError("Star option does not support sub-options")
+
+    def _apply_to_parent(self, parent: Load) -> None:
+        """apply this :class:`_orm._WildcardLoad` object as a sub-option of
+        a :class:`_orm.Load` object.
+
+        This method is used by the :meth:`_orm.Load.options` method.   Note
+        that :class:`_orm._WildcardLoad` itself can't have sub-options, but
+        it may be used as the sub-option of a :class:`_orm.Load` object.
+
+        """
+        assert self.path
+        attr = self.path[0]
+        if attr.endswith(_DEFAULT_TOKEN):
+            attr = f"{attr.split(':')[0]}:{_WILDCARD_TOKEN}"
+
+        effective_path = cast(AbstractEntityRegistry, parent.path).token(attr)
+
+        assert effective_path.is_token
+
+        loader = _TokenStrategyLoad.create(
+            effective_path,
+            None,
+            self.strategy,
+            None,
+            self.local_opts,
+            self.propagate_to_loaders,
+        )
+
+        parent.context += (loader,)
+
+    def _process(self, compile_state, mapper_entities, raiseerr):
+        is_refresh = compile_state.compile_options._for_refresh_state
+
+        if is_refresh and not self.propagate_to_loaders:
+            return
+
+        entities = [ent.entity_zero for ent in mapper_entities]
+        current_path = compile_state.current_path
+
+        start_path: _PathRepresentation = self.path
+
+        if current_path:
+            # TODO: no cases in test suite where we actually get
+            # None back here
+            new_path = self._chop_path(start_path, current_path)
+            if new_path is None:
+                return
+
+            # chop_path does not actually "chop" a wildcard token path,
+            # just returns it
+            assert new_path == start_path
+
+        # start_path is a single-token tuple
+        assert start_path and len(start_path) == 1
+
+        token = start_path[0]
+        assert isinstance(token, str)
+        entity = self._find_entity_basestring(entities, token, raiseerr)
+
+        if not entity:
+            return
+
+        path_element = entity
+
+        # transfer our entity-less state into a Load() object
+        # with a real entity path.  Start with the lead entity
+        # we just located, then go through the rest of our path
+        # tokens and populate into the Load().
+
+        assert isinstance(token, str)
+        loader = _TokenStrategyLoad.create(
+            path_element._path_registry,
+            token,
+            self.strategy,
+            None,
+            self.local_opts,
+            self.propagate_to_loaders,
+            raiseerr=raiseerr,
+        )
+        if not loader:
+            return
+
+        assert loader.path.is_token
+
+        # don't pass a reconciled lead entity here
+        loader.process_compile_state(
+            self, compile_state, mapper_entities, None, raiseerr
+        )
+
+        return loader
+
+    def _find_entity_basestring(
+        self,
+        entities: Iterable[_InternalEntityType[Any]],
+        token: str,
+        raiseerr: bool,
+    ) -> Optional[_InternalEntityType[Any]]:
+        if token.endswith(f":{_WILDCARD_TOKEN}"):
+            if len(list(entities)) != 1:
+                if raiseerr:
+                    raise sa_exc.ArgumentError(
+                        "Can't apply wildcard ('*') or load_only() "
+                        f"loader option to multiple entities "
+                        f"{', '.join(str(ent) for ent in entities)}. Specify "
+                        "loader options for each entity individually, such as "
+                        f"""{
+                            ", ".join(
+                                f"Load({ent}).some_option('*')"
+                                for ent in entities
+                            )
+                        }."""
+                    )
+        elif token.endswith(_DEFAULT_TOKEN):
+            raiseerr = False
+
+        for ent in entities:
+            # return only the first _MapperEntity when searching
+            # based on string prop name.   Ideally object
+            # attributes are used to specify more exactly.
+            return ent
+        else:
+            if raiseerr:
+                raise sa_exc.ArgumentError(
+                    "Query has only expression-based entities - "
+                    f'can\'t find property named "{token}".'
+                )
+            else:
+                return None
+
+    def __getstate__(self) -> Dict[str, Any]:
+        d = self._shallow_to_dict()
+        return d
+
+    def __setstate__(self, state: Dict[str, Any]) -> None:
+        self._shallow_from_dict(state)
+
+
+class _LoadElement(
+    cache_key.HasCacheKey, traversals.HasShallowCopy, visitors.Traversible
+):
+    """represents strategy information to select for a LoaderStrategy
+    and pass options to it.
+
+    :class:`._LoadElement` objects provide the inner datastructure
+    stored by a :class:`_orm.Load` object and are also the object passed
+    to methods like :meth:`.LoaderStrategy.setup_query`.
+
+    .. versionadded:: 2.0
+
+    """
+
+    __slots__ = (
+        "path",
+        "strategy",
+        "propagate_to_loaders",
+        "local_opts",
+        "_extra_criteria",
+        "_reconcile_to_other",
+    )
+    __visit_name__ = "load_element"
+
+    _traverse_internals = [
+        ("path", visitors.ExtendedInternalTraversal.dp_has_cache_key),
+        ("strategy", visitors.ExtendedInternalTraversal.dp_plain_obj),
+        (
+            "local_opts",
+            visitors.ExtendedInternalTraversal.dp_string_multi_dict,
+        ),
+        ("_extra_criteria", visitors.InternalTraversal.dp_clauseelement_list),
+        ("propagate_to_loaders", visitors.InternalTraversal.dp_plain_obj),
+        ("_reconcile_to_other", visitors.InternalTraversal.dp_plain_obj),
+    ]
+    _cache_key_traversal = None
+
+    _extra_criteria: Tuple[Any, ...]
+
+    _reconcile_to_other: Optional[bool]
+    strategy: Optional[_StrategyKey]
+    path: PathRegistry
+    propagate_to_loaders: bool
+
+    local_opts: util.immutabledict[str, Any]
+
+    is_token_strategy: bool
+    is_class_strategy: bool
+
+    def __hash__(self) -> int:
+        return id(self)
+
+    def __eq__(self, other):
+        return traversals.compare(self, other)
+
+    @property
+    def is_opts_only(self) -> bool:
+        return bool(self.local_opts and self.strategy is None)
+
+    def _clone(self, **kw: Any) -> _LoadElement:
+        cls = self.__class__
+        s = cls.__new__(cls)
+
+        self._shallow_copy_to(s)
+        return s
+
+    def _update_opts(self, **kw: Any) -> _LoadElement:
+        new = self._clone()
+        new.local_opts = new.local_opts.union(kw)
+        return new
+
+    def __getstate__(self) -> Dict[str, Any]:
+        d = self._shallow_to_dict()
+        d["path"] = self.path.serialize()
+        return d
+
+    def __setstate__(self, state: Dict[str, Any]) -> None:
+        state["path"] = PathRegistry.deserialize(state["path"])
+        self._shallow_from_dict(state)
+
+    def _raise_for_no_match(self, parent_loader, mapper_entities):
+        path = parent_loader.path
+
+        found_entities = False
+        for ent in mapper_entities:
+            ezero = ent.entity_zero
+            if ezero:
+                found_entities = True
+                break
+
+        if not found_entities:
+            raise sa_exc.ArgumentError(
+                "Query has only expression-based entities; "
+                f"attribute loader options for {path[0]} can't "
+                "be applied here."
+            )
+        else:
+            raise sa_exc.ArgumentError(
+                f"Mapped class {path[0]} does not apply to any of the "
+                f"root entities in this query, e.g. "
+                f"""{
+                    ", ".join(
+                        str(x.entity_zero)
+                        for x in mapper_entities if x.entity_zero
+                    )}. Please """
+                "specify the full path "
+                "from one of the root entities to the target "
+                "attribute. "
+            )
+
+    def _adjust_effective_path_for_current_path(
+        self, effective_path: PathRegistry, current_path: PathRegistry
+    ) -> Optional[PathRegistry]:
+        """receives the 'current_path' entry from an :class:`.ORMCompileState`
+        instance, which is set during lazy loads and secondary loader strategy
+        loads, and adjusts the given path to be relative to the
+        current_path.
+
+        E.g. given a loader path and current path:
+
+        .. sourcecode:: text
+
+            lp: User -> orders -> Order -> items -> Item -> keywords -> Keyword
+
+            cp: User -> orders -> Order -> items
+
+        The adjusted path would be:
+
+        .. sourcecode:: text
+
+            Item -> keywords -> Keyword
+
+
+        """
+        chopped_start_path = Load._chop_path(
+            effective_path.natural_path, current_path
+        )
+        if not chopped_start_path:
+            return None
+
+        tokens_removed_from_start_path = len(effective_path) - len(
+            chopped_start_path
+        )
+
+        loader_lead_path_element = self.path[tokens_removed_from_start_path]
+
+        effective_path = PathRegistry.coerce(
+            (loader_lead_path_element,) + chopped_start_path[1:]
+        )
+
+        return effective_path
+
+    def _init_path(
+        self, path, attr, wildcard_key, attr_group, raiseerr, extra_criteria
+    ):
+        """Apply ORM attributes and/or wildcard to an existing path, producing
+        a new path.
+
+        This method is used within the :meth:`.create` method to initialize
+        a :class:`._LoadElement` object.
+
+        """
+        raise NotImplementedError()
+
+    def _prepare_for_compile_state(
+        self,
+        parent_loader,
+        compile_state,
+        mapper_entities,
+        reconciled_lead_entity,
+        raiseerr,
+    ):
+        """implemented by subclasses."""
+        raise NotImplementedError()
+
+    def process_compile_state(
+        self,
+        parent_loader,
+        compile_state,
+        mapper_entities,
+        reconciled_lead_entity,
+        raiseerr,
+    ):
+        """populate ORMCompileState.attributes with loader state for this
+        _LoadElement.
+
+        """
+        keys = self._prepare_for_compile_state(
+            parent_loader,
+            compile_state,
+            mapper_entities,
+            reconciled_lead_entity,
+            raiseerr,
+        )
+        for key in keys:
+            if key in compile_state.attributes:
+                compile_state.attributes[key] = _LoadElement._reconcile(
+                    self, compile_state.attributes[key]
+                )
+            else:
+                compile_state.attributes[key] = self
+
+    @classmethod
+    def create(
+        cls,
+        path: PathRegistry,
+        attr: Union[_AttrType, _StrPathToken, None],
+        strategy: Optional[_StrategyKey],
+        wildcard_key: Optional[_WildcardKeyType],
+        local_opts: Optional[_OptsType],
+        propagate_to_loaders: bool,
+        raiseerr: bool = True,
+        attr_group: Optional[_AttrGroupType] = None,
+        reconcile_to_other: Optional[bool] = None,
+        extra_criteria: Optional[Tuple[Any, ...]] = None,
+    ) -> _LoadElement:
+        """Create a new :class:`._LoadElement` object."""
+
+        opt = cls.__new__(cls)
+        opt.path = path
+        opt.strategy = strategy
+        opt.propagate_to_loaders = propagate_to_loaders
+        opt.local_opts = (
+            util.immutabledict(local_opts) if local_opts else util.EMPTY_DICT
+        )
+        opt._extra_criteria = ()
+
+        if reconcile_to_other is not None:
+            opt._reconcile_to_other = reconcile_to_other
+        elif strategy is None and not local_opts:
+            opt._reconcile_to_other = True
+        else:
+            opt._reconcile_to_other = None
+
+        path = opt._init_path(
+            path, attr, wildcard_key, attr_group, raiseerr, extra_criteria
+        )
+
+        if not path:
+            return None  # type: ignore
+
+        assert opt.is_token_strategy == path.is_token
+
+        opt.path = path
+        return opt
+
+    def __init__(self) -> None:
+        raise NotImplementedError()
+
+    def _recurse(self) -> _LoadElement:
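+        # repeat the final (attribute, entity) pair of the path, e.g.
+        # (Node, children, Node) -> (Node, children, Node, children, Node),
+        # extending a recursion_depth option one level deeper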
+        cloned = self._clone()
+        cloned.path = PathRegistry.coerce(self.path[:] + self.path[-2:])
+
+        return cloned
+
+    def _prepend_path_from(self, parent: Load) -> _LoadElement:
+        """adjust the path of this :class:`._LoadElement` to be
+        a subpath of that of the given parent :class:`_orm.Load` object's
+        path.
+
+        This is used by the :meth:`_orm.Load._apply_to_parent` method,
+        which is in turn part of the :meth:`_orm.Load.options` method.
+
+        """
+
+        if not any(
+            orm_util._entity_corresponds_to_use_path_impl(
+                elem,
+                self.path.odd_element(0),
+            )
+            for elem in (parent.path.odd_element(-1),)
+            + parent.additional_source_entities
+        ):
+            raise sa_exc.ArgumentError(
+                f'Attribute "{self.path[1]}" does not link '
+                f'from element "{parent.path[-1]}".'
+            )
+
+        return self._prepend_path(parent.path)
+
+    def _prepend_path(self, path: PathRegistry) -> _LoadElement:
+        cloned = self._clone()
+
+        assert cloned.strategy == self.strategy
+        assert cloned.local_opts == self.local_opts
+        assert cloned.is_class_strategy == self.is_class_strategy
+
+        cloned.path = PathRegistry.coerce(path[0:-1] + cloned.path[:])
+
+        return cloned
+
+    @staticmethod
+    def _reconcile(
+        replacement: _LoadElement, existing: _LoadElement
+    ) -> _LoadElement:
+        """define behavior for when two Load objects are to be put into
+        the context.attributes under the same key.
+
+        :param replacement: ``_LoadElement`` that seeks to replace the
+         existing one
+
+        :param existing: ``_LoadElement`` that is already present.
+
+        """
+        # mapper inheritance loading requires fine-grained "block other
+        # options" / "allow these options to be overridden" behaviors
+        # see test_poly_loading.py
+
+        if replacement._reconcile_to_other:
+            return existing
+        elif replacement._reconcile_to_other is False:
+            return replacement
+        elif existing._reconcile_to_other:
+            return replacement
+        elif existing._reconcile_to_other is False:
+            return existing
+
+        if existing is replacement:
+            return replacement
+        elif (
+            existing.strategy == replacement.strategy
+            and existing.local_opts == replacement.local_opts
+        ):
+            return replacement
+        elif replacement.is_opts_only:
+            existing = existing._clone()
+            existing.local_opts = existing.local_opts.union(
+                replacement.local_opts
+            )
+            existing._extra_criteria += replacement._extra_criteria
+            return existing
+        elif existing.is_opts_only:
+            replacement = replacement._clone()
+            replacement.local_opts = replacement.local_opts.union(
+                existing.local_opts
+            )
+            replacement._extra_criteria += existing._extra_criteria
+            return replacement
+        elif replacement.path.is_token:
+            # use 'last one wins' logic for wildcard options.  this is also
+            # kind of inconsistent vs. options that are specific paths which
+            # will raise as below
+            return replacement
+
+        raise sa_exc.InvalidRequestError(
+            f"Loader strategies for {replacement.path} conflict"
+        )
+
+
+class _AttributeStrategyLoad(_LoadElement):
+    """Loader strategies against specific relationship or column paths.
+
+    e.g.::
+
+        joinedload(User.addresses)
+        defer(Order.name)
+        selectinload(User.orders).lazyload(Order.items)
+
+    """
+
+    __slots__ = ("_of_type", "_path_with_polymorphic_path")
+
+    __visit_name__ = "attribute_strategy_load_element"
+
+    _traverse_internals = _LoadElement._traverse_internals + [
+        ("_of_type", visitors.ExtendedInternalTraversal.dp_multi),
+        (
+            "_path_with_polymorphic_path",
+            visitors.ExtendedInternalTraversal.dp_has_cache_key,
+        ),
+    ]
+
+    _of_type: Union[Mapper[Any], AliasedInsp[Any], None]
+    _path_with_polymorphic_path: Optional[PathRegistry]
+
+    is_class_strategy = False
+    is_token_strategy = False
+
+    def _init_path(
+        self, path, attr, wildcard_key, attr_group, raiseerr, extra_criteria
+    ):
+        assert attr is not None
+        self._of_type = None
+        self._path_with_polymorphic_path = None
+        insp, _, prop = _parse_attr_argument(attr)
+
+        if insp.is_property:
+            # direct property can be sent from internal strategy logic
+            # that sets up specific loaders, such as
+            # emit_lazyload->_lazyload_reverse
+            # prop = found_property = attr
+            prop = attr
+            path = path[prop]
+
+            if path.has_entity:
+                path = path.entity_path
+            return path
+
+        elif not insp.is_attribute:
+            # should not reach here;
+            assert False
+
+        # here we assume we have user-passed InstrumentedAttribute
+        if not orm_util._entity_corresponds_to_use_path_impl(
+            path[-1], attr.parent
+        ):
+            if raiseerr:
+                if attr_group and attr is not attr_group[0]:
+                    raise sa_exc.ArgumentError(
+                        "Can't apply wildcard ('*') or load_only() "
+                        "loader option to multiple entities in the "
+                        "same option. Use separate options per entity."
+                    )
+                else:
+                    _raise_for_does_not_link(path, str(attr), attr.parent)
+            else:
+                return None
+
+        # note the essential logic of this attribute was very different in
+        # 1.4, where there were caching failures in e.g.
+        # test_relationship_criteria.py::RelationshipCriteriaTest::
+        # test_selectinload_nested_criteria[True] if an existing
+        # "_extra_criteria" on a Load object were replaced with that coming
+        # from an attribute.   This appears to have been an artifact of how
+        # _UnboundLoad / Load interacted together, which was opaque and
+        # poorly defined.
+        if extra_criteria:
+            assert not attr._extra_criteria
+            self._extra_criteria = extra_criteria
+        else:
+            self._extra_criteria = attr._extra_criteria
+
+        if getattr(attr, "_of_type", None):
+            ac = attr._of_type
+            ext_info = inspect(ac)
+            self._of_type = ext_info
+
+            self._path_with_polymorphic_path = path.entity_path[prop]
+
+            path = path[prop][ext_info]
+
+        else:
+            path = path[prop]
+
+        if path.has_entity:
+            path = path.entity_path
+
+        return path
+
+    def _generate_extra_criteria(self, context):
+        """Apply the current bound parameters in a QueryContext to the
+        immediate "extra_criteria" stored with this Load object.
+
+        Load objects are typically pulled from the cached version of
+        the statement from a QueryContext.  The statement currently being
+        executed will have new values (and keys) for bound parameters in the
+        extra criteria which need to be applied by loader strategies when
+        they handle this criteria for a result set.
+
+        """
+
+        assert (
+            self._extra_criteria
+        ), "this should only be called if _extra_criteria is present"
+
+        orig_query = context.compile_state.select_statement
+        current_query = context.query
+
+        # NOTE: while it seems like we should not do the "apply" operation
+        # here if orig_query is current_query, skipping it in the "optimized"
+        # case causes the query to be different from a cache key perspective,
+        # because we are creating a copy of the criteria which is no longer
+        # the same identity of the _extra_criteria in the loader option
+        # itself.  cache key logic produces a different key for
+        # (A, copy_of_A) vs. (A, A), because in the latter case it shortens
+        # the second part of the key to just indicate on identity.
+
+        # if orig_query is current_query:
+        # not cached yet.   just do the and_()
+        #    return and_(*self._extra_criteria)
+
+        k1 = orig_query._generate_cache_key()
+        k2 = current_query._generate_cache_key()
+
+        return k2._apply_params_to_element(k1, and_(*self._extra_criteria))
+
+    def _set_of_type_info(self, context, current_path):
+        assert self._path_with_polymorphic_path
+
+        pwpi = self._of_type
+        assert pwpi
+        if not pwpi.is_aliased_class:
+            pwpi = inspect(
+                orm_util.AliasedInsp._with_polymorphic_factory(
+                    pwpi.mapper.base_mapper,
+                    (pwpi.mapper,),
+                    aliased=True,
+                    _use_mapper_path=True,
+                )
+            )
+        start_path = self._path_with_polymorphic_path
+        if current_path:
+            new_path = self._adjust_effective_path_for_current_path(
+                start_path, current_path
+            )
+            if new_path is None:
+                return
+            start_path = new_path
+
+        key = ("path_with_polymorphic", start_path.natural_path)
+        if key in context:
+            existing_aliased_insp = context[key]
+            this_aliased_insp = pwpi
+            new_aliased_insp = existing_aliased_insp._merge_with(
+                this_aliased_insp
+            )
+            context[key] = new_aliased_insp
+        else:
+            context[key] = pwpi
+
+    def _prepare_for_compile_state(
+        self,
+        parent_loader,
+        compile_state,
+        mapper_entities,
+        reconciled_lead_entity,
+        raiseerr,
+    ):
+        # _AttributeStrategyLoad
+
+        current_path = compile_state.current_path
+        is_refresh = compile_state.compile_options._for_refresh_state
+        assert not self.path.is_token
+
+        if is_refresh and not self.propagate_to_loaders:
+            return []
+
+        if self._of_type:
+            # apply additional with_polymorphic alias that may have been
+            # generated.  this has to happen even if this is a defaultload
+            self._set_of_type_info(compile_state.attributes, current_path)
+
+        # omit setting loader attributes for a "defaultload" type of option
+        if not self.strategy and not self.local_opts:
+            return []
+
+        if raiseerr and not reconciled_lead_entity:
+            self._raise_for_no_match(parent_loader, mapper_entities)
+
+        if self.path.has_entity:
+            effective_path = self.path.parent
+        else:
+            effective_path = self.path
+
+        if current_path:
+            assert effective_path is not None
+            effective_path = self._adjust_effective_path_for_current_path(
+                effective_path, current_path
+            )
+            if effective_path is None:
+                return []
+
+        return [("loader", cast(PathRegistry, effective_path).natural_path)]
+
+    def __getstate__(self):
+        d = super().__getstate__()
+
+        # can't pickle this.  See
+        # test_pickled.py -> test_lazyload_extra_criteria_not_supported
+        # where we should be emitting a warning for the usual case where this
+        # would be non-None
+        d["_extra_criteria"] = ()
+
+        if self._path_with_polymorphic_path:
+            d["_path_with_polymorphic_path"] = (
+                self._path_with_polymorphic_path.serialize()
+            )
+
+        if self._of_type:
+            if self._of_type.is_aliased_class:
+                d["_of_type"] = None
+            elif self._of_type.is_mapper:
+                d["_of_type"] = self._of_type.class_
+            else:
+                assert False, "unexpected object for _of_type"
+
+        return d
+
+    def __setstate__(self, state):
+        super().__setstate__(state)
+
+        if state.get("_path_with_polymorphic_path", None):
+            self._path_with_polymorphic_path = PathRegistry.deserialize(
+                state["_path_with_polymorphic_path"]
+            )
+        else:
+            self._path_with_polymorphic_path = None
+
+        if state.get("_of_type", None):
+            self._of_type = inspect(state["_of_type"])
+        else:
+            self._of_type = None
+
+
+class _TokenStrategyLoad(_LoadElement):
+    """Loader strategies against wildcard attributes
+
+    e.g.::
+
+        raiseload("*")
+        Load(User).lazyload("*")
+        defer("*")
+        load_only(User.name, User.email)  # will create a defer('*')
+        joinedload(User.addresses).raiseload("*")
+
+    """
+
+    __visit_name__ = "token_strategy_load_element"
+
+    inherit_cache = True
+    is_class_strategy = False
+    is_token_strategy = True
+
+    def _init_path(
+        self, path, attr, wildcard_key, attr_group, raiseerr, extra_criteria
+    ):
+        # assert isinstance(attr, str) or attr is None
+        if attr is not None:
+            default_token = attr.endswith(_DEFAULT_TOKEN)
+            if attr.endswith(_WILDCARD_TOKEN) or default_token:
+                if wildcard_key:
+                    attr = f"{wildcard_key}:{attr}"
+
+                path = path.token(attr)
+                return path
+            else:
+                raise sa_exc.ArgumentError(
+                    "Strings are not accepted for attribute names in loader "
+                    "options; please use class-bound attributes directly."
+                )
+        return path
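+
+    # Editor's note (illustrative): ``defer("*")`` arrives here as
+    # ``attr="*"`` with ``wildcard_key="column"`` and is stored as the path
+    # token ``"column:*"``; ``lazyload("*")`` similarly becomes
+    # ``"relationship:*"``.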
+
+    def _prepare_for_compile_state(
+        self,
+        parent_loader,
+        compile_state,
+        mapper_entities,
+        reconciled_lead_entity,
+        raiseerr,
+    ):
+        # _TokenStrategyLoad
+
+        current_path = compile_state.current_path
+        is_refresh = compile_state.compile_options._for_refresh_state
+
+        assert self.path.is_token
+
+        if is_refresh and not self.propagate_to_loaders:
+            return []
+
+        # omit setting attributes for a "defaultload" type of option
+        if not self.strategy and not self.local_opts:
+            return []
+
+        effective_path = self.path
+        if reconciled_lead_entity:
+            effective_path = PathRegistry.coerce(
+                (reconciled_lead_entity,) + effective_path.path[1:]
+            )
+
+        if current_path:
+            new_effective_path = self._adjust_effective_path_for_current_path(
+                effective_path, current_path
+            )
+            if new_effective_path is None:
+                return []
+            effective_path = new_effective_path
+
+        # for a wildcard token, expand out the path we set
+        # to encompass everything from the query entity on
+        # forward.  not clear if this is necessary when current_path
+        # is set.
+
+        return [
+            ("loader", natural_path)
+            for natural_path in (
+                cast(
+                    TokenRegistry, effective_path
+                )._generate_natural_for_superclasses()
+            )
+        ]
+
+
+class _ClassStrategyLoad(_LoadElement):
+    """Loader strategies that deals with a class as a target, not
+    an attribute path
+
+    e.g.::
+
+        q = s.query(Person).options(
+            selectin_polymorphic(Person, [Engineer, Manager])
+        )
+
+    """
+
+    inherit_cache = True
+    is_class_strategy = True
+    is_token_strategy = False
+
+    __visit_name__ = "class_strategy_load_element"
+
+    def _init_path(
+        self, path, attr, wildcard_key, attr_group, raiseerr, extra_criteria
+    ):
+        return path
+
+    def _prepare_for_compile_state(
+        self,
+        parent_loader,
+        compile_state,
+        mapper_entities,
+        reconciled_lead_entity,
+        raiseerr,
+    ):
+        # _ClassStrategyLoad
+
+        current_path = compile_state.current_path
+        is_refresh = compile_state.compile_options._for_refresh_state
+
+        if is_refresh and not self.propagate_to_loaders:
+            return []
+
+        # omit setting attributes for a "defaultload" type of option
+        if not self.strategy and not self.local_opts:
+            return []
+
+        effective_path = self.path
+
+        if current_path:
+            new_effective_path = self._adjust_effective_path_for_current_path(
+                effective_path, current_path
+            )
+            if new_effective_path is None:
+                return []
+            effective_path = new_effective_path
+
+        return [("loader", effective_path.natural_path)]
+
+
+def _generate_from_keys(
+    meth: Callable[..., _AbstractLoad],
+    keys: Tuple[_AttrType, ...],
+    chained: bool,
+    kw: Any,
+) -> _AbstractLoad:
+    lead_element: Optional[_AbstractLoad] = None
+
+    attr: Any
+    for is_default, _keys in (True, keys[0:-1]), (False, keys[-1:]):
+        for attr in _keys:
+            if isinstance(attr, str):
+                if attr.startswith("." + _WILDCARD_TOKEN):
+                    util.warn_deprecated(
+                        "The undocumented `.{WILDCARD}` format is "
+                        "deprecated and will be removed in a future version "
+                        "as it is believed to be unused. If you have been "
+                        "using this functionality, please comment on Issue "
+                        "#4390 on the SQLAlchemy project tracker.",
+                        version="1.4",
+                    )
+                    attr = attr[1:]
+
+                if attr == _WILDCARD_TOKEN:
+                    if is_default:
+                        raise sa_exc.ArgumentError(
+                            "Wildcard token cannot be followed by "
+                            "another entity",
+                        )
+
+                    if lead_element is None:
+                        lead_element = _WildcardLoad()
+
+                    lead_element = meth(lead_element, _DEFAULT_TOKEN, **kw)
+
+                else:
+                    raise sa_exc.ArgumentError(
+                        "Strings are not accepted for attribute names in "
+                        "loader options; please use class-bound "
+                        "attributes directly.",
+                    )
+            else:
+                if lead_element is None:
+                    _, lead_entity, _ = _parse_attr_argument(attr)
+                    lead_element = Load(lead_entity)
+
+                if is_default:
+                    if not chained:
+                        lead_element = lead_element.defaultload(attr)
+                    else:
+                        lead_element = meth(
+                            lead_element, attr, _is_chain=True, **kw
+                        )
+                else:
+                    lead_element = meth(lead_element, attr, **kw)
+
+    assert lead_element
+    return lead_element
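+
+
+# Editor's note (illustrative): with ``chained=False``, a multi-attribute
+# call such as ``selectinload(User.orders, Order.items)`` (``User`` and
+# ``Order`` being assumed application mappings) expands so that the
+# intermediate key uses ``defaultload()``, i.e. is equivalent to::
+#
+#   Load(User).defaultload(User.orders).selectinload(Order.items)
+#
+# with ``chained=True``, as used by ``contains_eager()``, the given method
+# is applied at every level of the path instead.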
+
+
+def _parse_attr_argument(
+    attr: _AttrType,
+) -> Tuple[InspectionAttr, _InternalEntityType[Any], MapperProperty[Any]]:
+    """parse an attribute or wildcard argument to produce an
+    :class:`._AbstractLoad` instance.
+
+    This is used by the standalone loader strategy functions like
+    ``joinedload()``, ``defer()``, etc. to produce :class:`_orm.Load` or
+    :class:`._WildcardLoad` objects.
+
+    """
+    try:
+        # TODO: need to figure out this None thing being returned by
+        # inspect(), it should not have None as an option in most cases
+        # if at all
+        insp: InspectionAttr = inspect(attr)  # type: ignore
+    except sa_exc.NoInspectionAvailable as err:
+        raise sa_exc.ArgumentError(
+            "expected ORM mapped attribute for loader strategy argument"
+        ) from err
+
+    lead_entity: _InternalEntityType[Any]
+
+    if insp_is_mapper_property(insp):
+        lead_entity = insp.parent
+        prop = insp
+    elif insp_is_attribute(insp):
+        lead_entity = insp.parent
+        prop = insp.prop
+    else:
+        raise sa_exc.ArgumentError(
+            "expected ORM mapped attribute for loader strategy argument"
+        )
+
+    return insp, lead_entity, prop
+
+
+def loader_unbound_fn(fn: _FN) -> _FN:
+    """decorator that applies docstrings between standalone loader functions
+    and the loader methods on :class:`._AbstractLoad`.
+
+    """
+    bound_fn = getattr(_AbstractLoad, fn.__name__)
+    fn_doc = bound_fn.__doc__
+    bound_fn.__doc__ = f"""Produce a new :class:`_orm.Load` object with the
+:func:`_orm.{fn.__name__}` option applied.
+
+See :func:`_orm.{fn.__name__}` for usage examples.
+
+"""
+
+    fn.__doc__ = fn_doc
+    return fn
+
+
+# standalone functions follow.  docstrings are filled in
+# by the ``@loader_unbound_fn`` decorator.
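+#
+# Example usage (illustrative; ``User`` is an assumed application mapping)::
+#
+#   from sqlalchemy import select
+#
+#   stmt = select(User).options(
+#       joinedload(User.addresses),
+#       load_only(User.name),
+#   )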
+
+
+@loader_unbound_fn
+def contains_eager(*keys: _AttrType, **kw: Any) -> _AbstractLoad:
+    return _generate_from_keys(Load.contains_eager, keys, True, kw)
+
+
+@loader_unbound_fn
+def load_only(*attrs: _AttrType, raiseload: bool = False) -> _AbstractLoad:
+    # TODO: attrs against different classes.  we likely have to
+    # add some extra state to Load of some kind
+    _, lead_element, _ = _parse_attr_argument(attrs[0])
+    return Load(lead_element).load_only(*attrs, raiseload=raiseload)
+
+
+@loader_unbound_fn
+def joinedload(*keys: _AttrType, **kw: Any) -> _AbstractLoad:
+    return _generate_from_keys(Load.joinedload, keys, False, kw)
+
+
+@loader_unbound_fn
+def subqueryload(*keys: _AttrType) -> _AbstractLoad:
+    return _generate_from_keys(Load.subqueryload, keys, False, {})
+
+
+@loader_unbound_fn
+def selectinload(
+    *keys: _AttrType, recursion_depth: Optional[int] = None
+) -> _AbstractLoad:
+    return _generate_from_keys(
+        Load.selectinload, keys, False, {"recursion_depth": recursion_depth}
+    )
+
+
+@loader_unbound_fn
+def lazyload(*keys: _AttrType) -> _AbstractLoad:
+    return _generate_from_keys(Load.lazyload, keys, False, {})
+
+
+@loader_unbound_fn
+def immediateload(
+    *keys: _AttrType, recursion_depth: Optional[int] = None
+) -> _AbstractLoad:
+    return _generate_from_keys(
+        Load.immediateload, keys, False, {"recursion_depth": recursion_depth}
+    )
+
+
+@loader_unbound_fn
+def noload(*keys: _AttrType) -> _AbstractLoad:
+    return _generate_from_keys(Load.noload, keys, False, {})
+
+
+@loader_unbound_fn
+def raiseload(*keys: _AttrType, **kw: Any) -> _AbstractLoad:
+    return _generate_from_keys(Load.raiseload, keys, False, kw)
+
+
+@loader_unbound_fn
+def defaultload(*keys: _AttrType) -> _AbstractLoad:
+    return _generate_from_keys(Load.defaultload, keys, False, {})
+
+
+@loader_unbound_fn
+def defer(
+    key: _AttrType, *addl_attrs: _AttrType, raiseload: bool = False
+) -> _AbstractLoad:
+    if addl_attrs:
+        util.warn_deprecated(
+            "The *addl_attrs on orm.defer is deprecated.  Please use "
+            "method chaining in conjunction with defaultload() to "
+            "indicate a path.",
+            version="1.3",
+        )
+
+    if raiseload:
+        kw = {"raiseload": raiseload}
+    else:
+        kw = {}
+
+    return _generate_from_keys(Load.defer, (key,) + addl_attrs, False, kw)
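+
+
+# Editor's note (illustrative): to defer a column on a related entity, chain
+# from ``defaultload()``, e.g. with assumed ``Book`` / ``Author`` mappings::
+#
+#   select(Book).options(defaultload(Book.author).defer(Author.bio))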
+
+
+@loader_unbound_fn
+def undefer(key: _AttrType, *addl_attrs: _AttrType) -> _AbstractLoad:
+    if addl_attrs:
+        util.warn_deprecated(
+            "The *addl_attrs on orm.undefer is deprecated.  Please use "
+            "method chaining in conjunction with defaultload() to "
+            "indicate a path.",
+            version="1.3",
+        )
+    return _generate_from_keys(Load.undefer, (key,) + addl_attrs, False, {})
+
+
+@loader_unbound_fn
+def undefer_group(name: str) -> _AbstractLoad:
+    element = _WildcardLoad()
+    return element.undefer_group(name)
+
+
+@loader_unbound_fn
+def with_expression(
+    key: _AttrType, expression: _ColumnExpressionArgument[Any]
+) -> _AbstractLoad:
+    return _generate_from_keys(
+        Load.with_expression, (key,), False, {"expression": expression}
+    )
+
+
+@loader_unbound_fn
+def selectin_polymorphic(
+    base_cls: _EntityType[Any], classes: Iterable[Type[Any]]
+) -> _AbstractLoad:
+    ul = Load(base_cls)
+    return ul.selectin_polymorphic(classes)
+
+
+def _raise_for_does_not_link(path, attrname, parent_entity):
+    if len(path) > 1:
+        path_is_of_type = path[-1].entity is not path[-2].mapper.class_
+        if insp_is_aliased_class(parent_entity):
+            parent_entity_str = str(parent_entity)
+        else:
+            parent_entity_str = parent_entity.class_.__name__
+
+        raise sa_exc.ArgumentError(
+            f'ORM mapped entity or attribute "{attrname}" does not '
+            f'link from relationship "{path[-2]}%s".%s'
+            % (
+                f".of_type({path[-1]})" if path_is_of_type else "",
+                (
+                    "  Did you mean to use "
+                    f'"{path[-2]}'
+                    f'.of_type({parent_entity_str})" or "loadopt.options('
+                    f"selectin_polymorphic({path[-2].mapper.class_.__name__}, "
+                    f'[{parent_entity_str}]), ...)" ?'
+                    if not path_is_of_type
+                    and not path[-1].is_aliased_class
+                    and orm_util._entity_corresponds_to(
+                        path.entity, inspect(parent_entity).mapper
+                    )
+                    else ""
+                ),
+            )
+        )
+    else:
+        raise sa_exc.ArgumentError(
+            f'ORM mapped attribute "{attrname}" does not '
+            f'link mapped class "{path[-1]}"'
+        )
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/sync.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/sync.py
new file mode 100644
index 00000000..8f85a41a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/sync.py
@@ -0,0 +1,164 @@
+# orm/sync.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+
+"""private module containing functions used for copying data
+between instances based on join conditions.
+
+"""
+
+from __future__ import annotations
+
+from . import exc
+from . import util as orm_util
+from .base import PassiveFlag
+
+
+def populate(
+    source,
+    source_mapper,
+    dest,
+    dest_mapper,
+    synchronize_pairs,
+    uowcommit,
+    flag_cascaded_pks,
+):
+    source_dict = source.dict
+    dest_dict = dest.dict
+
+    for l, r in synchronize_pairs:
+        try:
+            # inline of source_mapper._get_state_attr_by_column
+            prop = source_mapper._columntoproperty[l]
+            value = source.manager[prop.key].impl.get(
+                source, source_dict, PassiveFlag.PASSIVE_OFF
+            )
+        except exc.UnmappedColumnError as err:
+            _raise_col_to_prop(False, source_mapper, l, dest_mapper, r, err)
+
+        try:
+            # inline of dest_mapper._set_state_attr_by_column
+            prop = dest_mapper._columntoproperty[r]
+            dest.manager[prop.key].impl.set(dest, dest_dict, value, None)
+        except exc.UnmappedColumnError as err:
+            _raise_col_to_prop(True, source_mapper, l, dest_mapper, r, err)
+
+        # technically the "r.primary_key" check isn't
+        # needed here, but we check for this condition to limit
+        # how often this logic is invoked for memory/performance
+        # reasons, since we only need this info for a primary key
+        # destination.
+        if (
+            flag_cascaded_pks
+            and l.primary_key
+            and r.primary_key
+            and r.references(l)
+        ):
+            uowcommit.attributes[("pk_cascaded", dest, r)] = True
+
+
+def bulk_populate_inherit_keys(source_dict, source_mapper, synchronize_pairs):
+    # a simplified version of populate() used by bulk insert mode
+    for l, r in synchronize_pairs:
+        try:
+            prop = source_mapper._columntoproperty[l]
+            value = source_dict[prop.key]
+        except exc.UnmappedColumnError as err:
+            _raise_col_to_prop(False, source_mapper, l, source_mapper, r, err)
+
+        try:
+            prop = source_mapper._columntoproperty[r]
+            source_dict[prop.key] = value
+        except exc.UnmappedColumnError as err:
+            _raise_col_to_prop(True, source_mapper, l, source_mapper, r, err)
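+
+
+# Editor's note (illustrative): this services bulk INSERT with joined-table
+# inheritance -- given an assumed ``Manager`` subclass whose ``manager``
+# table's primary key references ``employee.id``, the loop above copies the
+# value between the corresponding keys of the parameter dictionary so both
+# rows share the same primary key value.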
+
+
+def clear(dest, dest_mapper, synchronize_pairs):
+    for l, r in synchronize_pairs:
+        if (
+            r.primary_key
+            and dest_mapper._get_state_attr_by_column(dest, dest.dict, r)
+            not in orm_util._none_set
+        ):
+            raise AssertionError(
+                f"Dependency rule on column '{l}' "
+                "tried to blank-out primary key "
+                f"column '{r}' on instance '{orm_util.state_str(dest)}'"
+            )
+        try:
+            dest_mapper._set_state_attr_by_column(dest, dest.dict, r, None)
+        except exc.UnmappedColumnError as err:
+            _raise_col_to_prop(True, None, l, dest_mapper, r, err)
+
+
+def update(source, source_mapper, dest, old_prefix, synchronize_pairs):
+    for l, r in synchronize_pairs:
+        try:
+            oldvalue = source_mapper._get_committed_attr_by_column(
+                source.obj(), l
+            )
+            value = source_mapper._get_state_attr_by_column(
+                source, source.dict, l, passive=PassiveFlag.PASSIVE_OFF
+            )
+        except exc.UnmappedColumnError as err:
+            _raise_col_to_prop(False, source_mapper, l, None, r, err)
+        dest[r.key] = value
+        dest[old_prefix + r.key] = oldvalue
+
+
+def populate_dict(source, source_mapper, dict_, synchronize_pairs):
+    for l, r in synchronize_pairs:
+        try:
+            value = source_mapper._get_state_attr_by_column(
+                source, source.dict, l, passive=PassiveFlag.PASSIVE_OFF
+            )
+        except exc.UnmappedColumnError as err:
+            _raise_col_to_prop(False, source_mapper, l, None, r, err)
+
+        dict_[r.key] = value
+
+
+def source_modified(uowcommit, source, source_mapper, synchronize_pairs):
+    """return true if the source object has changes from an old to a
+    new value on the given synchronize pairs
+
+    """
+    for l, r in synchronize_pairs:
+        try:
+            prop = source_mapper._columntoproperty[l]
+        except exc.UnmappedColumnError as err:
+            _raise_col_to_prop(False, source_mapper, l, None, r, err)
+        history = uowcommit.get_attribute_history(
+            source, prop.key, PassiveFlag.PASSIVE_NO_INITIALIZE
+        )
+        if history.deleted:
+            return True
+    return False
+
+
+def _raise_col_to_prop(
+    isdest, source_mapper, source_column, dest_mapper, dest_column, err
+):
+    if isdest:
+        raise exc.UnmappedColumnError(
+            "Can't execute sync rule for "
+            "destination column '%s'; mapper '%s' does not map "
+            "this column.  Try using an explicit `foreign_keys` "
+            "collection which does not include this column (or use "
+            "a viewonly=True relation)." % (dest_column, dest_mapper)
+        ) from err
+    else:
+        raise exc.UnmappedColumnError(
+            "Can't execute sync rule for "
+            "source column '%s'; mapper '%s' does not map this "
+            "column.  Try using an explicit `foreign_keys` "
+            "collection which does not include destination column "
+            "'%s' (or use a viewonly=True relation)."
+            % (source_column, source_mapper, dest_column)
+        ) from err
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/unitofwork.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/unitofwork.py
new file mode 100644
index 00000000..80897f29
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/unitofwork.py
@@ -0,0 +1,796 @@
+# orm/unitofwork.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+"""The internals for the unit of work system.
+
+The session's flush() process passes objects to a contextual object
+here, which assembles flush tasks based on mappers and their properties,
+organizes them in order of dependency, and executes.
+
+"""
+
+from __future__ import annotations
+
+from typing import Any
+from typing import Dict
+from typing import Optional
+from typing import Set
+from typing import TYPE_CHECKING
+
+from . import attributes
+from . import exc as orm_exc
+from . import util as orm_util
+from .. import event
+from .. import util
+from ..util import topological
+
+
+if TYPE_CHECKING:
+    from .dependency import DependencyProcessor
+    from .interfaces import MapperProperty
+    from .mapper import Mapper
+    from .session import Session
+    from .session import SessionTransaction
+    from .state import InstanceState
+
+
+def track_cascade_events(descriptor, prop):
+    """Establish event listeners on object attributes which handle
+    cascade-on-set/append.
+
+    """
+    key = prop.key
+
+    def append(state, item, initiator, **kw):
+        # process "save_update" cascade rules for when
+        # an instance is appended to the list of another instance
+
+        if item is None:
+            return
+
+        sess = state.session
+        if sess:
+            if sess._warn_on_events:
+                sess._flush_warning("collection append")
+
+            prop = state.manager.mapper._props[key]
+            item_state = attributes.instance_state(item)
+
+            if (
+                prop._cascade.save_update
+                and (key == initiator.key)
+                and not sess._contains_state(item_state)
+            ):
+                sess._save_or_update_state(item_state)
+        return item
+
+    def remove(state, item, initiator, **kw):
+        if item is None:
+            return
+
+        sess = state.session
+
+        prop = state.manager.mapper._props[key]
+
+        if sess and sess._warn_on_events:
+            sess._flush_warning(
+                "collection remove"
+                if prop.uselist
+                else "related attribute delete"
+            )
+
+        if (
+            item is not None
+            and item is not attributes.NEVER_SET
+            and item is not attributes.PASSIVE_NO_RESULT
+            and prop._cascade.delete_orphan
+        ):
+            # expunge pending orphans
+            item_state = attributes.instance_state(item)
+
+            if prop.mapper._is_orphan(item_state):
+                if sess and item_state in sess._new:
+                    sess.expunge(item)
+                else:
+                    # the related item may or may not itself be in a
+                    # Session, however the parent for which we are catching
+                    # the event is not in a session, so memoize this on the
+                    # item
+                    item_state._orphaned_outside_of_session = True
+
+    def set_(state, newvalue, oldvalue, initiator, **kw):
+        # process "save_update" cascade rules for when an instance
+        # is attached to another instance
+        if oldvalue is newvalue:
+            return newvalue
+
+        sess = state.session
+        if sess:
+            if sess._warn_on_events:
+                sess._flush_warning("related attribute set")
+
+            prop = state.manager.mapper._props[key]
+            if newvalue is not None:
+                newvalue_state = attributes.instance_state(newvalue)
+                if (
+                    prop._cascade.save_update
+                    and (key == initiator.key)
+                    and not sess._contains_state(newvalue_state)
+                ):
+                    sess._save_or_update_state(newvalue_state)
+
+            if (
+                oldvalue is not None
+                and oldvalue is not attributes.NEVER_SET
+                and oldvalue is not attributes.PASSIVE_NO_RESULT
+                and prop._cascade.delete_orphan
+            ):
+                # possible to reach here with attributes.NEVER_SET ?
+                oldvalue_state = attributes.instance_state(oldvalue)
+
+                if oldvalue_state in sess._new and prop.mapper._is_orphan(
+                    oldvalue_state
+                ):
+                    sess.expunge(oldvalue)
+        return newvalue
+
+    event.listen(
+        descriptor, "append_wo_mutation", append, raw=True, include_key=True
+    )
+    event.listen(
+        descriptor, "append", append, raw=True, retval=True, include_key=True
+    )
+    event.listen(
+        descriptor, "remove", remove, raw=True, retval=True, include_key=True
+    )
+    event.listen(
+        descriptor, "set", set_, raw=True, retval=True, include_key=True
+    )
+
+
+class UOWTransaction:
+    session: Session
+    transaction: SessionTransaction
+    attributes: Dict[str, Any]
+    deps: util.defaultdict[Mapper[Any], Set[DependencyProcessor]]
+    mappers: util.defaultdict[Mapper[Any], Set[InstanceState[Any]]]
+
+    def __init__(self, session: Session):
+        self.session = session
+
+        # dictionary used by external actors to
+        # store arbitrary state information.
+        self.attributes = {}
+
+        # dictionary of mappers to sets of
+        # DependencyProcessors, which are also
+        # set to be part of the sorted flush actions,
+        # which have that mapper as a parent.
+        self.deps = util.defaultdict(set)
+
+        # dictionary of mappers to sets of InstanceState
+        # items pending for flush which have that mapper
+        # as a parent.
+        self.mappers = util.defaultdict(set)
+
+        # a dictionary of Preprocess objects, which gather
+        # additional states impacted by the flush
+        # and determine if a flush action is needed
+        self.presort_actions = {}
+
+        # dictionary of PostSortRec objects, each
+        # one issues work during the flush within
+        # a certain ordering.
+        self.postsort_actions = {}
+
+        # a set of 2-tuples, each containing two
+        # PostSortRec objects where the second
+        # is dependent on the first being executed
+        # first
+        self.dependencies = set()
+
+        # dictionary of InstanceState-> (isdelete, listonly)
+        # tuples, indicating if this state is to be deleted
+        # or insert/updated, or just refreshed
+        self.states = {}
+
+        # tracks InstanceStates which will be receiving
+        # a "post update" call.  Keys are mappers,
+        # values are a set of states and a set of the
+        # columns which should be included in the update.
+        self.post_update_states = util.defaultdict(lambda: (set(), set()))
+
+    @property
+    def has_work(self):
+        return bool(self.states)
+
+    def was_already_deleted(self, state):
+        """Return ``True`` if the given state is expired and was deleted
+        previously.
+        """
+        if state.expired:
+            try:
+                state._load_expired(state, attributes.PASSIVE_OFF)
+            except orm_exc.ObjectDeletedError:
+                self.session._remove_newly_deleted([state])
+                return True
+        return False
+
+    def is_deleted(self, state):
+        """Return ``True`` if the given state is marked as deleted
+        within this uowtransaction."""
+
+        return state in self.states and self.states[state][0]
+
+    def memo(self, key, callable_):
+        if key in self.attributes:
+            return self.attributes[key]
+        else:
+            self.attributes[key] = ret = callable_()
+            return ret
+
+    def remove_state_actions(self, state):
+        """Remove pending actions for a state from the uowtransaction."""
+
+        isdelete = self.states[state][0]
+
+        self.states[state] = (isdelete, True)
+
+    def get_attribute_history(
+        self, state, key, passive=attributes.PASSIVE_NO_INITIALIZE
+    ):
+        """Facade to attributes.get_state_history(), including
+        caching of results."""
+
+        hashkey = ("history", state, key)
+
+        # cache the objects, not the states; the strong reference here
+        # prevents newly loaded objects from being dereferenced during the
+        # flush process
+
+        if hashkey in self.attributes:
+            history, state_history, cached_passive = self.attributes[hashkey]
+            # if the cached lookup was "passive" and now
+            # we want non-passive, do a non-passive lookup and re-cache
+
+            if (
+                not cached_passive & attributes.SQL_OK
+                and passive & attributes.SQL_OK
+            ):
+                impl = state.manager[key].impl
+                history = impl.get_history(
+                    state,
+                    state.dict,
+                    attributes.PASSIVE_OFF
+                    | attributes.LOAD_AGAINST_COMMITTED
+                    | attributes.NO_RAISE,
+                )
+                if history and impl.uses_objects:
+                    state_history = history.as_state()
+                else:
+                    state_history = history
+                self.attributes[hashkey] = (history, state_history, passive)
+        else:
+            impl = state.manager[key].impl
+            # TODO: store the history as (state, object) tuples
+            # so we don't have to keep converting here
+            history = impl.get_history(
+                state,
+                state.dict,
+                passive
+                | attributes.LOAD_AGAINST_COMMITTED
+                | attributes.NO_RAISE,
+            )
+            if history and impl.uses_objects:
+                state_history = history.as_state()
+            else:
+                state_history = history
+            self.attributes[hashkey] = (history, state_history, passive)
+
+        return state_history
+
+    def has_dep(self, processor):
+        return (processor, True) in self.presort_actions
+
+    def register_preprocessor(self, processor, fromparent):
+        key = (processor, fromparent)
+        if key not in self.presort_actions:
+            self.presort_actions[key] = Preprocess(processor, fromparent)
+
+    def register_object(
+        self,
+        state: InstanceState[Any],
+        isdelete: bool = False,
+        listonly: bool = False,
+        cancel_delete: bool = False,
+        operation: Optional[str] = None,
+        prop: Optional[MapperProperty] = None,
+    ) -> bool:
+        if not self.session._contains_state(state):
+            # this condition is normal when objects are registered
+            # as part of a relationship cascade operation.  it should
+            # not occur for the top-level register from Session.flush().
+            if not state.deleted and operation is not None:
+                util.warn(
+                    "Object of type %s not in session, %s operation "
+                    "along '%s' will not proceed"
+                    % (orm_util.state_class_str(state), operation, prop)
+                )
+            return False
+
+        if state not in self.states:
+            mapper = state.manager.mapper
+
+            if mapper not in self.mappers:
+                self._per_mapper_flush_actions(mapper)
+
+            self.mappers[mapper].add(state)
+            self.states[state] = (isdelete, listonly)
+        else:
+            if not listonly and (isdelete or cancel_delete):
+                self.states[state] = (isdelete, False)
+        return True
+
+    def register_post_update(self, state, post_update_cols):
+        mapper = state.manager.mapper.base_mapper
+        states, cols = self.post_update_states[mapper]
+        states.add(state)
+        cols.update(post_update_cols)
+
+    def _per_mapper_flush_actions(self, mapper):
+        saves = SaveUpdateAll(self, mapper.base_mapper)
+        deletes = DeleteAll(self, mapper.base_mapper)
+        self.dependencies.add((saves, deletes))
+
+        for dep in mapper._dependency_processors:
+            dep.per_property_preprocessors(self)
+
+        for prop in mapper.relationships:
+            if prop.viewonly:
+                continue
+            dep = prop._dependency_processor
+            dep.per_property_preprocessors(self)
+
+    @util.memoized_property
+    def _mapper_for_dep(self):
+        """return a dynamic mapping of (Mapper, DependencyProcessor) to
+        True or False, indicating if the DependencyProcessor operates
+        on objects of that Mapper.
+
+        The result is stored in the dictionary persistently once
+        calculated.
+
+        """
+        return util.PopulateDict(
+            lambda tup: tup[0]._props.get(tup[1].key) is tup[1].prop
+        )
+
+    def filter_states_for_dep(self, dep, states):
+        """Filter the given list of InstanceStates to those relevant to the
+        given DependencyProcessor.
+
+        """
+        mapper_for_dep = self._mapper_for_dep
+        return [s for s in states if mapper_for_dep[(s.manager.mapper, dep)]]
+
+    def states_for_mapper_hierarchy(self, mapper, isdelete, listonly):
+        checktup = (isdelete, listonly)
+        for mapper in mapper.base_mapper.self_and_descendants:
+            for state in self.mappers[mapper]:
+                if self.states[state] == checktup:
+                    yield state
+
+    def _generate_actions(self):
+        """Generate the full, unsorted collection of PostSortRecs as
+        well as dependency pairs for this UOWTransaction.
+
+        """
+        # execute presort_actions, until all states
+        # have been processed.   a presort_action might
+        # add new states to the uow.
+        while True:
+            ret = False
+            for action in list(self.presort_actions.values()):
+                if action.execute(self):
+                    ret = True
+            if not ret:
+                break
+
+        # see if the graph of mapper dependencies has cycles.
+        self.cycles = cycles = topological.find_cycles(
+            self.dependencies, list(self.postsort_actions.values())
+        )
+
+        if cycles:
+            # if yes, break the per-mapper actions into
+            # per-state actions
+            convert = {
+                rec: set(rec.per_state_flush_actions(self)) for rec in cycles
+            }
+
+            # rewrite the existing dependencies to point to
+            # the per-state actions for those per-mapper actions
+            # that were broken up.
+            for edge in list(self.dependencies):
+                if (
+                    None in edge
+                    or edge[0].disabled
+                    or edge[1].disabled
+                    or cycles.issuperset(edge)
+                ):
+                    self.dependencies.remove(edge)
+                elif edge[0] in cycles:
+                    self.dependencies.remove(edge)
+                    for dep in convert[edge[0]]:
+                        self.dependencies.add((dep, edge[1]))
+                elif edge[1] in cycles:
+                    self.dependencies.remove(edge)
+                    for dep in convert[edge[1]]:
+                        self.dependencies.add((edge[0], dep))
+
+        return {
+            a for a in self.postsort_actions.values() if not a.disabled
+        }.difference(cycles)
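+
+    # Editor's note (illustrative): a dependency cycle arises e.g. from a
+    # self-referential relationship such as ``Node.parent =
+    # relationship(Node, remote_side=...)``; the per-mapper SaveUpdateAll /
+    # DeleteAll actions are then broken into per-state SaveUpdateState /
+    # DeleteState records which can be ordered row by row.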
+
+    def execute(self) -> None:
+        postsort_actions = self._generate_actions()
+
+        postsort_actions = sorted(
+            postsort_actions,
+            key=lambda item: item.sort_key,
+        )
+        # sort = topological.sort(self.dependencies, postsort_actions)
+        # print "--------------"
+        # print "\ndependencies:", self.dependencies
+        # print "\ncycles:", self.cycles
+        # print "\nsort:", list(sort)
+        # print "\nCOUNT OF POSTSORT ACTIONS", len(postsort_actions)
+
+        # execute
+        if self.cycles:
+            for subset in topological.sort_as_subsets(
+                self.dependencies, postsort_actions
+            ):
+                set_ = set(subset)
+                while set_:
+                    n = set_.pop()
+                    n.execute_aggregate(self, set_)
+        else:
+            for rec in topological.sort(self.dependencies, postsort_actions):
+                rec.execute(self)
+
+    def finalize_flush_changes(self) -> None:
+        """Mark processed objects as clean / deleted after a successful
+        flush().
+
+        This method is called within the flush() method after the
+        execute() method has succeeded and the transaction has been committed.
+
+        """
+        if not self.states:
+            return
+
+        states = set(self.states)
+        isdel = {
+            s for (s, (isdelete, listonly)) in self.states.items() if isdelete
+        }
+        other = states.difference(isdel)
+        if isdel:
+            self.session._remove_newly_deleted(isdel)
+        if other:
+            self.session._register_persistent(other)
+
+
+class IterateMappersMixin:
+    __slots__ = ()
+
+    def _mappers(self, uow):
+        if self.fromparent:
+            return iter(
+                m
+                for m in self.dependency_processor.parent.self_and_descendants
+                if uow._mapper_for_dep[(m, self.dependency_processor)]
+            )
+        else:
+            return self.dependency_processor.mapper.self_and_descendants
+
+
+class Preprocess(IterateMappersMixin):
+    __slots__ = (
+        "dependency_processor",
+        "fromparent",
+        "processed",
+        "setup_flush_actions",
+    )
+
+    def __init__(self, dependency_processor, fromparent):
+        self.dependency_processor = dependency_processor
+        self.fromparent = fromparent
+        self.processed = set()
+        self.setup_flush_actions = False
+
+    def execute(self, uow):
+        delete_states = set()
+        save_states = set()
+
+        for mapper in self._mappers(uow):
+            for state in uow.mappers[mapper].difference(self.processed):
+                (isdelete, listonly) = uow.states[state]
+                if not listonly:
+                    if isdelete:
+                        delete_states.add(state)
+                    else:
+                        save_states.add(state)
+
+        if delete_states:
+            self.dependency_processor.presort_deletes(uow, delete_states)
+            self.processed.update(delete_states)
+        if save_states:
+            self.dependency_processor.presort_saves(uow, save_states)
+            self.processed.update(save_states)
+
+        if delete_states or save_states:
+            if not self.setup_flush_actions and (
+                self.dependency_processor.prop_has_changes(
+                    uow, delete_states, True
+                )
+                or self.dependency_processor.prop_has_changes(
+                    uow, save_states, False
+                )
+            ):
+                self.dependency_processor.per_property_flush_actions(uow)
+                self.setup_flush_actions = True
+            return True
+        else:
+            return False
+
+
+class PostSortRec:
+    __slots__ = ("disabled",)
+
+    def __new__(cls, uow, *args):
+        # interning constructor: a record of a given class constructed with
+        # the same arguments is a single instance within a particular
+        # UOWTransaction, keyed in uow.postsort_actions
+        key = (cls,) + args
+        if key in uow.postsort_actions:
+            return uow.postsort_actions[key]
+        else:
+            uow.postsort_actions[key] = ret = object.__new__(cls)
+            ret.disabled = False
+            return ret
+
+    def execute_aggregate(self, uow, recs):
+        self.execute(uow)
+
+
+class ProcessAll(IterateMappersMixin, PostSortRec):
+    __slots__ = "dependency_processor", "isdelete", "fromparent", "sort_key"
+
+    def __init__(self, uow, dependency_processor, isdelete, fromparent):
+        self.dependency_processor = dependency_processor
+        self.sort_key = (
+            "ProcessAll",
+            self.dependency_processor.sort_key,
+            isdelete,
+        )
+        self.isdelete = isdelete
+        self.fromparent = fromparent
+        uow.deps[dependency_processor.parent.base_mapper].add(
+            dependency_processor
+        )
+
+    def execute(self, uow):
+        states = self._elements(uow)
+        if self.isdelete:
+            self.dependency_processor.process_deletes(uow, states)
+        else:
+            self.dependency_processor.process_saves(uow, states)
+
+    def per_state_flush_actions(self, uow):
+        # this is handled by SaveUpdateAll and DeleteAll,
+        # since a ProcessAll should unconditionally be pulled
+        # into per-state if either the parent/child mappers
+        # are part of a cycle
+        return iter([])
+
+    def __repr__(self):
+        return "%s(%s, isdelete=%s)" % (
+            self.__class__.__name__,
+            self.dependency_processor,
+            self.isdelete,
+        )
+
+    def _elements(self, uow):
+        for mapper in self._mappers(uow):
+            for state in uow.mappers[mapper]:
+                (isdelete, listonly) = uow.states[state]
+                if isdelete == self.isdelete and not listonly:
+                    yield state
+
+
+class PostUpdateAll(PostSortRec):
+    __slots__ = "mapper", "isdelete", "sort_key"
+
+    def __init__(self, uow, mapper, isdelete):
+        self.mapper = mapper
+        self.isdelete = isdelete
+        self.sort_key = ("PostUpdateAll", mapper._sort_key, isdelete)
+
+    @util.preload_module("sqlalchemy.orm.persistence")
+    def execute(self, uow):
+        persistence = util.preloaded.orm_persistence
+        states, cols = uow.post_update_states[self.mapper]
+        states = [s for s in states if uow.states[s][0] == self.isdelete]
+
+        persistence.post_update(self.mapper, states, uow, cols)
+
+
+class SaveUpdateAll(PostSortRec):
+    __slots__ = ("mapper", "sort_key")
+
+    def __init__(self, uow, mapper):
+        self.mapper = mapper
+        self.sort_key = ("SaveUpdateAll", mapper._sort_key)
+        assert mapper is mapper.base_mapper
+
+    @util.preload_module("sqlalchemy.orm.persistence")
+    def execute(self, uow):
+        util.preloaded.orm_persistence.save_obj(
+            self.mapper,
+            uow.states_for_mapper_hierarchy(self.mapper, False, False),
+            uow,
+        )
+
+    def per_state_flush_actions(self, uow):
+        states = list(
+            uow.states_for_mapper_hierarchy(self.mapper, False, False)
+        )
+        base_mapper = self.mapper.base_mapper
+        delete_all = DeleteAll(uow, base_mapper)
+        for state in states:
+            # keep saves before deletes -
+            # this ensures 'row switch' operations work
+            action = SaveUpdateState(uow, state)
+            uow.dependencies.add((action, delete_all))
+            yield action
+
+        for dep in uow.deps[self.mapper]:
+            states_for_prop = uow.filter_states_for_dep(dep, states)
+            dep.per_state_flush_actions(uow, states_for_prop, False)
+
+    def __repr__(self):
+        return "%s(%s)" % (self.__class__.__name__, self.mapper)
+
+
+class DeleteAll(PostSortRec):
+    __slots__ = ("mapper", "sort_key")
+
+    def __init__(self, uow, mapper):
+        self.mapper = mapper
+        self.sort_key = ("DeleteAll", mapper._sort_key)
+        assert mapper is mapper.base_mapper
+
+    @util.preload_module("sqlalchemy.orm.persistence")
+    def execute(self, uow):
+        util.preloaded.orm_persistence.delete_obj(
+            self.mapper,
+            uow.states_for_mapper_hierarchy(self.mapper, True, False),
+            uow,
+        )
+
+    def per_state_flush_actions(self, uow):
+        states = list(
+            uow.states_for_mapper_hierarchy(self.mapper, True, False)
+        )
+        base_mapper = self.mapper.base_mapper
+        save_all = SaveUpdateAll(uow, base_mapper)
+        for state in states:
+            # keep saves before deletes -
+            # this ensures 'row switch' operations work
+            action = DeleteState(uow, state)
+            uow.dependencies.add((save_all, action))
+            yield action
+
+        for dep in uow.deps[self.mapper]:
+            states_for_prop = uow.filter_states_for_dep(dep, states)
+            dep.per_state_flush_actions(uow, states_for_prop, True)
+
+    def __repr__(self):
+        return "%s(%s)" % (self.__class__.__name__, self.mapper)
+
+
+class ProcessState(PostSortRec):
+    __slots__ = "dependency_processor", "isdelete", "state", "sort_key"
+
+    def __init__(self, uow, dependency_processor, isdelete, state):
+        self.dependency_processor = dependency_processor
+        self.sort_key = ("ProcessState", dependency_processor.sort_key)
+        self.isdelete = isdelete
+        self.state = state
+
+    def execute_aggregate(self, uow, recs):
+        cls_ = self.__class__
+        dependency_processor = self.dependency_processor
+        isdelete = self.isdelete
+        our_recs = [
+            r
+            for r in recs
+            if r.__class__ is cls_
+            and r.dependency_processor is dependency_processor
+            and r.isdelete is isdelete
+        ]
+        recs.difference_update(our_recs)
+        states = [self.state] + [r.state for r in our_recs]
+        if isdelete:
+            dependency_processor.process_deletes(uow, states)
+        else:
+            dependency_processor.process_saves(uow, states)
+
+    def __repr__(self):
+        return "%s(%s, %s, delete=%s)" % (
+            self.__class__.__name__,
+            self.dependency_processor,
+            orm_util.state_str(self.state),
+            self.isdelete,
+        )
+
+
+class SaveUpdateState(PostSortRec):
+    __slots__ = "state", "mapper", "sort_key"
+
+    def __init__(self, uow, state):
+        self.state = state
+        self.mapper = state.mapper.base_mapper
+        self.sort_key = ("ProcessState", self.mapper._sort_key)
+
+    @util.preload_module("sqlalchemy.orm.persistence")
+    def execute_aggregate(self, uow, recs):
+        persistence = util.preloaded.orm_persistence
+        cls_ = self.__class__
+        mapper = self.mapper
+        our_recs = [
+            r for r in recs if r.__class__ is cls_ and r.mapper is mapper
+        ]
+        recs.difference_update(our_recs)
+        persistence.save_obj(
+            mapper, [self.state] + [r.state for r in our_recs], uow
+        )
+
+    def __repr__(self):
+        return "%s(%s)" % (
+            self.__class__.__name__,
+            orm_util.state_str(self.state),
+        )
+
+
+class DeleteState(PostSortRec):
+    __slots__ = "state", "mapper", "sort_key"
+
+    def __init__(self, uow, state):
+        self.state = state
+        self.mapper = state.mapper.base_mapper
+        self.sort_key = ("DeleteState", self.mapper._sort_key)
+
+    @util.preload_module("sqlalchemy.orm.persistence")
+    def execute_aggregate(self, uow, recs):
+        persistence = util.preloaded.orm_persistence
+        cls_ = self.__class__
+        mapper = self.mapper
+        our_recs = [
+            r for r in recs if r.__class__ is cls_ and r.mapper is mapper
+        ]
+        recs.difference_update(our_recs)
+        states = [self.state] + [r.state for r in our_recs]
+        persistence.delete_obj(
+            mapper, [s for s in states if uow.states[s][0]], uow
+        )
+
+    def __repr__(self):
+        return "%s(%s)" % (
+            self.__class__.__name__,
+            orm_util.state_str(self.state),
+        )
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/util.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/util.py
new file mode 100644
index 00000000..48282b2d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/util.py
@@ -0,0 +1,2402 @@
+# orm/util.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+from __future__ import annotations
+
+import enum
+import functools
+import re
+import types
+import typing
+from typing import AbstractSet
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import FrozenSet
+from typing import Generic
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Match
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+import weakref
+
+from . import attributes  # noqa
+from . import exc
+from ._typing import _O
+from ._typing import insp_is_aliased_class
+from ._typing import insp_is_mapper
+from ._typing import prop_is_relationship
+from .base import _class_to_mapper as _class_to_mapper
+from .base import _MappedAnnotationBase
+from .base import _never_set as _never_set  # noqa: F401
+from .base import _none_only_set as _none_only_set  # noqa: F401
+from .base import _none_set as _none_set  # noqa: F401
+from .base import attribute_str as attribute_str  # noqa: F401
+from .base import class_mapper as class_mapper
+from .base import DynamicMapped
+from .base import InspectionAttr as InspectionAttr
+from .base import instance_str as instance_str  # noqa: F401
+from .base import Mapped
+from .base import object_mapper as object_mapper
+from .base import object_state as object_state  # noqa: F401
+from .base import opt_manager_of_class
+from .base import ORMDescriptor
+from .base import state_attribute_str as state_attribute_str  # noqa: F401
+from .base import state_class_str as state_class_str  # noqa: F401
+from .base import state_str as state_str  # noqa: F401
+from .base import WriteOnlyMapped
+from .interfaces import CriteriaOption
+from .interfaces import MapperProperty as MapperProperty
+from .interfaces import ORMColumnsClauseRole
+from .interfaces import ORMEntityColumnsClauseRole
+from .interfaces import ORMFromClauseRole
+from .path_registry import PathRegistry as PathRegistry
+from .. import event
+from .. import exc as sa_exc
+from .. import inspection
+from .. import sql
+from .. import util
+from ..engine.result import result_tuple
+from ..sql import coercions
+from ..sql import expression
+from ..sql import lambdas
+from ..sql import roles
+from ..sql import util as sql_util
+from ..sql import visitors
+from ..sql._typing import is_selectable
+from ..sql.annotation import SupportsCloneAnnotations
+from ..sql.base import ColumnCollection
+from ..sql.cache_key import HasCacheKey
+from ..sql.cache_key import MemoizedHasCacheKey
+from ..sql.elements import ColumnElement
+from ..sql.elements import KeyedColumnElement
+from ..sql.selectable import FromClause
+from ..util.langhelpers import MemoizedSlots
+from ..util.typing import de_stringify_annotation as _de_stringify_annotation
+from ..util.typing import eval_name_only as _eval_name_only
+from ..util.typing import fixup_container_fwd_refs
+from ..util.typing import get_origin
+from ..util.typing import is_origin_of_cls
+from ..util.typing import Literal
+from ..util.typing import Protocol
+
+if typing.TYPE_CHECKING:
+    from ._typing import _EntityType
+    from ._typing import _IdentityKeyType
+    from ._typing import _InternalEntityType
+    from ._typing import _ORMCOLEXPR
+    from .context import _MapperEntity
+    from .context import ORMCompileState
+    from .mapper import Mapper
+    from .path_registry import AbstractEntityRegistry
+    from .query import Query
+    from .relationships import RelationshipProperty
+    from ..engine import Row
+    from ..engine import RowMapping
+    from ..sql._typing import _CE
+    from ..sql._typing import _ColumnExpressionArgument
+    from ..sql._typing import _EquivalentColumnMap
+    from ..sql._typing import _FromClauseArgument
+    from ..sql._typing import _OnClauseArgument
+    from ..sql._typing import _PropagateAttrsType
+    from ..sql.annotation import _SA
+    from ..sql.base import ReadOnlyColumnCollection
+    from ..sql.elements import BindParameter
+    from ..sql.selectable import _ColumnsClauseElement
+    from ..sql.selectable import Select
+    from ..sql.selectable import Selectable
+    from ..sql.visitors import anon_map
+    from ..util.typing import _AnnotationScanType
+
+_T = TypeVar("_T", bound=Any)
+
+all_cascades = frozenset(
+    (
+        "delete",
+        "delete-orphan",
+        "all",
+        "merge",
+        "expunge",
+        "save-update",
+        "refresh-expire",
+        "none",
+    )
+)
+
+_de_stringify_partial = functools.partial(
+    functools.partial,
+    locals_=util.immutabledict(
+        {
+            "Mapped": Mapped,
+            "WriteOnlyMapped": WriteOnlyMapped,
+            "DynamicMapped": DynamicMapped,
+        }
+    ),
+)
+
+# partial is practically useless as we have to write out the whole
+# function and maintain the signature anyway
+
+
+class _DeStringifyAnnotation(Protocol):
+    def __call__(
+        self,
+        cls: Type[Any],
+        annotation: _AnnotationScanType,
+        originating_module: str,
+        *,
+        str_cleanup_fn: Optional[Callable[[str, str], str]] = None,
+        include_generic: bool = False,
+    ) -> Type[Any]: ...
+
+
+de_stringify_annotation = cast(
+    _DeStringifyAnnotation, _de_stringify_partial(_de_stringify_annotation)
+)
+
+
+class _EvalNameOnly(Protocol):
+    def __call__(self, name: str, module_name: str) -> Any: ...
+
+
+eval_name_only = cast(_EvalNameOnly, _de_stringify_partial(_eval_name_only))
+
+
+class CascadeOptions(FrozenSet[str]):
+    """Keeps track of the options sent to
+    :paramref:`.relationship.cascade`"""
+
+    _add_w_all_cascades = all_cascades.difference(
+        ["all", "none", "delete-orphan"]
+    )
+    _allowed_cascades = all_cascades
+
+    _viewonly_cascades = ["expunge", "all", "none", "refresh-expire", "merge"]
+
+    __slots__ = (
+        "save_update",
+        "delete",
+        "refresh_expire",
+        "merge",
+        "expunge",
+        "delete_orphan",
+    )
+
+    save_update: bool
+    delete: bool
+    refresh_expire: bool
+    merge: bool
+    expunge: bool
+    delete_orphan: bool
+
+    def __new__(
+        cls, value_list: Optional[Union[Iterable[str], str]]
+    ) -> CascadeOptions:
+        if isinstance(value_list, str) or value_list is None:
+            return cls.from_string(value_list)  # type: ignore
+        values = set(value_list)
+        if values.difference(cls._allowed_cascades):
+            raise sa_exc.ArgumentError(
+                "Invalid cascade option(s): %s"
+                % ", ".join(
+                    [
+                        repr(x)
+                        for x in sorted(
+                            values.difference(cls._allowed_cascades)
+                        )
+                    ]
+                )
+            )
+
+        if "all" in values:
+            values.update(cls._add_w_all_cascades)
+        if "none" in values:
+            values.clear()
+        values.discard("all")
+
+        self = super().__new__(cls, values)
+        self.save_update = "save-update" in values
+        self.delete = "delete" in values
+        self.refresh_expire = "refresh-expire" in values
+        self.merge = "merge" in values
+        self.expunge = "expunge" in values
+        self.delete_orphan = "delete-orphan" in values
+
+        if self.delete_orphan and not self.delete:
+            util.warn("The 'delete-orphan' cascade option requires 'delete'.")
+        return self
+
+    def __repr__(self):
+        return "CascadeOptions(%r)" % (",".join([x for x in sorted(self)]))
+
+    @classmethod
+    def from_string(cls, arg):
+        values = [c for c in re.split(r"\s*,\s*", arg or "") if c]
+        return cls(values)
+
+
+def _validator_events(desc, key, validator, include_removes, include_backrefs):
+    """Runs a validation method on an attribute value to be set or
+    appended.
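+
+    This hook backs the public :func:`_orm.validates` decorator; a brief
+    illustrative sketch (assuming a typical declarative mapping with
+    ``Base``, ``mapped_column``, etc.)::
+
+        class User(Base):
+            __tablename__ = "user"
+
+            id = mapped_column(Integer, primary_key=True)
+            email = mapped_column(String)
+
+            @validates("email")
+            def validate_email(self, key, value):
+                assert "@" in value
+                return value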
+    """
+
+    if not include_backrefs:
+
+        def detect_is_backref(state, initiator):
+            impl = state.manager[key].impl
+            return initiator.impl is not impl
+
+    if include_removes:
+
+        def append(state, value, initiator):
+            if initiator.op is not attributes.OP_BULK_REPLACE and (
+                include_backrefs or not detect_is_backref(state, initiator)
+            ):
+                return validator(state.obj(), key, value, False)
+            else:
+                return value
+
+        def bulk_set(state, values, initiator):
+            if include_backrefs or not detect_is_backref(state, initiator):
+                obj = state.obj()
+                values[:] = [
+                    validator(obj, key, value, False) for value in values
+                ]
+
+        def set_(state, value, oldvalue, initiator):
+            if include_backrefs or not detect_is_backref(state, initiator):
+                return validator(state.obj(), key, value, False)
+            else:
+                return value
+
+        def remove(state, value, initiator):
+            if include_backrefs or not detect_is_backref(state, initiator):
+                validator(state.obj(), key, value, True)
+
+    else:
+
+        def append(state, value, initiator):
+            if initiator.op is not attributes.OP_BULK_REPLACE and (
+                include_backrefs or not detect_is_backref(state, initiator)
+            ):
+                return validator(state.obj(), key, value)
+            else:
+                return value
+
+        def bulk_set(state, values, initiator):
+            if include_backrefs or not detect_is_backref(state, initiator):
+                obj = state.obj()
+                values[:] = [validator(obj, key, value) for value in values]
+
+        def set_(state, value, oldvalue, initiator):
+            if include_backrefs or not detect_is_backref(state, initiator):
+                return validator(state.obj(), key, value)
+            else:
+                return value
+
+    event.listen(desc, "append", append, raw=True, retval=True)
+    event.listen(desc, "bulk_replace", bulk_set, raw=True)
+    event.listen(desc, "set", set_, raw=True, retval=True)
+    if include_removes:
+        event.listen(desc, "remove", remove, raw=True, retval=True)
+
+
+def polymorphic_union(
+    table_map, typecolname, aliasname="p_union", cast_nulls=True
+):
+    """Create a ``UNION`` statement used by a polymorphic mapper.
+
+    See  :ref:`concrete_inheritance` for an example of how
+    this is used.
+
+    :param table_map: mapping of polymorphic identities to
+     :class:`_schema.Table` objects.
+    :param typecolname: string name of a "discriminator" column, which will be
+     derived from the query, producing the polymorphic identity for
+     each row.  If ``None``, no polymorphic discriminator is generated.
+    :param aliasname: name of the :func:`~sqlalchemy.sql.expression.alias()`
+     construct generated.
+    :param cast_nulls: if True, non-existent columns, which are represented
+     as labeled NULLs, will be passed into CAST.   This is a legacy behavior
+     that is problematic on some backends such as Oracle, in which case it
+     can be set to False.
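+
+    A minimal sketch (editorial; ``employee_table`` and ``manager_table``
+    are assumed :class:`_schema.Table` objects with compatible columns)::
+
+        pjoin = polymorphic_union(
+            {"employee": employee_table, "manager": manager_table},
+            "type",
+            "pjoin",
+        )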
+
+    """
+
+    colnames: util.OrderedSet[str] = util.OrderedSet()
+    colnamemaps = {}
+    types = {}
+    for key in table_map:
+        table = table_map[key]
+
+        table = coercions.expect(
+            roles.StrictFromClauseRole, table, allow_select=True
+        )
+        table_map[key] = table
+
+        m = {}
+        for c in table.c:
+            if c.key == typecolname:
+                raise sa_exc.InvalidRequestError(
+                    "Polymorphic union can't use '%s' as the discriminator "
+                    "column due to mapped column %r; please apply the "
+                    "'typecolname' "
+                    "argument; this is available on "
+                    "ConcreteBase as '_concrete_discriminator_name'"
+                    % (typecolname, c)
+                )
+            colnames.add(c.key)
+            m[c.key] = c
+            types[c.key] = c.type
+        colnamemaps[table] = m
+
+    def col(name, table):
+        try:
+            return colnamemaps[table][name]
+        except KeyError:
+            if cast_nulls:
+                return sql.cast(sql.null(), types[name]).label(name)
+            else:
+                return sql.type_coerce(sql.null(), types[name]).label(name)
+
+    result = []
+    for type_, table in table_map.items():
+        if typecolname is not None:
+            result.append(
+                sql.select(
+                    *(
+                        [col(name, table) for name in colnames]
+                        + [
+                            sql.literal_column(
+                                sql_util._quote_ddl_expr(type_)
+                            ).label(typecolname)
+                        ]
+                    )
+                ).select_from(table)
+            )
+        else:
+            result.append(
+                sql.select(
+                    *[col(name, table) for name in colnames]
+                ).select_from(table)
+            )
+    return sql.union_all(*result).alias(aliasname)
+
+
+def identity_key(
+    class_: Optional[Type[_T]] = None,
+    ident: Union[Any, Tuple[Any, ...]] = None,
+    *,
+    instance: Optional[_T] = None,
+    row: Optional[Union[Row[Any], RowMapping]] = None,
+    identity_token: Optional[Any] = None,
+) -> _IdentityKeyType[_T]:
+    r"""Generate "identity key" tuples, as are used as keys in the
+    :attr:`.Session.identity_map` dictionary.
+
+    This function has several call styles:
+
+    * ``identity_key(class, ident, identity_token=token)``
+
+      This form receives a mapped class and a primary key scalar or
+      tuple as an argument.
+
+      E.g.::
+
+        >>> identity_key(MyClass, (1, 2))
+        (<class '__main__.MyClass'>, (1, 2), None)
+
+      :param class: mapped class (must be a positional argument)
+      :param ident: primary key, may be a scalar or tuple argument.
+      :param identity_token: optional identity token
+
+        .. versionadded:: 1.2 added identity_token
+
+
+    * ``identity_key(instance=instance)``
+
+      This form will produce the identity key for a given instance.  The
+      instance need not be persistent; it's only required that its primary
+      key attributes be populated (else the key will contain ``None`` for
+      those missing values).
+
+      E.g.::
+
+        >>> instance = MyClass(1, 2)
+        >>> identity_key(instance=instance)
+        (<class '__main__.MyClass'>, (1, 2), None)
+
+      In this form, the given instance is ultimately run through
+      :meth:`_orm.Mapper.identity_key_from_instance`, which will have the
+      effect of performing a database check for the corresponding row
+      if the object is expired.
+
+      :param instance: object instance (must be given as a keyword arg)
+
+    * ``identity_key(class, row=row, identity_token=token)``
+
+      This form is similar to the class/tuple form, except it is passed a
+      database result row as a :class:`.Row` or :class:`.RowMapping` object.
+
+      E.g.::
+
+        >>> row = connection.execute(text("select * from table where a=1 and b=2")).first()
+        >>> identity_key(MyClass, row=row)
+        (<class '__main__.MyClass'>, (1, 2), None)
+
+      :param class: mapped class (must be a positional argument)
+      :param row: :class:`.Row` row returned by a :class:`_engine.CursorResult`
+       (must be given as a keyword arg)
+      :param identity_token: optional identity token
+
+        .. versionadded:: 1.2 added identity_token
+
+    """  # noqa: E501
+    if class_ is not None:
+        mapper = class_mapper(class_)
+        if row is None:
+            if ident is None:
+                raise sa_exc.ArgumentError("ident or row is required")
+            return mapper.identity_key_from_primary_key(
+                tuple(util.to_list(ident)), identity_token=identity_token
+            )
+        else:
+            return mapper.identity_key_from_row(
+                row, identity_token=identity_token
+            )
+    elif instance is not None:
+        mapper = object_mapper(instance)
+        return mapper.identity_key_from_instance(instance)
+    else:
+        raise sa_exc.ArgumentError("class or instance is required")
+
+
+class _TraceAdaptRole(enum.Enum):
+    """Enumeration of all the use cases for ORMAdapter.
+
+    ORMAdapter remains one of the most complicated aspects of the ORM, as it
+    is used for in-place adaptation of column expressions to be applied to a
+    SELECT, replacing :class:`.Table` and other objects that are mapped to
+    classes with aliases of those tables in the case of joined eager loading,
+    or with whole user-defined subqueries in the case of polymorphic loading
+    as used with concrete mappings or other custom "with polymorphic"
+    parameters. The enumerations provide an overview of all the use cases
+    served by ORMAdapter, a layer of formality around the introduction of new
+    ORMAdapter use cases (of which none are anticipated), as well as a means
+    to trace the origins of a particular ORMAdapter during runtime debugging.
+
+    SQLAlchemy 2.0 has greatly scaled back ORM features which relied heavily
+    on open-ended statement adaptation, including the
+    ``Query.with_polymorphic()`` and ``Query.select_from_entity()`` methods,
+    favoring user-explicit aliasing schemes using the ``aliased()`` and
+    ``with_polymorphic()`` standalone constructs; these still use adaptation,
+    but the adaptation is applied in a narrower scope.
+
+    """
+
+    # aliased() use that is used to adapt individual attributes at query
+    # construction time
+    ALIASED_INSP = enum.auto()
+
+    # joinedload cases; typically adapt an ON clause of a relationship
+    # join
+    JOINEDLOAD_USER_DEFINED_ALIAS = enum.auto()
+    JOINEDLOAD_PATH_WITH_POLYMORPHIC = enum.auto()
+    JOINEDLOAD_MEMOIZED_ADAPTER = enum.auto()
+
+    # polymorphic cases - these are complex ones that replace FROM
+    # clauses, replacing tables with subqueries
+    MAPPER_POLYMORPHIC_ADAPTER = enum.auto()
+    WITH_POLYMORPHIC_ADAPTER = enum.auto()
+    WITH_POLYMORPHIC_ADAPTER_RIGHT_JOIN = enum.auto()
+    DEPRECATED_JOIN_ADAPT_RIGHT_SIDE = enum.auto()
+
+    # the from_statement() case, used only to adapt individual attributes
+    # from a given statement to local ORM attributes at result fetching
+    # time.  assigned to ORMCompileState._from_obj_alias
+    ADAPT_FROM_STATEMENT = enum.auto()
+
+    # the joinedload for queries that have LIMIT/OFFSET/DISTINCT case;
+    # the query is placed inside of a subquery with the LIMIT/OFFSET/etc.,
+    # joinedloads are then placed on the outside.
+    # assigned to ORMCompileState.compound_eager_adapter
+    COMPOUND_EAGER_STATEMENT = enum.auto()
+
+    # the legacy Query._set_select_from() case.
+    # this is needed for Query's set operations (i.e. UNION, etc.)
+    # as well as "legacy from_self()", which, while removed from 2.0 as
+    # public API, is used for the Query.count() method.  this one
+    # still does full statement traversal
+    # assigned to ORMCompileState._from_obj_alias
+    LEGACY_SELECT_FROM_ALIAS = enum.auto()
+
+
+class ORMStatementAdapter(sql_util.ColumnAdapter):
+    """ColumnAdapter which includes a role attribute."""
+
+    __slots__ = ("role",)
+
+    def __init__(
+        self,
+        role: _TraceAdaptRole,
+        selectable: Selectable,
+        *,
+        equivalents: Optional[_EquivalentColumnMap] = None,
+        adapt_required: bool = False,
+        allow_label_resolve: bool = True,
+        anonymize_labels: bool = False,
+        adapt_on_names: bool = False,
+        adapt_from_selectables: Optional[AbstractSet[FromClause]] = None,
+    ):
+        self.role = role
+        super().__init__(
+            selectable,
+            equivalents=equivalents,
+            adapt_required=adapt_required,
+            allow_label_resolve=allow_label_resolve,
+            anonymize_labels=anonymize_labels,
+            adapt_on_names=adapt_on_names,
+            adapt_from_selectables=adapt_from_selectables,
+        )
+
+
+class ORMAdapter(sql_util.ColumnAdapter):
+    """ColumnAdapter subclass which excludes adaptation of entities from
+    non-matching mappers.
+
+    """
+
+    __slots__ = ("role", "mapper", "is_aliased_class", "aliased_insp")
+
+    is_aliased_class: bool
+    aliased_insp: Optional[AliasedInsp[Any]]
+
+    def __init__(
+        self,
+        role: _TraceAdaptRole,
+        entity: _InternalEntityType[Any],
+        *,
+        equivalents: Optional[_EquivalentColumnMap] = None,
+        adapt_required: bool = False,
+        allow_label_resolve: bool = True,
+        anonymize_labels: bool = False,
+        selectable: Optional[Selectable] = None,
+        limit_on_entity: bool = True,
+        adapt_on_names: bool = False,
+        adapt_from_selectables: Optional[AbstractSet[FromClause]] = None,
+    ):
+        self.role = role
+        self.mapper = entity.mapper
+        if selectable is None:
+            selectable = entity.selectable
+        if insp_is_aliased_class(entity):
+            self.is_aliased_class = True
+            self.aliased_insp = entity
+        else:
+            self.is_aliased_class = False
+            self.aliased_insp = None
+
+        super().__init__(
+            selectable,
+            equivalents,
+            adapt_required=adapt_required,
+            allow_label_resolve=allow_label_resolve,
+            anonymize_labels=anonymize_labels,
+            include_fn=self._include_fn if limit_on_entity else None,
+            adapt_on_names=adapt_on_names,
+            adapt_from_selectables=adapt_from_selectables,
+        )
+
+    def _include_fn(self, elem):
+        entity = elem._annotations.get("parentmapper", None)
+
+        return not entity or entity.isa(self.mapper) or self.mapper.isa(entity)
+
+
+class AliasedClass(
+    inspection.Inspectable["AliasedInsp[_O]"], ORMColumnsClauseRole[_O]
+):
+    r"""Represents an "aliased" form of a mapped class for usage with Query.
+
+    The ORM equivalent of a :func:`~sqlalchemy.sql.expression.alias`
+    construct, this object mimics the mapped class using a
+    ``__getattr__`` scheme and maintains a reference to a
+    real :class:`~sqlalchemy.sql.expression.Alias` object.
+
+    A primary purpose of :class:`.AliasedClass` is to serve as an alternate
+    within a SQL statement generated by the ORM, such that an existing
+    mapped entity can be used in multiple contexts.   A simple example::
+
+        # find all pairs of users with the same name
+        user_alias = aliased(User)
+        session.query(User, user_alias).join(
+            (user_alias, User.id > user_alias.id)
+        ).filter(User.name == user_alias.name)
+
+    :class:`.AliasedClass` is also capable of mapping an existing mapped
+    class to an entirely new selectable, provided this selectable is column-
+    compatible with the existing mapped selectable, and it can also be
+    configured in a mapping as the target of a :func:`_orm.relationship`.
+    See the links below for examples.
+
+    The :class:`.AliasedClass` object is constructed typically using the
+    :func:`_orm.aliased` function.   It also is produced with additional
+    configuration when using the :func:`_orm.with_polymorphic` function.
+
+    The resulting object is an instance of :class:`.AliasedClass`.
+    This object implements an attribute scheme which produces the
+    same attribute and method interface as the original mapped
+    class, allowing :class:`.AliasedClass` to be compatible
+    with any attribute technique which works on the original class,
+    including hybrid attributes (see :ref:`hybrids_toplevel`).
+
+    The :class:`.AliasedClass` can be inspected for its underlying
+    :class:`_orm.Mapper`, aliased selectable, and other information
+    using :func:`_sa.inspect`::
+
+        from sqlalchemy import inspect
+
+        my_alias = aliased(MyClass)
+        insp = inspect(my_alias)
+
+    The resulting inspection object is an instance of :class:`.AliasedInsp`.
+
+
+    .. seealso::
+
+        :func:`.aliased`
+
+        :func:`.with_polymorphic`
+
+        :ref:`relationship_aliased_class`
+
+        :ref:`relationship_to_window_function`
+
+
+    """
+
+    __name__: str
+
+    def __init__(
+        self,
+        mapped_class_or_ac: _EntityType[_O],
+        alias: Optional[FromClause] = None,
+        name: Optional[str] = None,
+        flat: bool = False,
+        adapt_on_names: bool = False,
+        with_polymorphic_mappers: Optional[Sequence[Mapper[Any]]] = None,
+        with_polymorphic_discriminator: Optional[ColumnElement[Any]] = None,
+        base_alias: Optional[AliasedInsp[Any]] = None,
+        use_mapper_path: bool = False,
+        represents_outer_join: bool = False,
+    ):
+        insp = cast(
+            "_InternalEntityType[_O]", inspection.inspect(mapped_class_or_ac)
+        )
+        mapper = insp.mapper
+
+        nest_adapters = False
+
+        if alias is None:
+            if insp.is_aliased_class and insp.selectable._is_subquery:
+                alias = insp.selectable.alias()
+            else:
+                alias = (
+                    mapper._with_polymorphic_selectable._anonymous_fromclause(
+                        name=name,
+                        flat=flat,
+                    )
+                )
+        elif insp.is_aliased_class:
+            nest_adapters = True
+
+        assert alias is not None
+        self._aliased_insp = AliasedInsp(
+            self,
+            insp,
+            alias,
+            name,
+            (
+                with_polymorphic_mappers
+                if with_polymorphic_mappers
+                else mapper.with_polymorphic_mappers
+            ),
+            (
+                with_polymorphic_discriminator
+                if with_polymorphic_discriminator is not None
+                else mapper.polymorphic_on
+            ),
+            base_alias,
+            use_mapper_path,
+            adapt_on_names,
+            represents_outer_join,
+            nest_adapters,
+        )
+
+        self.__name__ = f"aliased({mapper.class_.__name__})"
+
+    @classmethod
+    def _reconstitute_from_aliased_insp(
+        cls, aliased_insp: AliasedInsp[_O]
+    ) -> AliasedClass[_O]:
+        obj = cls.__new__(cls)
+        obj.__name__ = f"aliased({aliased_insp.mapper.class_.__name__})"
+        obj._aliased_insp = aliased_insp
+
+        if aliased_insp._is_with_polymorphic:
+            for sub_aliased_insp in aliased_insp._with_polymorphic_entities:
+                if sub_aliased_insp is not aliased_insp:
+                    ent = AliasedClass._reconstitute_from_aliased_insp(
+                        sub_aliased_insp
+                    )
+                    setattr(obj, sub_aliased_insp.class_.__name__, ent)
+
+        return obj
+
+    def __getattr__(self, key: str) -> Any:
+        try:
+            _aliased_insp = self.__dict__["_aliased_insp"]
+        except KeyError:
+            raise AttributeError()
+        else:
+            target = _aliased_insp._target
+            # maintain all getattr mechanics
+            attr = getattr(target, key)
+
+        # attribute is a method that will be invoked against a
+        # "self"; so just return a new method with the same function and
+        # new self
+        if hasattr(attr, "__call__") and hasattr(attr, "__self__"):
+            return types.MethodType(attr.__func__, self)
+
+        # attribute is a descriptor that will be invoked against a
+        # "self"; so invoke the descriptor against this self
+        if hasattr(attr, "__get__"):
+            attr = attr.__get__(None, self)
+
+        # attributes within the QueryableAttribute system will want this
+        # to be invoked so the object can be adapted
+        if hasattr(attr, "adapt_to_entity"):
+            attr = attr.adapt_to_entity(_aliased_insp)
+            setattr(self, key, attr)
+
+        return attr
+
+    def _get_from_serialized(
+        self, key: str, mapped_class: _O, aliased_insp: AliasedInsp[_O]
+    ) -> Any:
+        # this method is only used in terms of the
+        # sqlalchemy.ext.serializer extension
+        attr = getattr(mapped_class, key)
+        if hasattr(attr, "__call__") and hasattr(attr, "__self__"):
+            return types.MethodType(attr.__func__, self)
+
+        # attribute is a descriptor that will be invoked against a
+        # "self"; so invoke the descriptor against this self
+        if hasattr(attr, "__get__"):
+            attr = attr.__get__(None, self)
+
+        # attributes within the QueryableAttribute system will want this
+        # to be invoked so the object can be adapted
+        if hasattr(attr, "adapt_to_entity"):
+            aliased_insp._weak_entity = weakref.ref(self)
+            attr = attr.adapt_to_entity(aliased_insp)
+            setattr(self, key, attr)
+
+        return attr
+
+    def __repr__(self) -> str:
+        return "<AliasedClass at 0x%x; %s>" % (
+            id(self),
+            self._aliased_insp._target.__name__,
+        )
+
+    def __str__(self) -> str:
+        return str(self._aliased_insp)
+
+
+@inspection._self_inspects
+class AliasedInsp(
+    ORMEntityColumnsClauseRole[_O],
+    ORMFromClauseRole,
+    HasCacheKey,
+    InspectionAttr,
+    MemoizedSlots,
+    inspection.Inspectable["AliasedInsp[_O]"],
+    Generic[_O],
+):
+    """Provide an inspection interface for an
+    :class:`.AliasedClass` object.
+
+    The :class:`.AliasedInsp` object is returned
+    given an :class:`.AliasedClass` using the
+    :func:`_sa.inspect` function::
+
+        from sqlalchemy import inspect
+        from sqlalchemy.orm import aliased
+
+        my_alias = aliased(MyMappedClass)
+        insp = inspect(my_alias)
+
+    Attributes on :class:`.AliasedInsp`
+    include:
+
+    * ``entity`` - the :class:`.AliasedClass` represented.
+    * ``mapper`` - the :class:`_orm.Mapper` mapping the underlying class.
+    * ``selectable`` - the :class:`_expression.Alias`
+      construct which ultimately
+      represents an aliased :class:`_schema.Table` or
+      :class:`_expression.Select`
+      construct.
+    * ``name`` - the name of the alias.  Also is used as the attribute
+      name when returned in a result tuple from :class:`_query.Query`.
+    * ``with_polymorphic_mappers`` - collection of :class:`_orm.Mapper`
+      objects
+      indicating all those mappers expressed in the select construct
+      for the :class:`.AliasedClass`.
+    * ``polymorphic_on`` - an alternate column or SQL expression which
+      will be used as the "discriminator" for a polymorphic load.
+
+    .. seealso::
+
+        :ref:`inspection_toplevel`
+
+    """
+
+    __slots__ = (
+        "__weakref__",
+        "_weak_entity",
+        "mapper",
+        "selectable",
+        "name",
+        "_adapt_on_names",
+        "with_polymorphic_mappers",
+        "polymorphic_on",
+        "_use_mapper_path",
+        "_base_alias",
+        "represents_outer_join",
+        "persist_selectable",
+        "local_table",
+        "_is_with_polymorphic",
+        "_with_polymorphic_entities",
+        "_adapter",
+        "_target",
+        "__clause_element__",
+        "_memoized_values",
+        "_all_column_expressions",
+        "_nest_adapters",
+    )
+
+    _cache_key_traversal = [
+        ("name", visitors.ExtendedInternalTraversal.dp_string),
+        ("_adapt_on_names", visitors.ExtendedInternalTraversal.dp_boolean),
+        ("_use_mapper_path", visitors.ExtendedInternalTraversal.dp_boolean),
+        ("_target", visitors.ExtendedInternalTraversal.dp_inspectable),
+        ("selectable", visitors.ExtendedInternalTraversal.dp_clauseelement),
+        (
+            "with_polymorphic_mappers",
+            visitors.InternalTraversal.dp_has_cache_key_list,
+        ),
+        ("polymorphic_on", visitors.InternalTraversal.dp_clauseelement),
+    ]
+
+    mapper: Mapper[_O]
+    selectable: FromClause
+    _adapter: ORMAdapter
+    with_polymorphic_mappers: Sequence[Mapper[Any]]
+    _with_polymorphic_entities: Sequence[AliasedInsp[Any]]
+
+    _weak_entity: weakref.ref[AliasedClass[_O]]
+    """the AliasedClass that refers to this AliasedInsp"""
+
+    _target: Union[Type[_O], AliasedClass[_O]]
+    """the thing referenced by the AliasedClass/AliasedInsp.
+
+    In the vast majority of cases, this is the mapped class.  However
+    it may also be another AliasedClass (alias of alias).
+
+    """
+
+    def __init__(
+        self,
+        entity: AliasedClass[_O],
+        inspected: _InternalEntityType[_O],
+        selectable: FromClause,
+        name: Optional[str],
+        with_polymorphic_mappers: Optional[Sequence[Mapper[Any]]],
+        polymorphic_on: Optional[ColumnElement[Any]],
+        _base_alias: Optional[AliasedInsp[Any]],
+        _use_mapper_path: bool,
+        adapt_on_names: bool,
+        represents_outer_join: bool,
+        nest_adapters: bool,
+    ):
+        mapped_class_or_ac = inspected.entity
+        mapper = inspected.mapper
+
+        self._weak_entity = weakref.ref(entity)
+        self.mapper = mapper
+        self.selectable = self.persist_selectable = self.local_table = (
+            selectable
+        )
+        self.name = name
+        self.polymorphic_on = polymorphic_on
+        self._base_alias = weakref.ref(_base_alias or self)
+        self._use_mapper_path = _use_mapper_path
+        self.represents_outer_join = represents_outer_join
+        self._nest_adapters = nest_adapters
+
+        if with_polymorphic_mappers:
+            self._is_with_polymorphic = True
+            self.with_polymorphic_mappers = with_polymorphic_mappers
+            self._with_polymorphic_entities = []
+            for poly in self.with_polymorphic_mappers:
+                if poly is not mapper:
+                    ent = AliasedClass(
+                        poly.class_,
+                        selectable,
+                        base_alias=self,
+                        adapt_on_names=adapt_on_names,
+                        use_mapper_path=_use_mapper_path,
+                    )
+
+                    setattr(self.entity, poly.class_.__name__, ent)
+                    self._with_polymorphic_entities.append(ent._aliased_insp)
+
+        else:
+            self._is_with_polymorphic = False
+            self.with_polymorphic_mappers = [mapper]
+
+        self._adapter = ORMAdapter(
+            _TraceAdaptRole.ALIASED_INSP,
+            mapper,
+            selectable=selectable,
+            equivalents=mapper._equivalent_columns,
+            adapt_on_names=adapt_on_names,
+            anonymize_labels=True,
+            # make sure the adapter doesn't try to grab other tables that
+            # are not even the thing we are mapping, such as embedded
+            # selectables in subqueries or CTEs.  See issue #6060
+            adapt_from_selectables={
+                m.selectable
+                for m in self.with_polymorphic_mappers
+                if not adapt_on_names
+            },
+            limit_on_entity=False,
+        )
+
+        if nest_adapters:
+            # supports "aliased class of aliased class" use case
+            assert isinstance(inspected, AliasedInsp)
+            self._adapter = inspected._adapter.wrap(self._adapter)
+
+        self._adapt_on_names = adapt_on_names
+        self._target = mapped_class_or_ac
+
+    @classmethod
+    def _alias_factory(
+        cls,
+        element: Union[_EntityType[_O], FromClause],
+        alias: Optional[FromClause] = None,
+        name: Optional[str] = None,
+        flat: bool = False,
+        adapt_on_names: bool = False,
+    ) -> Union[AliasedClass[_O], FromClause]:
+        if isinstance(element, FromClause):
+            if adapt_on_names:
+                raise sa_exc.ArgumentError(
+                    "adapt_on_names only applies to ORM elements"
+                )
+            if name:
+                return element.alias(name=name, flat=flat)
+            else:
+                return coercions.expect(
+                    roles.AnonymizedFromClauseRole, element, flat=flat
+                )
+        else:
+            return AliasedClass(
+                element,
+                alias=alias,
+                flat=flat,
+                name=name,
+                adapt_on_names=adapt_on_names,
+            )
+
+    @classmethod
+    def _with_polymorphic_factory(
+        cls,
+        base: Union[Type[_O], Mapper[_O]],
+        classes: Union[Literal["*"], Iterable[_EntityType[Any]]],
+        selectable: Union[Literal[False, None], FromClause] = False,
+        flat: bool = False,
+        polymorphic_on: Optional[ColumnElement[Any]] = None,
+        aliased: bool = False,
+        innerjoin: bool = False,
+        adapt_on_names: bool = False,
+        name: Optional[str] = None,
+        _use_mapper_path: bool = False,
+    ) -> AliasedClass[_O]:
+        primary_mapper = _class_to_mapper(base)
+
+        if selectable not in (None, False) and flat:
+            raise sa_exc.ArgumentError(
+                "the 'flat' and 'selectable' arguments cannot be passed "
+                "simultaneously to with_polymorphic()"
+            )
+
+        mappers, selectable = primary_mapper._with_polymorphic_args(
+            classes, selectable, innerjoin=innerjoin
+        )
+        if aliased or flat:
+            assert selectable is not None
+            selectable = selectable._anonymous_fromclause(flat=flat)
+
+        return AliasedClass(
+            base,
+            selectable,
+            name=name,
+            with_polymorphic_mappers=mappers,
+            adapt_on_names=adapt_on_names,
+            with_polymorphic_discriminator=polymorphic_on,
+            use_mapper_path=_use_mapper_path,
+            represents_outer_join=not innerjoin,
+        )
+
+    @property
+    def entity(self) -> AliasedClass[_O]:
+        # to eliminate reference cycles, the AliasedClass is held weakly.
+        # this produces some situations where the AliasedClass gets lost,
+        # particularly when one is created internally and only the AliasedInsp
+        # is passed around.
+        # to work around this case, we just generate a new one when we need
+        # it, as it is a simple class with very little initial state on it.
+        ent = self._weak_entity()
+        if ent is None:
+            ent = AliasedClass._reconstitute_from_aliased_insp(self)
+            self._weak_entity = weakref.ref(ent)
+        return ent
+
+    is_aliased_class = True
+    "always returns True"
+
+    def _memoized_method___clause_element__(self) -> FromClause:
+        return self.selectable._annotate(
+            {
+                "parentmapper": self.mapper,
+                "parententity": self,
+                "entity_namespace": self,
+            }
+        )._set_propagate_attrs(
+            {"compile_state_plugin": "orm", "plugin_subject": self}
+        )
+
+    @property
+    def entity_namespace(self) -> AliasedClass[_O]:
+        return self.entity
+
+    @property
+    def class_(self) -> Type[_O]:
+        """Return the mapped class ultimately represented by this
+        :class:`.AliasedInsp`."""
+        return self.mapper.class_
+
+    @property
+    def _path_registry(self) -> AbstractEntityRegistry:
+        if self._use_mapper_path:
+            return self.mapper._path_registry
+        else:
+            return PathRegistry.per_mapper(self)
+
+    def __getstate__(self) -> Dict[str, Any]:
+        return {
+            "entity": self.entity,
+            "mapper": self.mapper,
+            "alias": self.selectable,
+            "name": self.name,
+            "adapt_on_names": self._adapt_on_names,
+            "with_polymorphic_mappers": self.with_polymorphic_mappers,
+            "with_polymorphic_discriminator": self.polymorphic_on,
+            "base_alias": self._base_alias(),
+            "use_mapper_path": self._use_mapper_path,
+            "represents_outer_join": self.represents_outer_join,
+            "nest_adapters": self._nest_adapters,
+        }
+
+    def __setstate__(self, state: Dict[str, Any]) -> None:
+        self.__init__(  # type: ignore
+            state["entity"],
+            state["mapper"],
+            state["alias"],
+            state["name"],
+            state["with_polymorphic_mappers"],
+            state["with_polymorphic_discriminator"],
+            state["base_alias"],
+            state["use_mapper_path"],
+            state["adapt_on_names"],
+            state["represents_outer_join"],
+            state["nest_adapters"],
+        )
+
+    def _merge_with(self, other: AliasedInsp[_O]) -> AliasedInsp[_O]:
+        # assert self._is_with_polymorphic
+        # assert other._is_with_polymorphic
+
+        primary_mapper = other.mapper
+
+        assert self.mapper is primary_mapper
+
+        our_classes = util.to_set(
+            mp.class_ for mp in self.with_polymorphic_mappers
+        )
+        new_classes = {mp.class_ for mp in other.with_polymorphic_mappers}
+        if our_classes == new_classes:
+            return other
+        else:
+            classes = our_classes.union(new_classes)
+
+        mappers, selectable = primary_mapper._with_polymorphic_args(
+            classes, None, innerjoin=not other.represents_outer_join
+        )
+        selectable = selectable._anonymous_fromclause(flat=True)
+        return AliasedClass(
+            primary_mapper,
+            selectable,
+            with_polymorphic_mappers=mappers,
+            with_polymorphic_discriminator=other.polymorphic_on,
+            use_mapper_path=other._use_mapper_path,
+            represents_outer_join=other.represents_outer_join,
+        )._aliased_insp
+
+    def _adapt_element(
+        self, expr: _ORMCOLEXPR, key: Optional[str] = None
+    ) -> _ORMCOLEXPR:
+        assert isinstance(expr, ColumnElement)
+        d: Dict[str, Any] = {
+            "parententity": self,
+            "parentmapper": self.mapper,
+        }
+        if key:
+            d["proxy_key"] = key
+
+        # IMO mypy should see this one also as returning the same type
+        # we put into it, but it's not
+        return (
+            self._adapter.traverse(expr)
+            ._annotate(d)
+            ._set_propagate_attrs(
+                {"compile_state_plugin": "orm", "plugin_subject": self}
+            )
+        )
+
+    if TYPE_CHECKING:
+        # establish compatibility with the _ORMAdapterProto protocol,
+        # which in turn is compatible with _CoreAdapterProto.
+
+        def _orm_adapt_element(
+            self,
+            obj: _CE,
+            key: Optional[str] = None,
+        ) -> _CE: ...
+
+    else:
+        _orm_adapt_element = _adapt_element
+
+    def _entity_for_mapper(self, mapper):
+        self_poly = self.with_polymorphic_mappers
+        if mapper in self_poly:
+            if mapper is self.mapper:
+                return self
+            else:
+                return getattr(
+                    self.entity, mapper.class_.__name__
+                )._aliased_insp
+        elif mapper.isa(self.mapper):
+            return self
+        else:
+            assert False, "mapper %s doesn't correspond to %s" % (mapper, self)
+
+    def _memoized_attr__get_clause(self):
+        onclause, replacemap = self.mapper._get_clause
+        return (
+            self._adapter.traverse(onclause),
+            {
+                self._adapter.traverse(col): param
+                for col, param in replacemap.items()
+            },
+        )
+
+    def _memoized_attr__memoized_values(self):
+        return {}
+
+    def _memoized_attr__all_column_expressions(self):
+        if self._is_with_polymorphic:
+            cols_plus_keys = self.mapper._columns_plus_keys(
+                [ent.mapper for ent in self._with_polymorphic_entities]
+            )
+        else:
+            cols_plus_keys = self.mapper._columns_plus_keys()
+
+        cols_plus_keys = [
+            (key, self._adapt_element(col)) for key, col in cols_plus_keys
+        ]
+
+        return ColumnCollection(cols_plus_keys)
+
+    def _memo(self, key, callable_, *args, **kw):
+        if key in self._memoized_values:
+            return self._memoized_values[key]
+        else:
+            self._memoized_values[key] = value = callable_(*args, **kw)
+            return value
+
+    def __repr__(self):
+        if self.with_polymorphic_mappers:
+            with_poly = "(%s)" % ", ".join(
+                mp.class_.__name__ for mp in self.with_polymorphic_mappers
+            )
+        else:
+            with_poly = ""
+        return "<AliasedInsp at 0x%x; %s%s>" % (
+            id(self),
+            self.class_.__name__,
+            with_poly,
+        )
+
+    def __str__(self):
+        if self._is_with_polymorphic:
+            return "with_polymorphic(%s, [%s])" % (
+                self._target.__name__,
+                ", ".join(
+                    mp.class_.__name__
+                    for mp in self.with_polymorphic_mappers
+                    if mp is not self.mapper
+                ),
+            )
+        else:
+            return "aliased(%s)" % (self._target.__name__,)
+
+
+class _WrapUserEntity:
+    """A wrapper used within the loader_criteria lambda caller so that
+    we can bypass declared_attr descriptors on unmapped mixins, which
+    normally emit a warning for such use.
+
+    Might also be useful for other per-lambda instrumentations should
+    the need arise.
+
+    """
+
+    __slots__ = ("subject",)
+
+    def __init__(self, subject):
+        self.subject = subject
+
+    @util.preload_module("sqlalchemy.orm.decl_api")
+    def __getattribute__(self, name):
+        decl_api = util.preloaded.orm.decl_api
+
+        subject = object.__getattribute__(self, "subject")
+        if name in subject.__dict__ and isinstance(
+            subject.__dict__[name], decl_api.declared_attr
+        ):
+            return subject.__dict__[name].fget(subject)
+        else:
+            return getattr(subject, name)
+
+
+class LoaderCriteriaOption(CriteriaOption):
+    """Add additional WHERE criteria to the load for all occurrences of
+    a particular entity.
+
+    :class:`_orm.LoaderCriteriaOption` is invoked using the
+    :func:`_orm.with_loader_criteria` function; see that function for
+    details.
+
+    .. versionadded:: 1.4
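+
+    An illustrative sketch of the public entrypoint (assuming a mapped
+    ``User`` class with a boolean ``deleted`` column)::
+
+        from sqlalchemy import select
+        from sqlalchemy.orm import with_loader_criteria
+
+        stmt = select(User).options(
+            with_loader_criteria(User, User.deleted == False)
+        )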
+
+    """
+
+    __slots__ = (
+        "root_entity",
+        "entity",
+        "deferred_where_criteria",
+        "where_criteria",
+        "_where_crit_orig",
+        "include_aliases",
+        "propagate_to_loaders",
+    )
+
+    _traverse_internals = [
+        ("root_entity", visitors.ExtendedInternalTraversal.dp_plain_obj),
+        ("entity", visitors.ExtendedInternalTraversal.dp_has_cache_key),
+        ("where_criteria", visitors.InternalTraversal.dp_clauseelement),
+        ("include_aliases", visitors.InternalTraversal.dp_boolean),
+        ("propagate_to_loaders", visitors.InternalTraversal.dp_boolean),
+    ]
+
+    root_entity: Optional[Type[Any]]
+    entity: Optional[_InternalEntityType[Any]]
+    where_criteria: Union[ColumnElement[bool], lambdas.DeferredLambdaElement]
+    deferred_where_criteria: bool
+    include_aliases: bool
+    propagate_to_loaders: bool
+
+    _where_crit_orig: Any
+
+    def __init__(
+        self,
+        entity_or_base: _EntityType[Any],
+        where_criteria: Union[
+            _ColumnExpressionArgument[bool],
+            Callable[[Any], _ColumnExpressionArgument[bool]],
+        ],
+        loader_only: bool = False,
+        include_aliases: bool = False,
+        propagate_to_loaders: bool = True,
+        track_closure_variables: bool = True,
+    ):
+        entity = cast(
+            "_InternalEntityType[Any]",
+            inspection.inspect(entity_or_base, False),
+        )
+        if entity is None:
+            self.root_entity = cast("Type[Any]", entity_or_base)
+            self.entity = None
+        else:
+            self.root_entity = None
+            self.entity = entity
+
+        self._where_crit_orig = where_criteria
+        if callable(where_criteria):
+            if self.root_entity is not None:
+                wrap_entity = self.root_entity
+            else:
+                assert entity is not None
+                wrap_entity = entity.entity
+
+            self.deferred_where_criteria = True
+            self.where_criteria = lambdas.DeferredLambdaElement(
+                where_criteria,
+                roles.WhereHavingRole,
+                lambda_args=(_WrapUserEntity(wrap_entity),),
+                opts=lambdas.LambdaOptions(
+                    track_closure_variables=track_closure_variables
+                ),
+            )
+        else:
+            self.deferred_where_criteria = False
+            self.where_criteria = coercions.expect(
+                roles.WhereHavingRole, where_criteria
+            )
+
+        self.include_aliases = include_aliases
+        self.propagate_to_loaders = propagate_to_loaders
+
+    @classmethod
+    def _unreduce(
+        cls, entity, where_criteria, include_aliases, propagate_to_loaders
+    ):
+        return LoaderCriteriaOption(
+            entity,
+            where_criteria,
+            include_aliases=include_aliases,
+            propagate_to_loaders=propagate_to_loaders,
+        )
+
+    def __reduce__(self):
+        return (
+            LoaderCriteriaOption._unreduce,
+            (
+                self.entity.class_ if self.entity else self.root_entity,
+                self._where_crit_orig,
+                self.include_aliases,
+                self.propagate_to_loaders,
+            ),
+        )
+
+    def _all_mappers(self) -> Iterator[Mapper[Any]]:
+        if self.entity:
+            yield from self.entity.mapper.self_and_descendants
+        else:
+            assert self.root_entity
+            stack = list(self.root_entity.__subclasses__())
+            while stack:
+                subclass = stack.pop(0)
+                ent = cast(
+                    "_InternalEntityType[Any]",
+                    inspection.inspect(subclass, raiseerr=False),
+                )
+                if ent:
+                    yield from ent.mapper.self_and_descendants
+                else:
+                    stack.extend(subclass.__subclasses__())
+
+    def _should_include(self, compile_state: ORMCompileState) -> bool:
+        if (
+            compile_state.select_statement._annotations.get(
+                "for_loader_criteria", None
+            )
+            is self
+        ):
+            return False
+        return True
+
+    def _resolve_where_criteria(
+        self, ext_info: _InternalEntityType[Any]
+    ) -> ColumnElement[bool]:
+        if self.deferred_where_criteria:
+            crit = cast(
+                "ColumnElement[bool]",
+                self.where_criteria._resolve_with_args(ext_info.entity),
+            )
+        else:
+            crit = self.where_criteria  # type: ignore
+        assert isinstance(crit, ColumnElement)
+        return sql_util._deep_annotate(
+            crit,
+            {"for_loader_criteria": self},
+            detect_subquery_cols=True,
+            ind_cols_on_fromclause=True,
+        )
+
+    def process_compile_state_replaced_entities(
+        self,
+        compile_state: ORMCompileState,
+        mapper_entities: Iterable[_MapperEntity],
+    ) -> None:
+        self.process_compile_state(compile_state)
+
+    def process_compile_state(self, compile_state: ORMCompileState) -> None:
+        """Apply a modification to a given :class:`.CompileState`."""
+
+        # if an option were added to limit the criteria to the immediate
+        # query only, it would use compile_state.attributes instead
+
+        self.get_global_criteria(compile_state.global_attributes)
+
+    def get_global_criteria(self, attributes: Dict[Any, Any]) -> None:
+        for mp in self._all_mappers():
+            load_criteria = attributes.setdefault(
+                ("additional_entity_criteria", mp), []
+            )
+
+            load_criteria.append(self)
+
+
+inspection._inspects(AliasedClass)(lambda target: target._aliased_insp)
+
+
+@inspection._inspects(type)
+def _inspect_mc(
+    class_: Type[_O],
+) -> Optional[Mapper[_O]]:
+    try:
+        class_manager = opt_manager_of_class(class_)
+        if class_manager is None or not class_manager.is_mapped:
+            return None
+        mapper = class_manager.mapper
+    except exc.NO_STATE:
+        return None
+    else:
+        return mapper
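+
+
+# illustrative note (editorial): the hook above is what allows
+# ``inspect(SomeMappedClass)`` to return that class's Mapper, e.g.
+#
+#   from sqlalchemy import inspect
+#   mapper = inspect(User)  # the Mapper for an assumed mapped User class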
+
+
+GenericAlias = type(List[Any])
+
+
+@inspection._inspects(GenericAlias)
+def _inspect_generic_alias(
+    class_: Type[_O],
+) -> Optional[Mapper[_O]]:
+    origin = cast("Type[_O]", get_origin(class_))
+    return _inspect_mc(origin)
+
+
+@inspection._self_inspects
+class Bundle(
+    ORMColumnsClauseRole[_T],
+    SupportsCloneAnnotations,
+    MemoizedHasCacheKey,
+    inspection.Inspectable["Bundle[_T]"],
+    InspectionAttr,
+):
+    """A grouping of SQL expressions that are returned by a :class:`.Query`
+    under one namespace.
+
+    The :class:`.Bundle` essentially allows nesting of the tuple-based
+    results returned by a column-oriented :class:`_query.Query` object.
+    It also
+    is extensible via simple subclassing, where the primary capability
+    to override is that of how the set of expressions should be returned,
+    allowing post-processing as well as custom return types, without
+    involving ORM identity-mapped classes.
+
+    .. seealso::
+
+        :ref:`bundles`
+
+
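+    A brief 2.0-style usage sketch (``MyClass`` is an assumed mapped
+    class)::
+
+        bn = Bundle("mybundle", MyClass.x, MyClass.y)
+
+        for row in session.execute(select(bn)):
+            print(row.mybundle.x, row.mybundle.y)
+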
+    """
+
+    single_entity = False
+    """If True, queries for a single Bundle will be returned as a single
+    entity, rather than an element within a keyed tuple."""
+
+    is_clause_element = False
+
+    is_mapper = False
+
+    is_aliased_class = False
+
+    is_bundle = True
+
+    _propagate_attrs: _PropagateAttrsType = util.immutabledict()
+
+    proxy_set = util.EMPTY_SET  # type: ignore
+
+    exprs: List[_ColumnsClauseElement]
+
+    def __init__(
+        self, name: str, *exprs: _ColumnExpressionArgument[Any], **kw: Any
+    ):
+        r"""Construct a new :class:`.Bundle`.
+
+        e.g.::
+
+            bn = Bundle("mybundle", MyClass.x, MyClass.y)
+
+            for row in session.query(bn).filter(bn.c.x == 5).filter(bn.c.y == 4):
+                print(row.mybundle.x, row.mybundle.y)
+
+        :param name: name of the bundle.
+        :param \*exprs: columns or SQL expressions comprising the bundle.
+        :param single_entity=False: if True, rows for this :class:`.Bundle`
+         can be returned as a "single entity" outside of any enclosing tuple
+         in the same manner as a mapped entity.
+
+        """  # noqa: E501
+        self.name = self._label = name
+        coerced_exprs = [
+            coercions.expect(
+                roles.ColumnsClauseRole, expr, apply_propagate_attrs=self
+            )
+            for expr in exprs
+        ]
+        self.exprs = coerced_exprs
+
+        self.c = self.columns = ColumnCollection(
+            (getattr(col, "key", col._label), col)
+            for col in [e._annotations.get("bundle", e) for e in coerced_exprs]
+        ).as_readonly()
+        self.single_entity = kw.pop("single_entity", self.single_entity)
+
+    def _gen_cache_key(
+        self, anon_map: anon_map, bindparams: List[BindParameter[Any]]
+    ) -> Tuple[Any, ...]:
+        return (self.__class__, self.name, self.single_entity) + tuple(
+            [expr._gen_cache_key(anon_map, bindparams) for expr in self.exprs]
+        )
+
+    @property
+    def mapper(self) -> Optional[Mapper[Any]]:
+        mp: Optional[Mapper[Any]] = self.exprs[0]._annotations.get(
+            "parentmapper", None
+        )
+        return mp
+
+    @property
+    def entity(self) -> Optional[_InternalEntityType[Any]]:
+        ie: Optional[_InternalEntityType[Any]] = self.exprs[
+            0
+        ]._annotations.get("parententity", None)
+        return ie
+
+    @property
+    def entity_namespace(
+        self,
+    ) -> ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]:
+        return self.c
+
+    columns: ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]
+    """A namespace of SQL expressions referred to by this :class:`.Bundle`.
+
+        e.g.::
+
+            bn = Bundle("mybundle", MyClass.x, MyClass.y)
+
+            q = sess.query(bn).filter(bn.c.x == 5)
+
+        Nesting of bundles is also supported::
+
+            b1 = Bundle(
+                "b1",
+                Bundle("b2", MyClass.a, MyClass.b),
+                Bundle("b3", MyClass.x, MyClass.y),
+            )
+
+            q = sess.query(b1).filter(b1.c.b2.c.a == 5).filter(b1.c.b3.c.y == 9)
+
+    .. seealso::
+
+        :attr:`.Bundle.c`
+
+    """  # noqa: E501
+
+    c: ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]
+    """An alias for :attr:`.Bundle.columns`."""
+
+    def _clone(self, **kw):
+        cloned = self.__class__.__new__(self.__class__)
+        cloned.__dict__.update(self.__dict__)
+        return cloned
+
+    def __clause_element__(self):
+        # ensure existing entity_namespace remains
+        annotations = {"bundle": self, "entity_namespace": self}
+        annotations.update(self._annotations)
+
+        plugin_subject = self.exprs[0]._propagate_attrs.get(
+            "plugin_subject", self.entity
+        )
+        return (
+            expression.ClauseList(
+                _literal_as_text_role=roles.ColumnsClauseRole,
+                group=False,
+                *[e._annotations.get("bundle", e) for e in self.exprs],
+            )
+            ._annotate(annotations)
+            ._set_propagate_attrs(
+                # the Bundle *must* use the orm plugin no matter what.  the
+                # subject can be None but it's much better if it's not.
+                {
+                    "compile_state_plugin": "orm",
+                    "plugin_subject": plugin_subject,
+                }
+            )
+        )
+
+    @property
+    def clauses(self):
+        return self.__clause_element__().clauses
+
+    def label(self, name):
+        """Provide a copy of this :class:`.Bundle` passing a new label."""
+
+        cloned = self._clone()
+        cloned.name = name
+        return cloned
+
+    def create_row_processor(
+        self,
+        query: Select[Any],
+        procs: Sequence[Callable[[Row[Any]], Any]],
+        labels: Sequence[str],
+    ) -> Callable[[Row[Any]], Any]:
+        """Produce the "row processing" function for this :class:`.Bundle`.
+
+        May be overridden by subclasses to provide custom behaviors when
+        results are fetched. The method is passed the statement object and a
+        set of "row processor" functions at query execution time; these
+        processor functions when given a result row will return the individual
+        attribute value, which can then be adapted into any kind of return data
+        structure.
+
+        The example below illustrates replacing the usual :class:`.Row`
+        return structure with a straight Python dictionary::
+
+            from sqlalchemy.orm import Bundle
+
+
+            class DictBundle(Bundle):
+                def create_row_processor(self, query, procs, labels):
+                    "Override create_row_processor to return values as dictionaries"
+
+                    def proc(row):
+                        return dict(zip(labels, (proc(row) for proc in procs)))
+
+                    return proc
+
+        A result from the above :class:`_orm.Bundle` will return dictionary
+        values::
+
+            bn = DictBundle("mybundle", MyClass.data1, MyClass.data2)
+            for row in session.execute(select(bn).where(bn.c.data1 == "d1")):
+                print(row.mybundle["data1"], row.mybundle["data2"])
+
+        """  # noqa: E501
+        keyed_tuple = result_tuple(labels, [() for _ in labels])
+
+        def proc(row: Row[Any]) -> Any:
+            return keyed_tuple([proc(row) for proc in procs])
+
+        return proc
+
+
+def _orm_annotate(element: _SA, exclude: Optional[Any] = None) -> _SA:
+    """Deep copy the given ClauseElement, annotating each element with the
+    "_orm_adapt" flag.
+
+    Elements within the exclude collection will be cloned but not annotated.
+
+    """
+    return sql_util._deep_annotate(element, {"_orm_adapt": True}, exclude)
+
+
+def _orm_deannotate(element: _SA) -> _SA:
+    """Remove annotations that link a column to a particular mapping.
+
+    Note this doesn't affect "remote" and "foreign" annotations
+    passed by the :func:`_orm.foreign` and :func:`_orm.remote`
+    annotators.
+
+    """
+
+    return sql_util._deep_deannotate(
+        element, values=("_orm_adapt", "parententity")
+    )
+
+
+def _orm_full_deannotate(element: _SA) -> _SA:
+    return sql_util._deep_deannotate(element)
+
+
+class _ORMJoin(expression.Join):
+    """Extend Join to support ORM constructs as input."""
+
+    __visit_name__ = expression.Join.__visit_name__
+
+    inherit_cache = True
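+
+    # illustrative note (editorial): this construct underlies the public
+    # ``sqlalchemy.orm.join()`` function, e.g.
+    #
+    #   stmt = select(User).select_from(
+    #       join(User, Address, User.addresses)
+    #   )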
+
+    def __init__(
+        self,
+        left: _FromClauseArgument,
+        right: _FromClauseArgument,
+        onclause: Optional[_OnClauseArgument] = None,
+        isouter: bool = False,
+        full: bool = False,
+        _left_memo: Optional[Any] = None,
+        _right_memo: Optional[Any] = None,
+        _extra_criteria: Tuple[ColumnElement[bool], ...] = (),
+    ):
+        left_info = cast(
+            "Union[FromClause, _InternalEntityType[Any]]",
+            inspection.inspect(left),
+        )
+
+        right_info = cast(
+            "Union[FromClause, _InternalEntityType[Any]]",
+            inspection.inspect(right),
+        )
+        adapt_to = right_info.selectable
+
+        # used by joined eager loader
+        self._left_memo = _left_memo
+        self._right_memo = _right_memo
+
+        if isinstance(onclause, attributes.QueryableAttribute):
+            if TYPE_CHECKING:
+                assert isinstance(
+                    onclause.comparator, RelationshipProperty.Comparator
+                )
+            on_selectable = onclause.comparator._source_selectable()
+            prop = onclause.property
+            _extra_criteria += onclause._extra_criteria
+        elif isinstance(onclause, MapperProperty):
+            # used internally by joined eager loader...possibly not ideal
+            prop = onclause
+            on_selectable = prop.parent.selectable
+        else:
+            prop = None
+            on_selectable = None
+
+        left_selectable = left_info.selectable
+        if prop:
+            adapt_from: Optional[FromClause]
+            if sql_util.clause_is_present(on_selectable, left_selectable):
+                adapt_from = on_selectable
+            else:
+                assert isinstance(left_selectable, FromClause)
+                adapt_from = left_selectable
+
+            (
+                pj,
+                sj,
+                source,
+                dest,
+                secondary,
+                target_adapter,
+            ) = prop._create_joins(
+                source_selectable=adapt_from,
+                dest_selectable=adapt_to,
+                source_polymorphic=True,
+                of_type_entity=right_info,
+                alias_secondary=True,
+                extra_criteria=_extra_criteria,
+            )
+
+            if sj is not None:
+                if isouter:
+                    # note this is an inner join from secondary->right
+                    right = sql.join(secondary, right, sj)
+                    onclause = pj
+                else:
+                    left = sql.join(left, secondary, pj, isouter)
+                    onclause = sj
+            else:
+                onclause = pj
+
+            self._target_adapter = target_adapter
+
+        # we don't use the normal coercions logic for _ORMJoin
+        # (probably should), so do some gymnastics to get the entity.
+        # logic here is for #8721, which was a major bug in 1.4
+        # for almost two years, not reported/fixed until 1.4.43 (!)
+        if is_selectable(left_info):
+            parententity = left_selectable._annotations.get(
+                "parententity", None
+            )
+        elif insp_is_mapper(left_info) or insp_is_aliased_class(left_info):
+            parententity = left_info
+        else:
+            parententity = None
+
+        if parententity is not None:
+            self._annotations = self._annotations.union(
+                {"parententity": parententity}
+            )
+
+        augment_onclause = bool(_extra_criteria) and not prop
+        expression.Join.__init__(self, left, right, onclause, isouter, full)
+
+        assert self.onclause is not None
+
+        if augment_onclause:
+            self.onclause &= sql.and_(*_extra_criteria)
+
+        if (
+            not prop
+            and getattr(right_info, "mapper", None)
+            and right_info.mapper.single  # type: ignore
+        ):
+            right_info = cast("_InternalEntityType[Any]", right_info)
+            # if single inheritance target and we are using a manual
+            # or implicit ON clause, augment it the same way we'd augment the
+            # WHERE.
+            single_crit = right_info.mapper._single_table_criterion
+            if single_crit is not None:
+                if insp_is_aliased_class(right_info):
+                    single_crit = right_info._adapter.traverse(single_crit)
+                self.onclause = self.onclause & single_crit
+
+    def _splice_into_center(self, other):
+        """Splice a join into the center.
+
+        Given join(a, b) and join(b, c), return join(a, b).join(c)
+
+        """
+        leftmost = other
+        while isinstance(leftmost, sql.Join):
+            leftmost = leftmost.left
+
+        assert self.right is leftmost
+
+        left = _ORMJoin(
+            self.left,
+            other.left,
+            self.onclause,
+            isouter=self.isouter,
+            _left_memo=self._left_memo,
+            _right_memo=other._left_memo._path_registry,
+        )
+
+        return _ORMJoin(
+            left,
+            other.right,
+            other.onclause,
+            isouter=other.isouter,
+            _right_memo=other._right_memo,
+        )
+
+    def join(
+        self,
+        right: _FromClauseArgument,
+        onclause: Optional[_OnClauseArgument] = None,
+        isouter: bool = False,
+        full: bool = False,
+    ) -> _ORMJoin:
+        return _ORMJoin(self, right, onclause, full=full, isouter=isouter)
+
+    def outerjoin(
+        self,
+        right: _FromClauseArgument,
+        onclause: Optional[_OnClauseArgument] = None,
+        full: bool = False,
+    ) -> _ORMJoin:
+        return _ORMJoin(self, right, onclause, isouter=True, full=full)
+
+
+def with_parent(
+    instance: object,
+    prop: attributes.QueryableAttribute[Any],
+    from_entity: Optional[_EntityType[Any]] = None,
+) -> ColumnElement[bool]:
+    """Create filtering criterion that relates this query's primary entity
+    to the given related instance, using established
+    :func:`_orm.relationship()`
+    configuration.
+
+    E.g.::
+
+        stmt = select(Address).where(with_parent(some_user, User.addresses))
+
+    The SQL rendered is the same as that rendered when a lazy loader
+    would fire off from the given parent on that attribute, meaning
+    that the appropriate state is taken from the parent object in
+    Python without the need to render joins to the parent table
+    in the rendered statement.
+
+    The given property may also make use of :meth:`_orm.PropComparator.of_type`
+    to indicate the left side of the criteria::
+
+        a1 = aliased(Address)
+        a2 = aliased(Address)
+        stmt = select(a1, a2).where(with_parent(u1, User.addresses.of_type(a2)))
+
+    The above use is equivalent to using the
+    :func:`_orm.with_parent.from_entity` argument::
+
+        a1 = aliased(Address)
+        a2 = aliased(Address)
+        stmt = select(a1, a2).where(
+            with_parent(u1, User.addresses, from_entity=a2)
+        )
+
+    :param instance:
+      An instance which has some :func:`_orm.relationship`.
+
+    :param prop:
+      Class-bound attribute, which indicates
+      what relationship from the instance should be used to reconcile the
+      parent/child relationship.
+
+    :param from_entity:
+      Entity to consider as the left side.  This defaults to the
+      "zero" entity of the :class:`_query.Query` itself.
+
+      .. versionadded:: 1.2
+
+    """  # noqa: E501
+    prop_t: RelationshipProperty[Any]
+
+    if isinstance(prop, str):
+        raise sa_exc.ArgumentError(
+            "with_parent() accepts class-bound mapped attributes, not strings"
+        )
+    elif isinstance(prop, attributes.QueryableAttribute):
+        if prop._of_type:
+            from_entity = prop._of_type
+        mapper_property = prop.property
+        if mapper_property is None or not prop_is_relationship(
+            mapper_property
+        ):
+            raise sa_exc.ArgumentError(
+                f"Expected relationship property for with_parent(), "
+                f"got {mapper_property}"
+            )
+        prop_t = mapper_property
+    else:
+        prop_t = prop
+
+    return prop_t._with_parent(instance, from_entity=from_entity)
+
+
+def has_identity(object_: object) -> bool:
+    """Return True if the given object has a database
+    identity.
+
+    This typically corresponds to the object being
+    in either the persistent or detached state.
+
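+    E.g., a minimal sketch, assuming a mapped ``User`` class and a
+    working ``session`` (illustrative names)::
+
+        user = User(name="squidward")
+        assert not has_identity(user)  # transient; no identity yet
+
+        session.add(user)
+        session.flush()  # INSERT assigns the primary key identity
+        assert has_identity(user)
+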
+    .. seealso::
+
+        :func:`.was_deleted`
+
+    """
+    state = attributes.instance_state(object_)
+    return state.has_identity
+
+
+def was_deleted(object_: object) -> bool:
+    """Return True if the given object was deleted
+    within a session flush.
+
+    This is the case regardless of whether the object is
+    persistent or detached.
+
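+    E.g., an illustrative sketch, assuming ``user`` is a persistent
+    instance within ``session``::
+
+        session.delete(user)
+        session.flush()
+        assert was_deleted(user)  # stays True even once detached
+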
+    .. seealso::
+
+        :attr:`.InstanceState.was_deleted`
+
+    """
+
+    state = attributes.instance_state(object_)
+    return state.was_deleted
+
+
+def _entity_corresponds_to(
+    given: _InternalEntityType[Any], entity: _InternalEntityType[Any]
+) -> bool:
+    """determine if 'given' corresponds to 'entity', in terms
+    of an entity passed to Query that would match the same entity
+    being referred to elsewhere in the query.
+
+    """
+    if insp_is_aliased_class(entity):
+        if insp_is_aliased_class(given):
+            if entity._base_alias() is given._base_alias():
+                return True
+        return False
+    elif insp_is_aliased_class(given):
+        if given._use_mapper_path:
+            return entity in given.with_polymorphic_mappers
+        else:
+            return entity is given
+
+    assert insp_is_mapper(given)
+    return entity.common_parent(given)
+
+
+def _entity_corresponds_to_use_path_impl(
+    given: _InternalEntityType[Any], entity: _InternalEntityType[Any]
+) -> bool:
+    """determine if 'given' corresponds to 'entity', in terms
+    of a path of loader options where a mapped attribute is taken to
+    be a member of a parent entity.
+
+    e.g.::
+
+        someoption(A).someoption(A.b)  # -> fn(A, A) -> True
+        someoption(A).someoption(C.d)  # -> fn(A, C) -> False
+
+        a1 = aliased(A)
+        someoption(a1).someoption(A.b)  # -> fn(a1, A) -> False
+        someoption(a1).someoption(a1.b)  # -> fn(a1, a1) -> True
+
+        wp = with_polymorphic(A, [A1, A2])
+        someoption(wp).someoption(A1.foo)  # -> fn(wp, A1) -> False
+        someoption(wp).someoption(wp.A1.foo)  # -> fn(wp, wp.A1) -> True
+
+    """
+    if insp_is_aliased_class(given):
+        return (
+            insp_is_aliased_class(entity)
+            and not entity._use_mapper_path
+            and (given is entity or entity in given._with_polymorphic_entities)
+        )
+    elif not insp_is_aliased_class(entity):
+        return given.isa(entity.mapper)
+    else:
+        return (
+            entity._use_mapper_path
+            and given in entity.with_polymorphic_mappers
+        )
+
+
+def _entity_isa(given: _InternalEntityType[Any], mapper: Mapper[Any]) -> bool:
+    """determine if 'given' "is a" mapper, in terms of the given
+    would load rows of type 'mapper'.
+
+    """
+    if given.is_aliased_class:
+        return mapper in given.with_polymorphic_mappers or given.mapper.isa(
+            mapper
+        )
+    elif given.with_polymorphic_mappers:
+        return mapper in given.with_polymorphic_mappers or given.isa(mapper)
+    else:
+        return given.isa(mapper)
+
+
+def _getitem(iterable_query: Query[Any], item: Any) -> Any:
+    """calculate __getitem__ in terms of an iterable query object
+    that also has a slice() method.
+
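+    E.g., roughly how Python indexing maps onto this helper, assuming
+    ``q`` is a Query::
+
+        q[1:3]  # -> list(q.slice(1, 3))
+        q[5]    # -> list(q.slice(5, 6))[0]
+        q[-1]   # -> raises IndexError
+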
+    """
+
+    def _no_negative_indexes():
+        raise IndexError(
+            "negative indexes are not accepted by SQL "
+            "index / slice operators"
+        )
+
+    if isinstance(item, slice):
+        start, stop, step = util.decode_slice(item)
+
+        if (
+            isinstance(stop, int)
+            and isinstance(start, int)
+            and stop - start <= 0
+        ):
+            return []
+
+        elif (isinstance(start, int) and start < 0) or (
+            isinstance(stop, int) and stop < 0
+        ):
+            _no_negative_indexes()
+
+        res = iterable_query.slice(start, stop)
+        if step is not None:
+            return list(res)[None : None : item.step]
+        else:
+            return list(res)
+    else:
+        if item == -1:
+            _no_negative_indexes()
+        else:
+            return list(iterable_query[item : item + 1])[0]
+
+
+def _is_mapped_annotation(
+    raw_annotation: _AnnotationScanType,
+    cls: Type[Any],
+    originating_cls: Type[Any],
+) -> bool:
+    try:
+        annotated = de_stringify_annotation(
+            cls, raw_annotation, originating_cls.__module__
+        )
+    except NameError:
+        # in most cases, at least within our own tests, we can raise
+        # here, which is more accurate as it prevents us from returning
+        # false negatives.  However, in the real world, try to avoid getting
+        # involved with end-user annotations that have nothing to do with us.
+        # see issue #8888 where we bypass using this function in the case
+        # that we want to detect an unresolvable Mapped[] type.
+        return False
+    else:
+        return is_origin_of_cls(annotated, _MappedAnnotationBase)
+
+
+class _CleanupError(Exception):
+    pass
+
+
+def _cleanup_mapped_str_annotation(
+    annotation: str, originating_module: str
+) -> str:
+    # fix up an annotation that comes in as the form:
+    # 'Mapped[List[Address]]'  so that it instead looks like:
+    # 'Mapped[List["Address"]]' , which will allow us to get
+    # "Address" as a string
+
+    # additionally, resolve symbols for these names since this is where
+    # we'd have to do it
+
+    inner: Optional[Match[str]]
+
+    mm = re.match(r"^([^ \|]+?)\[(.+)\]$", annotation)
+
+    if not mm:
+        return annotation
+
+    # ticket #8759.  Resolve the Mapped name to a real symbol.
+    # originally this just checked the name.
+    try:
+        obj = eval_name_only(mm.group(1), originating_module)
+    except NameError as ne:
+        raise _CleanupError(
+            f'For annotation "{annotation}", could not resolve '
+            f'container type "{mm.group(1)}".  '
+            "Please ensure this type is imported at the module level "
+            "outside of TYPE_CHECKING blocks"
+        ) from ne
+
+    if obj is typing.ClassVar:
+        real_symbol = "ClassVar"
+    else:
+        try:
+            if issubclass(obj, _MappedAnnotationBase):
+                real_symbol = obj.__name__
+            else:
+                return annotation
+        except TypeError:
+            # avoid isinstance(obj, type) check, just catch TypeError
+            return annotation
+
+    # note: if one of the codepaths above didn't define real_symbol and
+    # then didn't return, referencing real_symbol below raises
+    # UnboundLocalError, which is a subclass of NameError, so the calling
+    # routines don't notice it since they catch NameError anyway.
+    # Something to be aware of if this code is modified in the future.
+
+    stack = []
+    inner = mm
+    while True:
+        stack.append(real_symbol if mm is inner else inner.group(1))
+        g2 = inner.group(2)
+        inner = re.match(r"^([^ \|]+?)\[(.+)\]$", g2)
+        if inner is None:
+            stack.append(g2)
+            break
+
+    # stacks we want to rewrite, that is, quote the last entry which
+    # we think is a relationship class name:
+    #
+    #   ['Mapped', 'List', 'Address']
+    #   ['Mapped', 'A']
+    #
+    # stacks we don't want to rewrite, which are generally MappedColumn
+    # use cases:
+    #
+    # ['Mapped', "'Optional[Dict[str, str]]'"]
+    # ['Mapped', 'dict[str, str] | None']
+
+    if (
+        # avoid already quoted symbols such as
+        # ['Mapped', "'Optional[Dict[str, str]]'"]
+        not re.match(r"""^["'].*["']$""", stack[-1])
+        # avoid further generics like Dict[] such as
+        # ['Mapped', 'dict[str, str] | None'],
+        # ['Mapped', 'list[int] | list[str]'],
+        # ['Mapped', 'Union[list[int], list[str]]'],
+        and not re.search(r"[\[\]]", stack[-1])
+    ):
+        stripchars = "\"' "
+        stack[-1] = ", ".join(
+            f'"{elem.strip(stripchars)}"' for elem in stack[-1].split(",")
+        )
+
+        annotation = "[".join(stack) + ("]" * (len(stack) - 1))
+
+    return annotation
+
+
+def _extract_mapped_subtype(
+    raw_annotation: Optional[_AnnotationScanType],
+    cls: type,
+    originating_module: str,
+    key: str,
+    attr_cls: Type[Any],
+    required: bool,
+    is_dataclass_field: bool,
+    expect_mapped: bool = True,
+    raiseerr: bool = True,
+) -> Optional[Tuple[Union[_AnnotationScanType, str], Optional[type]]]:
+    """given an annotation, figure out if it's ``Mapped[something]`` and if
+    so, return the ``something`` part.
+
+    Includes error raise scenarios and other options.
+
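+    E.g., roughly (illustrative annotations)::
+
+        Mapped[int]        # -> (int, Mapped)
+        Mapped["Address"]  # -> (ForwardRef('Address'), Mapped)
+        int                # -> raises ArgumentError if expect_mapped=True
+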
+    """
+
+    if raw_annotation is None:
+        if required:
+            raise sa_exc.ArgumentError(
+                f"Python typing annotation is required for attribute "
+                f'"{cls.__name__}.{key}" when primary argument(s) for '
+                f'"{attr_cls.__name__}" construct are None or not present'
+            )
+        return None
+
+    try:
+        # destringify the "outside" of the annotation.  note we are not
+        # adding include_generic so it will *not* dig into generic contents,
+        # which will remain as ForwardRef or plain str under future annotations
+        # mode.  The full destringify happens later when mapped_column goes
+        # to do a full lookup in the registry type_annotations_map.
+        annotated = de_stringify_annotation(
+            cls,
+            raw_annotation,
+            originating_module,
+            str_cleanup_fn=_cleanup_mapped_str_annotation,
+        )
+    except _CleanupError as ce:
+        raise sa_exc.ArgumentError(
+            f"Could not interpret annotation {raw_annotation}.  "
+            "Check that it uses names that are correctly imported at the "
+            "module level. See chained stack trace for more hints."
+        ) from ce
+    except NameError as ne:
+        if raiseerr and "Mapped[" in raw_annotation:  # type: ignore
+            raise sa_exc.ArgumentError(
+                f"Could not interpret annotation {raw_annotation}.  "
+                "Check that it uses names that are correctly imported at the "
+                "module level. See chained stack trace for more hints."
+            ) from ne
+
+        annotated = raw_annotation  # type: ignore
+
+    if is_dataclass_field:
+        return annotated, None
+    else:
+        if not hasattr(annotated, "__origin__") or not is_origin_of_cls(
+            annotated, _MappedAnnotationBase
+        ):
+            if expect_mapped:
+                if not raiseerr:
+                    return None
+
+                origin = getattr(annotated, "__origin__", None)
+                if origin is typing.ClassVar:
+                    return None
+
+                # check for other kind of ORM descriptor like AssociationProxy,
+                # don't raise for that (issue #9957)
+                elif isinstance(origin, type) and issubclass(
+                    origin, ORMDescriptor
+                ):
+                    return None
+
+                raise sa_exc.ArgumentError(
+                    f'Type annotation for "{cls.__name__}.{key}" '
+                    "can't be correctly interpreted for "
+                    "Annotated Declarative Table form.  ORM annotations "
+                    "should normally make use of the ``Mapped[]`` generic "
+                    "type, or other ORM-compatible generic type, as a "
+                    "container for the actual type, which indicates the "
+                    "intent that the attribute is mapped. "
+                    "Class variables that are not intended to be mapped "
+                    "by the ORM should use ClassVar[].  "
+                    "To allow Annotated Declarative to disregard legacy "
+                    "annotations which don't use Mapped[] to pass, set "
+                    '"__allow_unmapped__ = True" on the class or a '
+                    "superclass this class.",
+                    code="zlpr",
+                )
+
+            else:
+                return annotated, None
+
+        if len(annotated.__args__) != 1:
+            raise sa_exc.ArgumentError(
+                "Expected sub-type for Mapped[] annotation"
+            )
+
+        return (
+            # fix dict/list/set args to be ForwardRef, see #11814
+            fixup_container_fwd_refs(annotated.__args__[0]),
+            annotated.__origin__,
+        )
+
+
+def _mapper_property_as_plain_name(prop: Type[Any]) -> str:
+    if hasattr(prop, "_mapper_property_name"):
+        name = prop._mapper_property_name()
+    else:
+        name = None
+    return util.clsname_as_plain_name(prop, name)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/orm/writeonly.py b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/writeonly.py
new file mode 100644
index 00000000..ac034a09
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/orm/writeonly.py
@@ -0,0 +1,678 @@
+# orm/writeonly.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Write-only collection API.
+
+This is an alternate mapped attribute style that only supports single-item
+collection mutation operations.  To read the collection, a select()
+object must be executed each time.
+
+.. versionadded:: 2.0
+
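+E.g., a brief sketch of the mapping style this module provides
+(``User`` / ``Address`` are illustrative declarative classes)::
+
+    class User(Base):
+        __tablename__ = "user_account"
+
+        id: Mapped[int] = mapped_column(primary_key=True)
+
+        # WriteOnlyMapped selects the "write_only" loader strategy
+        addresses: WriteOnlyMapped["Address"] = relationship()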
+
+"""
+
+from __future__ import annotations
+
+from typing import Any
+from typing import Collection
+from typing import Dict
+from typing import Generic
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import NoReturn
+from typing import Optional
+from typing import overload
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from sqlalchemy.sql import bindparam
+from . import attributes
+from . import interfaces
+from . import relationships
+from . import strategies
+from .base import NEVER_SET
+from .base import object_mapper
+from .base import PassiveFlag
+from .base import RelationshipDirection
+from .. import exc
+from .. import inspect
+from .. import log
+from .. import util
+from ..sql import delete
+from ..sql import insert
+from ..sql import select
+from ..sql import update
+from ..sql.dml import Delete
+from ..sql.dml import Insert
+from ..sql.dml import Update
+from ..util.typing import Literal
+
+if TYPE_CHECKING:
+    from . import QueryableAttribute
+    from ._typing import _InstanceDict
+    from .attributes import AttributeEventToken
+    from .base import LoaderCallableStatus
+    from .collections import _AdaptedCollectionProtocol
+    from .collections import CollectionAdapter
+    from .mapper import Mapper
+    from .relationships import _RelationshipOrderByArg
+    from .state import InstanceState
+    from .util import AliasedClass
+    from ..event import _Dispatch
+    from ..sql.selectable import FromClause
+    from ..sql.selectable import Select
+
+_T = TypeVar("_T", bound=Any)
+
+
+class WriteOnlyHistory(Generic[_T]):
+    """Overrides AttributeHistory to receive append/remove events directly."""
+
+    unchanged_items: util.OrderedIdentitySet
+    added_items: util.OrderedIdentitySet
+    deleted_items: util.OrderedIdentitySet
+    _reconcile_collection: bool
+
+    def __init__(
+        self,
+        attr: WriteOnlyAttributeImpl,
+        state: InstanceState[_T],
+        passive: PassiveFlag,
+        apply_to: Optional[WriteOnlyHistory[_T]] = None,
+    ) -> None:
+        if apply_to:
+            if passive & PassiveFlag.SQL_OK:
+                raise exc.InvalidRequestError(
+                    f"Attribute {attr} can't load the existing state from the "
+                    "database for this operation; full iteration is not "
+                    "permitted.  If this is a delete operation, configure "
+                    f"passive_deletes=True on the {attr} relationship in "
+                    "order to resolve this error."
+                )
+
+            self.unchanged_items = apply_to.unchanged_items
+            self.added_items = apply_to.added_items
+            self.deleted_items = apply_to.deleted_items
+            self._reconcile_collection = apply_to._reconcile_collection
+        else:
+            self.deleted_items = util.OrderedIdentitySet()
+            self.added_items = util.OrderedIdentitySet()
+            self.unchanged_items = util.OrderedIdentitySet()
+            self._reconcile_collection = False
+
+    @property
+    def added_plus_unchanged(self) -> List[_T]:
+        return list(self.added_items.union(self.unchanged_items))
+
+    @property
+    def all_items(self) -> List[_T]:
+        return list(
+            self.added_items.union(self.unchanged_items).union(
+                self.deleted_items
+            )
+        )
+
+    def as_history(self) -> attributes.History:
+        if self._reconcile_collection:
+            added = self.added_items.difference(self.unchanged_items)
+            deleted = self.deleted_items.intersection(self.unchanged_items)
+            unchanged = self.unchanged_items.difference(deleted)
+        else:
+            added, unchanged, deleted = (
+                self.added_items,
+                self.unchanged_items,
+                self.deleted_items,
+            )
+        return attributes.History(list(added), list(unchanged), list(deleted))
+
+    def indexed(self, index: Union[int, slice]) -> Union[List[_T], _T]:
+        return list(self.added_items)[index]
+
+    def add_added(self, value: _T) -> None:
+        self.added_items.add(value)
+
+    def add_removed(self, value: _T) -> None:
+        if value in self.added_items:
+            self.added_items.remove(value)
+        else:
+            self.deleted_items.add(value)
+
+
+class WriteOnlyAttributeImpl(
+    attributes.HasCollectionAdapter, attributes.AttributeImpl
+):
+    uses_objects: bool = True
+    default_accepts_scalar_loader: bool = False
+    supports_population: bool = False
+    _supports_dynamic_iteration: bool = False
+    collection: bool = False
+    dynamic: bool = True
+    order_by: _RelationshipOrderByArg = ()
+    collection_history_cls: Type[WriteOnlyHistory[Any]] = WriteOnlyHistory
+
+    query_class: Type[WriteOnlyCollection[Any]]
+
+    def __init__(
+        self,
+        class_: Union[Type[Any], AliasedClass[Any]],
+        key: str,
+        dispatch: _Dispatch[QueryableAttribute[Any]],
+        target_mapper: Mapper[_T],
+        order_by: _RelationshipOrderByArg,
+        **kw: Any,
+    ):
+        super().__init__(class_, key, None, dispatch, **kw)
+        self.target_mapper = target_mapper
+        self.query_class = WriteOnlyCollection
+        if order_by:
+            self.order_by = tuple(order_by)
+
+    def get(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
+    ) -> Union[util.OrderedIdentitySet, WriteOnlyCollection[Any]]:
+        if not passive & PassiveFlag.SQL_OK:
+            return self._get_collection_history(
+                state, PassiveFlag.PASSIVE_NO_INITIALIZE
+            ).added_items
+        else:
+            return self.query_class(self, state)
+
+    @overload
+    def get_collection(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        user_data: Literal[None] = ...,
+        passive: Literal[PassiveFlag.PASSIVE_OFF] = ...,
+    ) -> CollectionAdapter: ...
+
+    @overload
+    def get_collection(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        user_data: _AdaptedCollectionProtocol = ...,
+        passive: PassiveFlag = ...,
+    ) -> CollectionAdapter: ...
+
+    @overload
+    def get_collection(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        user_data: Optional[_AdaptedCollectionProtocol] = ...,
+        passive: PassiveFlag = ...,
+    ) -> Union[
+        Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter
+    ]: ...
+
+    def get_collection(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        user_data: Optional[_AdaptedCollectionProtocol] = None,
+        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
+    ) -> Union[
+        Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter
+    ]:
+        data: Collection[Any]
+        if not passive & PassiveFlag.SQL_OK:
+            data = self._get_collection_history(state, passive).added_items
+        else:
+            history = self._get_collection_history(state, passive)
+            data = history.added_plus_unchanged
+        return DynamicCollectionAdapter(data)  # type: ignore[return-value]
+
+    @util.memoized_property
+    def _append_token(  # type:ignore[override]
+        self,
+    ) -> attributes.AttributeEventToken:
+        return attributes.AttributeEventToken(self, attributes.OP_APPEND)
+
+    @util.memoized_property
+    def _remove_token(  # type:ignore[override]
+        self,
+    ) -> attributes.AttributeEventToken:
+        return attributes.AttributeEventToken(self, attributes.OP_REMOVE)
+
+    def fire_append_event(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken],
+        collection_history: Optional[WriteOnlyHistory[Any]] = None,
+    ) -> None:
+        if collection_history is None:
+            collection_history = self._modified_event(state, dict_)
+
+        collection_history.add_added(value)
+
+        for fn in self.dispatch.append:
+            value = fn(state, value, initiator or self._append_token)
+
+        if self.trackparent and value is not None:
+            self.sethasparent(attributes.instance_state(value), state, True)
+
+    def fire_remove_event(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken],
+        collection_history: Optional[WriteOnlyHistory[Any]] = None,
+    ) -> None:
+        if collection_history is None:
+            collection_history = self._modified_event(state, dict_)
+
+        collection_history.add_removed(value)
+
+        if self.trackparent and value is not None:
+            self.sethasparent(attributes.instance_state(value), state, False)
+
+        for fn in self.dispatch.remove:
+            fn(state, value, initiator or self._remove_token)
+
+    def _modified_event(
+        self, state: InstanceState[Any], dict_: _InstanceDict
+    ) -> WriteOnlyHistory[Any]:
+        if self.key not in state.committed_state:
+            state.committed_state[self.key] = self.collection_history_cls(
+                self, state, PassiveFlag.PASSIVE_NO_FETCH
+            )
+
+        state._modified_event(dict_, self, NEVER_SET)
+
+        # this is a hack to allow the entities.ComparableEntity fixture
+        # to work
+        dict_[self.key] = True
+        return state.committed_state[self.key]  # type: ignore[no-any-return]
+
+    def set(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken] = None,
+        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
+        check_old: Any = None,
+        pop: bool = False,
+        _adapt: bool = True,
+    ) -> None:
+        if initiator and initiator.parent_token is self.parent_token:
+            return
+
+        if pop and value is None:
+            return
+
+        iterable = value
+        new_values = list(iterable)
+        if state.has_identity:
+            if not self._supports_dynamic_iteration:
+                raise exc.InvalidRequestError(
+                    f'Collection "{self}" does not support implicit '
+                    "iteration; collection replacement operations "
+                    "can't be used"
+                )
+            old_collection = util.IdentitySet(
+                self.get(state, dict_, passive=passive)
+            )
+
+        collection_history = self._modified_event(state, dict_)
+        if not state.has_identity:
+            old_collection = collection_history.added_items
+        else:
+            old_collection = old_collection.union(
+                collection_history.added_items
+            )
+
+        constants = old_collection.intersection(new_values)
+        additions = util.IdentitySet(new_values).difference(constants)
+        removals = old_collection.difference(constants)
+
+        for member in new_values:
+            if member in additions:
+                self.fire_append_event(
+                    state,
+                    dict_,
+                    member,
+                    None,
+                    collection_history=collection_history,
+                )
+
+        for member in removals:
+            self.fire_remove_event(
+                state,
+                dict_,
+                member,
+                None,
+                collection_history=collection_history,
+            )
+
+    def delete(self, *args: Any, **kwargs: Any) -> NoReturn:
+        raise NotImplementedError()
+
+    def set_committed_value(
+        self, state: InstanceState[Any], dict_: _InstanceDict, value: Any
+    ) -> NoReturn:
+        raise NotImplementedError(
+            "Dynamic attributes don't support collection population."
+        )
+
+    def get_history(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        passive: PassiveFlag = PassiveFlag.PASSIVE_NO_FETCH,
+    ) -> attributes.History:
+        c = self._get_collection_history(state, passive)
+        return c.as_history()
+
+    def get_all_pending(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        passive: PassiveFlag = PassiveFlag.PASSIVE_NO_INITIALIZE,
+    ) -> List[Tuple[InstanceState[Any], Any]]:
+        c = self._get_collection_history(state, passive)
+        return [(attributes.instance_state(x), x) for x in c.all_items]
+
+    def _get_collection_history(
+        self, state: InstanceState[Any], passive: PassiveFlag
+    ) -> WriteOnlyHistory[Any]:
+        c: WriteOnlyHistory[Any]
+        if self.key in state.committed_state:
+            c = state.committed_state[self.key]
+        else:
+            c = self.collection_history_cls(
+                self, state, PassiveFlag.PASSIVE_NO_FETCH
+            )
+
+        if state.has_identity and (passive & PassiveFlag.INIT_OK):
+            return self.collection_history_cls(
+                self, state, passive, apply_to=c
+            )
+        else:
+            return c
+
+    def append(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken],
+        passive: PassiveFlag = PassiveFlag.PASSIVE_NO_FETCH,
+    ) -> None:
+        if initiator is not self:
+            self.fire_append_event(state, dict_, value, initiator)
+
+    def remove(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken],
+        passive: PassiveFlag = PassiveFlag.PASSIVE_NO_FETCH,
+    ) -> None:
+        if initiator is not self:
+            self.fire_remove_event(state, dict_, value, initiator)
+
+    def pop(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken],
+        passive: PassiveFlag = PassiveFlag.PASSIVE_NO_FETCH,
+    ) -> None:
+        self.remove(state, dict_, value, initiator, passive=passive)
+
+
+@log.class_logger
+@relationships.RelationshipProperty.strategy_for(lazy="write_only")
+class WriteOnlyLoader(strategies.AbstractRelationshipLoader, log.Identified):
+    impl_class = WriteOnlyAttributeImpl
+
+    def init_class_attribute(self, mapper: Mapper[Any]) -> None:
+        self.is_class_level = True
+        if not self.uselist or self.parent_property.direction not in (
+            interfaces.ONETOMANY,
+            interfaces.MANYTOMANY,
+        ):
+            raise exc.InvalidRequestError(
+                "On relationship %s, 'dynamic' loaders cannot be used with "
+                "many-to-one/one-to-one relationships and/or "
+                "uselist=False." % self.parent_property
+            )
+
+        strategies._register_attribute(  # type: ignore[no-untyped-call]
+            self.parent_property,
+            mapper,
+            useobject=True,
+            impl_class=self.impl_class,
+            target_mapper=self.parent_property.mapper,
+            order_by=self.parent_property.order_by,
+            query_class=self.parent_property.query_class,
+        )
+
+
+class DynamicCollectionAdapter:
+    """simplified CollectionAdapter for internal API consistency"""
+
+    data: Collection[Any]
+
+    def __init__(self, data: Collection[Any]):
+        self.data = data
+
+    def __iter__(self) -> Iterator[Any]:
+        return iter(self.data)
+
+    def _reset_empty(self) -> None:
+        pass
+
+    def __len__(self) -> int:
+        return len(self.data)
+
+    def __bool__(self) -> bool:
+        return True
+
+
+class AbstractCollectionWriter(Generic[_T]):
+    """Virtual collection which includes append/remove methods that synchronize
+    into the attribute event system.
+
+    """
+
+    if not TYPE_CHECKING:
+        __slots__ = ()
+
+    instance: _T
+    _from_obj: Tuple[FromClause, ...]
+
+    def __init__(self, attr: WriteOnlyAttributeImpl, state: InstanceState[_T]):
+        instance = state.obj()
+        if TYPE_CHECKING:
+            assert instance
+        self.instance = instance
+        self.attr = attr
+
+        mapper = object_mapper(instance)
+        prop = mapper._props[self.attr.key]
+
+        if prop.secondary is not None:
+            # this is a hack right now.  The Query only knows how to
+            # make subsequent joins() without a given left-hand side
+            # from self._from_obj[0].  We need to ensure prop.secondary
+            # is in the FROM.  So we purposely put the mapper selectable
+            # in _from_obj[0] to ensure a user-defined join() later on
+            # doesn't fail, and secondary is then in _from_obj[1].
+
+            # note also, we are using the official ORM-annotated selectable
+            # from __clause_element__(), see #7868
+            self._from_obj = (prop.mapper.__clause_element__(), prop.secondary)
+        else:
+            self._from_obj = ()
+
+        self._where_criteria = (
+            prop._with_parent(instance, alias_secondary=False),
+        )
+
+        if self.attr.order_by:
+            self._order_by_clauses = self.attr.order_by
+        else:
+            self._order_by_clauses = ()
+
+    def _add_all_impl(self, iterator: Iterable[_T]) -> None:
+        for item in iterator:
+            self.attr.append(
+                attributes.instance_state(self.instance),
+                attributes.instance_dict(self.instance),
+                item,
+                None,
+            )
+
+    def _remove_impl(self, item: _T) -> None:
+        self.attr.remove(
+            attributes.instance_state(self.instance),
+            attributes.instance_dict(self.instance),
+            item,
+            None,
+        )
+
+
+class WriteOnlyCollection(AbstractCollectionWriter[_T]):
+    """Write-only collection which can synchronize changes into the
+    attribute event system.
+
+    The :class:`.WriteOnlyCollection` is used in a mapping by
+    using the ``"write_only"`` lazy loading strategy with
+    :func:`_orm.relationship`.     For background on this configuration,
+    see :ref:`write_only_relationship`.
+
+    .. versionadded:: 2.0
+
+    .. seealso::
+
+        :ref:`write_only_relationship`
+
+    """
+
+    __slots__ = (
+        "instance",
+        "attr",
+        "_where_criteria",
+        "_from_obj",
+        "_order_by_clauses",
+    )
+
+    def __iter__(self) -> NoReturn:
+        raise TypeError(
+            "WriteOnly collections don't support iteration in-place; "
+            "to query for collection items, use the select() method to "
+            "produce a SQL statement and execute it with session.scalars()."
+        )
+
+    def select(self) -> Select[Tuple[_T]]:
+        """Produce a :class:`_sql.Select` construct that represents the
+        rows within this instance-local :class:`_orm.WriteOnlyCollection`.
+
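+        E.g., a brief sketch, assuming a write-only ``User.addresses``
+        collection (illustrative names)::
+
+            stmt = user.addresses.select().limit(10)
+            for address in session.scalars(stmt):
+                print(address)
+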
+        """
+        stmt = select(self.attr.target_mapper).where(*self._where_criteria)
+        if self._from_obj:
+            stmt = stmt.select_from(*self._from_obj)
+        if self._order_by_clauses:
+            stmt = stmt.order_by(*self._order_by_clauses)
+        return stmt
+
+    def insert(self) -> Insert:
+        """For one-to-many collections, produce a :class:`_dml.Insert` which
+        will insert new rows in terms of this instance-local
+        :class:`_orm.WriteOnlyCollection`.
+
+        This construct is only supported for a :class:`_orm.Relationship`
+        that does **not** include the :paramref:`_orm.relationship.secondary`
+        parameter.  For relationships that refer to a many-to-many table,
+        use ordinary bulk insert techniques to produce new objects, then
+        use :meth:`_orm.AbstractCollectionWriter.add_all` to associate them
+        with the collection.
+
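+        E.g., an illustrative sketch; the parameter dictionaries are
+        hypothetical, and the parent's foreign key value is supplied
+        automatically by the construct::
+
+            session.execute(
+                user.addresses.insert(),
+                [{"email": "one@example.com"}, {"email": "two@example.com"}],
+            )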
+
+        """
+
+        state = inspect(self.instance)
+        mapper = state.mapper
+        prop = mapper._props[self.attr.key]
+
+        if prop.direction is not RelationshipDirection.ONETOMANY:
+            raise exc.InvalidRequestError(
+                "Write only bulk INSERT only supported for one-to-many "
+                "collections; for many-to-many, use a separate bulk "
+                "INSERT along with add_all()."
+            )
+
+        dict_: Dict[str, Any] = {}
+
+        for l, r in prop.synchronize_pairs:
+            fn = prop._get_attr_w_warn_on_none(
+                mapper,
+                state,
+                state.dict,
+                l,
+            )
+
+            dict_[r.key] = bindparam(None, callable_=fn)
+
+        return insert(self.attr.target_mapper).values(**dict_)
+
+    def update(self) -> Update:
+        """Produce a :class:`_dml.Update` which will refer to rows in terms
+        of this instance-local :class:`_orm.WriteOnlyCollection`.
+
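+        E.g., a brief sketch (``archived`` is a hypothetical column)::
+
+            stmt = user.addresses.update().values(archived=True)
+            session.execute(stmt)
+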
+        """
+        return update(self.attr.target_mapper).where(*self._where_criteria)
+
+    def delete(self) -> Delete:
+        """Produce a :class:`_dml.Delete` which will refer to rows in terms
+        of this instance-local :class:`_orm.WriteOnlyCollection`.
+
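+        E.g., a brief sketch (illustrative names)::
+
+            session.execute(user.addresses.delete())
+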
+        """
+        return delete(self.attr.target_mapper).where(*self._where_criteria)
+
+    def add_all(self, iterator: Iterable[_T]) -> None:
+        """Add an iterable of items to this :class:`_orm.WriteOnlyCollection`.
+
+        The given items will be persisted to the database in terms of
+        the parent instance's collection on the next flush.
+
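+        E.g., a brief sketch (``Address`` objects are illustrative)::
+
+            new = [Address(email="x@example.com")]
+            user.addresses.add_all(new)
+            session.commit()
+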
+        """
+        self._add_all_impl(iterator)
+
+    def add(self, item: _T) -> None:
+        """Add an item to this :class:`_orm.WriteOnlyCollection`.
+
+        The given item will be persisted to the database in terms of
+        the parent instance's collection on the next flush.
+
+        """
+        self._add_all_impl([item])
+
+    def remove(self, item: _T) -> None:
+        """Remove an item from this :class:`_orm.WriteOnlyCollection`.
+
+        The given item will be removed from the parent instance's collection on
+        the next flush.
+
+        """
+        self._remove_impl(item)