Diffstat (limited to '.venv/lib/python3.12/site-packages/sqlalchemy/sql')
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/__init__.py | 145
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/_dml_constructors.py | 132
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/_elements_constructors.py | 1862
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/_orm_types.py | 20
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/_py_util.py | 75
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/_selectable_constructors.py | 713
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/_typing.py | 463
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/annotation.py | 585
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/base.py | 2185
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/cache_key.py | 1057
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/coercions.py | 1403
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/compiler.py | 7840
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/crud.py | 1669
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/ddl.py | 1438
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/default_comparator.py | 552
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/dml.py | 1837
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/elements.py | 5537
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/events.py | 458
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/expression.py | 162
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/functions.py | 2064
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/lambdas.py | 1443
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/naming.py | 212
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/operators.py | 2623
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/roles.py | 323
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/schema.py | 6201
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/selectable.py | 7183
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/sqltypes.py | 3844
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/traversals.py | 1024
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/type_api.py | 2358
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/util.py | 1487
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/sql/visitors.py | 1167
31 files changed, 58062 insertions(+), 0 deletions(-)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/__init__.py
new file mode 100644
index 00000000..188f709d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/__init__.py
@@ -0,0 +1,145 @@
+# sql/__init__.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+from typing import Any
+from typing import TYPE_CHECKING
+
+from ._typing import ColumnExpressionArgument as ColumnExpressionArgument
+from ._typing import NotNullable as NotNullable
+from ._typing import Nullable as Nullable
+from .base import Executable as Executable
+from .compiler import COLLECT_CARTESIAN_PRODUCTS as COLLECT_CARTESIAN_PRODUCTS
+from .compiler import FROM_LINTING as FROM_LINTING
+from .compiler import NO_LINTING as NO_LINTING
+from .compiler import WARN_LINTING as WARN_LINTING
+from .ddl import BaseDDLElement as BaseDDLElement
+from .ddl import DDL as DDL
+from .ddl import DDLElement as DDLElement
+from .ddl import ExecutableDDLElement as ExecutableDDLElement
+from .expression import Alias as Alias
+from .expression import alias as alias
+from .expression import all_ as all_
+from .expression import and_ as and_
+from .expression import any_ as any_
+from .expression import asc as asc
+from .expression import between as between
+from .expression import bindparam as bindparam
+from .expression import case as case
+from .expression import cast as cast
+from .expression import ClauseElement as ClauseElement
+from .expression import collate as collate
+from .expression import column as column
+from .expression import ColumnCollection as ColumnCollection
+from .expression import ColumnElement as ColumnElement
+from .expression import CompoundSelect as CompoundSelect
+from .expression import cte as cte
+from .expression import Delete as Delete
+from .expression import delete as delete
+from .expression import desc as desc
+from .expression import distinct as distinct
+from .expression import except_ as except_
+from .expression import except_all as except_all
+from .expression import exists as exists
+from .expression import extract as extract
+from .expression import false as false
+from .expression import False_ as False_
+from .expression import FromClause as FromClause
+from .expression import func as func
+from .expression import funcfilter as funcfilter
+from .expression import Insert as Insert
+from .expression import insert as insert
+from .expression import intersect as intersect
+from .expression import intersect_all as intersect_all
+from .expression import Join as Join
+from .expression import join as join
+from .expression import label as label
+from .expression import LABEL_STYLE_DEFAULT as LABEL_STYLE_DEFAULT
+from .expression import (
+    LABEL_STYLE_DISAMBIGUATE_ONLY as LABEL_STYLE_DISAMBIGUATE_ONLY,
+)
+from .expression import LABEL_STYLE_NONE as LABEL_STYLE_NONE
+from .expression import (
+    LABEL_STYLE_TABLENAME_PLUS_COL as LABEL_STYLE_TABLENAME_PLUS_COL,
+)
+from .expression import lambda_stmt as lambda_stmt
+from .expression import LambdaElement as LambdaElement
+from .expression import lateral as lateral
+from .expression import literal as literal
+from .expression import literal_column as literal_column
+from .expression import modifier as modifier
+from .expression import not_ as not_
+from .expression import null as null
+from .expression import nulls_first as nulls_first
+from .expression import nulls_last as nulls_last
+from .expression import nullsfirst as nullsfirst
+from .expression import nullslast as nullslast
+from .expression import or_ as or_
+from .expression import outerjoin as outerjoin
+from .expression import outparam as outparam
+from .expression import over as over
+from .expression import quoted_name as quoted_name
+from .expression import Select as Select
+from .expression import select as select
+from .expression import Selectable as Selectable
+from .expression import SelectLabelStyle as SelectLabelStyle
+from .expression import SQLColumnExpression as SQLColumnExpression
+from .expression import StatementLambdaElement as StatementLambdaElement
+from .expression import Subquery as Subquery
+from .expression import table as table
+from .expression import TableClause as TableClause
+from .expression import TableSample as TableSample
+from .expression import tablesample as tablesample
+from .expression import text as text
+from .expression import true as true
+from .expression import True_ as True_
+from .expression import try_cast as try_cast
+from .expression import tuple_ as tuple_
+from .expression import type_coerce as type_coerce
+from .expression import union as union
+from .expression import union_all as union_all
+from .expression import Update as Update
+from .expression import update as update
+from .expression import Values as Values
+from .expression import values as values
+from .expression import within_group as within_group
+from .visitors import ClauseVisitor as ClauseVisitor
+
+
+def __go(lcls: Any) -> None:
+    from .. import util as _sa_util
+
+    from . import base
+    from . import coercions
+    from . import elements
+    from . import lambdas
+    from . import selectable
+    from . import schema
+    from . import traversals
+    from . import type_api
+
+    if not TYPE_CHECKING:
+        base.coercions = elements.coercions = coercions
+        base.elements = elements
+        base.type_api = type_api
+        coercions.elements = elements
+        coercions.lambdas = lambdas
+        coercions.schema = schema
+        coercions.selectable = selectable
+
+    from .annotation import _prepare_annotations
+    from .annotation import Annotated
+    from .elements import AnnotatedColumnElement
+    from .elements import ClauseList
+    from .selectable import AnnotatedFromClause
+
+    _prepare_annotations(ColumnElement, AnnotatedColumnElement)
+    _prepare_annotations(FromClause, AnnotatedFromClause)
+    _prepare_annotations(ClauseList, Annotated)
+
+    _sa_util.preloaded.import_prefix("sqlalchemy.sql")
+
+
+__go(locals())
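# --- Editor's sketch (not part of the vendored file above): the aliased
# imports make sqlalchemy.sql a flat public namespace.  A quick check,
# assuming SQLAlchemy 2.x is installed; the "user" table is illustrative.
from sqlalchemy.sql import and_, column, select, table

user = table("user", column("id"), column("name"))
stmt = select(user.c.id).where(and_(user.c.name == "wendy", user.c.id > 5))
print(stmt)
# SELECT "user".id FROM "user"
# WHERE "user".name = :name_1 AND "user".id > :id_1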
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_dml_constructors.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_dml_constructors.py
new file mode 100644
index 00000000..0a6f6011
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_dml_constructors.py
@@ -0,0 +1,132 @@
+# sql/_dml_constructors.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from .dml import Delete
+from .dml import Insert
+from .dml import Update
+
+if TYPE_CHECKING:
+    from ._typing import _DMLTableArgument
+
+
+def insert(table: _DMLTableArgument) -> Insert:
+    """Construct an :class:`_expression.Insert` object.
+
+    E.g.::
+
+        from sqlalchemy import insert
+
+        stmt = insert(user_table).values(name="username", fullname="Full Username")
+
+    Similar functionality is available via the
+    :meth:`_expression.TableClause.insert` method on
+    :class:`_schema.Table`.
+
+    .. seealso::
+
+        :ref:`tutorial_core_insert` - in the :ref:`unified_tutorial`
+
+
+    :param table: :class:`_expression.TableClause`
+     which is the subject of the
+     insert.
+
+    :param values: collection of values to be inserted; see
+     :meth:`_expression.Insert.values`
+     for a description of allowed formats here.
+     Can be omitted entirely; a :class:`_expression.Insert` construct
+     will also dynamically render the VALUES clause at execution time
+     based on the parameters passed to :meth:`_engine.Connection.execute`.
+
+    :param inline: if True, no attempt will be made to retrieve the
+     SQL-generated default values to be provided within the statement;
+     in particular,
+     this allows SQL expressions to be rendered 'inline' within the
+     statement without the need to pre-execute them beforehand; for
+     backends that support "returning", this turns off the "implicit
+     returning" feature for the statement.
+
+    If both :paramref:`_expression.insert.values` and compile-time bind
+    parameters are present, the compile-time bind parameters override the
+    information specified within :paramref:`_expression.insert.values` on a
+    per-key basis.
+
+    The keys within :paramref:`_expression.Insert.values` can be either
+    :class:`~sqlalchemy.schema.Column` objects or their string
+    identifiers. Each key may reference one of:
+
+    * a literal data value (i.e. string, number, etc.);
+    * a Column object;
+    * a SELECT statement.
+
+    If a ``SELECT`` statement is specified which references this
+    ``INSERT`` statement's table, the statement will be correlated
+    against the ``INSERT`` statement.
+
+    .. seealso::
+
+        :ref:`tutorial_core_insert` - in the :ref:`unified_tutorial`
+
+    """  # noqa: E501
+    return Insert(table)
+
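# --- Editor's sketch (not part of the vendored file): end-to-end use of
# insert() against an in-memory SQLite engine.  The engine URL, table name
# and columns are illustrative assumptions, not taken from this diff.
from sqlalchemy import (
    Column,
    Integer,
    MetaData,
    String,
    Table,
    create_engine,
    insert,
)

metadata = MetaData()
user_table = Table(
    "user_account",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(30)),
    Column("fullname", String(60)),
)

engine = create_engine("sqlite://")
metadata.create_all(engine)

with engine.begin() as conn:
    # values() fixes the VALUES clause at construction time ...
    conn.execute(
        insert(user_table).values(name="username", fullname="Full Username")
    )
    # ... or omit it and let execute-time parameters drive the clause.
    conn.execute(insert(user_table), [{"name": "wendy", "fullname": "Wendy W."}])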
+
+def update(table: _DMLTableArgument) -> Update:
+    r"""Construct an :class:`_expression.Update` object.
+
+    E.g.::
+
+        from sqlalchemy import update
+
+        stmt = (
+            update(user_table).where(user_table.c.id == 5).values(name="user #5")
+        )
+
+    Similar functionality is available via the
+    :meth:`_expression.TableClause.update` method on
+    :class:`_schema.Table`.
+
+    :param table: A :class:`_schema.Table`
+     object representing the database
+     table to be updated.
+
+
+    .. seealso::
+
+        :ref:`tutorial_core_update_delete` - in the :ref:`unified_tutorial`
+
+
+    """  # noqa: E501
+    return Update(table)
+
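# --- Editor's sketch (not part of the vendored file): update() combined
# with bindparam() for per-execution parameters; printing the statement
# shows the generated SQL.  The "user" table is illustrative.
from sqlalchemy import bindparam, column, table, update

user = table("user", column("id"), column("name"), column("fullname"))
stmt = (
    update(user)
    .where(user.c.name == bindparam("uname"))
    .values(fullname=bindparam("fname"))
)
print(stmt)
# UPDATE "user" SET fullname=:fname WHERE "user".name = :uname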
+
+def delete(table: _DMLTableArgument) -> Delete:
+    r"""Construct :class:`_expression.Delete` object.
+
+    E.g.::
+
+        from sqlalchemy import delete
+
+        stmt = delete(user_table).where(user_table.c.id == 5)
+
+    Similar functionality is available via the
+    :meth:`_expression.TableClause.delete` method on
+    :class:`_schema.Table`.
+
+    :param table: The table to delete rows from.
+
+    .. seealso::
+
+        :ref:`tutorial_core_update_delete` - in the :ref:`unified_tutorial`
+
+
+    """
+    return Delete(table)
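# --- Editor's sketch (not part of the vendored file): the SQL generated by
# delete() against a lightweight table() construct (names illustrative).
from sqlalchemy import column, delete, table

user = table("user", column("id"))
print(delete(user).where(user.c.id == 5))
# DELETE FROM "user" WHERE "user".id = :id_1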
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_elements_constructors.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_elements_constructors.py
new file mode 100644
index 00000000..b628fcc9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_elements_constructors.py
@@ -0,0 +1,1862 @@
+# sql/_elements_constructors.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+import typing
+from typing import Any
+from typing import Callable
+from typing import Mapping
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple as typing_Tuple
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import coercions
+from . import roles
+from .base import _NoArg
+from .coercions import _document_text_coercion
+from .elements import BindParameter
+from .elements import BooleanClauseList
+from .elements import Case
+from .elements import Cast
+from .elements import CollationClause
+from .elements import CollectionAggregate
+from .elements import ColumnClause
+from .elements import ColumnElement
+from .elements import Extract
+from .elements import False_
+from .elements import FunctionFilter
+from .elements import Label
+from .elements import Null
+from .elements import Over
+from .elements import TextClause
+from .elements import True_
+from .elements import TryCast
+from .elements import Tuple
+from .elements import TypeCoerce
+from .elements import UnaryExpression
+from .elements import WithinGroup
+from .functions import FunctionElement
+from ..util.typing import Literal
+
+if typing.TYPE_CHECKING:
+    from ._typing import _ByArgument
+    from ._typing import _ColumnExpressionArgument
+    from ._typing import _ColumnExpressionOrLiteralArgument
+    from ._typing import _ColumnExpressionOrStrLabelArgument
+    from ._typing import _TypeEngineArgument
+    from .elements import BinaryExpression
+    from .selectable import FromClause
+    from .type_api import TypeEngine
+
+_T = TypeVar("_T")
+
+
+def all_(expr: _ColumnExpressionArgument[_T]) -> CollectionAggregate[bool]:
+    """Produce an ALL expression.
+
+    For dialects such as that of PostgreSQL, this operator applies
+    to usage of the :class:`_types.ARRAY` datatype; for that of
+    MySQL, it may apply to a subquery.  e.g.::
+
+        # renders on PostgreSQL:
+        # '5 = ALL (somearray)'
+        expr = 5 == all_(mytable.c.somearray)
+
+        # renders on MySQL:
+        # '5 = ALL (SELECT value FROM table)'
+        expr = 5 == all_(select(table.c.value))
+
+    Comparison to NULL may work using ``None``::
+
+        None == all_(mytable.c.somearray)
+
+    The any_() / all_() operators also feature a special "operand flipping"
+    behavior such that if any_() / all_() are used on the left side of a
+    comparison using a standalone operator such as ``==``, ``!=``, etc.
+    (not including operator methods such as
+    :meth:`_sql.ColumnOperators.is_`) the rendered expression is flipped::
+
+        # would render '5 = ALL (column)'
+        all_(mytable.c.column) == 5
+
+    Or with ``None``; note this will not perform
+    the usual step of rendering "IS" as is normally the case for NULL::
+
+        # would render 'NULL = ALL(somearray)'
+        all_(mytable.c.somearray) == None
+
+    .. versionchanged:: 1.4.26  repaired the use of any_() / all_()
+       comparing to NULL on the right side to be flipped to the left.
+
+    The column-level :meth:`_sql.ColumnElement.all_` method (not to be
+    confused with :class:`_types.ARRAY` level
+    :meth:`_types.ARRAY.Comparator.all`) is shorthand for
+    ``all_(col)``::
+
+        5 == mytable.c.somearray.all_()
+
+    .. seealso::
+
+        :meth:`_sql.ColumnOperators.all_`
+
+        :func:`_expression.any_`
+
+    """
+    return CollectionAggregate._create_all(expr)
+
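# --- Editor's sketch (not part of the vendored file): how an all_()
# comparison compiles under the PostgreSQL dialect; the ARRAY column is an
# assumption made for illustration.
from sqlalchemy import Integer, all_, column, table
from sqlalchemy.dialects import postgresql

t = table("mytable", column("somearray", postgresql.ARRAY(Integer)))
expr = 5 == all_(t.c.somearray)
print(expr.compile(dialect=postgresql.dialect()))
# roughly: %(param_1)s = ALL (mytable.somearray)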
+
+def and_(  # type: ignore[empty-body]
+    initial_clause: Union[Literal[True], _ColumnExpressionArgument[bool]],
+    *clauses: _ColumnExpressionArgument[bool],
+) -> ColumnElement[bool]:
+    r"""Produce a conjunction of expressions joined by ``AND``.
+
+    E.g.::
+
+        from sqlalchemy import and_
+
+        stmt = select(users_table).where(
+            and_(users_table.c.name == "wendy", users_table.c.enrolled == True)
+        )
+
+    The :func:`.and_` conjunction is also available using the
+    Python ``&`` operator (though note that compound expressions
+    need to be parenthesized in order to function with Python
+    operator precedence behavior)::
+
+        stmt = select(users_table).where(
+            (users_table.c.name == "wendy") & (users_table.c.enrolled == True)
+        )
+
+    The :func:`.and_` operation is also implicit in some cases;
+    the :meth:`_expression.Select.where`
+    method for example can be invoked multiple
+    times against a statement, which will have the effect of each
+    clause being combined using :func:`.and_`::
+
+        stmt = (
+            select(users_table)
+            .where(users_table.c.name == "wendy")
+            .where(users_table.c.enrolled == True)
+        )
+
+    The :func:`.and_` construct must be given at least one positional
+    argument in order to be valid; a :func:`.and_` construct with no
+    arguments is ambiguous.   To produce an "empty" or dynamically
+    generated :func:`.and_`  expression, from a given list of expressions,
+    a "default" element of :func:`_sql.true` (or just ``True``) should be
+    specified::
+
+        from sqlalchemy import true
+
+        criteria = and_(true(), *expressions)
+
+    The above expression will compile to SQL as the expression ``true``
+    or ``1 = 1``, depending on backend, if no other expressions are
+    present.  If expressions are present, then the :func:`_sql.true` value is
+    ignored as it does not affect the outcome of an AND expression that
+    has other elements.
+
+    .. deprecated:: 1.4  The :func:`.and_` element now requires that at
+       least one argument is passed; creating the :func:`.and_` construct
+       with no arguments is deprecated, and will emit a deprecation warning
+       while continuing to produce a blank SQL string.
+
+    .. seealso::
+
+        :func:`.or_`
+
+    """
+    ...
+
+
+if not TYPE_CHECKING:
+    # handle deprecated case which allows zero-arguments
+    def and_(*clauses):  # noqa: F811
+        r"""Produce a conjunction of expressions joined by ``AND``.
+
+        E.g.::
+
+            from sqlalchemy import and_
+
+            stmt = select(users_table).where(
+                and_(users_table.c.name == "wendy", users_table.c.enrolled == True)
+            )
+
+        The :func:`.and_` conjunction is also available using the
+        Python ``&`` operator (though note that compound expressions
+        need to be parenthesized in order to function with Python
+        operator precedence behavior)::
+
+            stmt = select(users_table).where(
+                (users_table.c.name == "wendy") & (users_table.c.enrolled == True)
+            )
+
+        The :func:`.and_` operation is also implicit in some cases;
+        the :meth:`_expression.Select.where`
+        method for example can be invoked multiple
+        times against a statement, which will have the effect of each
+        clause being combined using :func:`.and_`::
+
+            stmt = (
+                select(users_table)
+                .where(users_table.c.name == "wendy")
+                .where(users_table.c.enrolled == True)
+            )
+
+        The :func:`.and_` construct must be given at least one positional
+        argument in order to be valid; a :func:`.and_` construct with no
+        arguments is ambiguous.   To produce an "empty" or dynamically
+        generated :func:`.and_`  expression, from a given list of expressions,
+        a "default" element of :func:`_sql.true` (or just ``True``) should be
+        specified::
+
+            from sqlalchemy import true
+
+            criteria = and_(true(), *expressions)
+
+        The above expression will compile to SQL as the expression ``true``
+        or ``1 = 1``, depending on backend, if no other expressions are
+        present.  If expressions are present, then the :func:`_sql.true` value
+        is ignored as it does not affect the outcome of an AND expression that
+        has other elements.
+
+        .. deprecated:: 1.4  The :func:`.and_` element now requires that at
+          least one argument is passed; creating the :func:`.and_` construct
+          with no arguments is deprecated, and will emit a deprecation warning
+          while continuing to produce a blank SQL string.
+
+        .. seealso::
+
+            :func:`.or_`
+
+        """  # noqa: E501
+        return BooleanClauseList.and_(*clauses)
+
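# --- Editor's sketch (not part of the vendored file): seeding a dynamically
# built and_() with true(), as the docstring above recommends; the filters
# list is illustrative.
from sqlalchemy import and_, column, table, true

user = table("user", column("name"), column("enrolled"))
filters = []  # imagine criteria appended conditionally at runtime
print(and_(true(), *filters))  # renders: true
filters.append(user.c.name == "wendy")
print(and_(true(), *filters))  # the true() seed drops out: "user".name = :name_1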
+
+def any_(expr: _ColumnExpressionArgument[_T]) -> CollectionAggregate[bool]:
+    """Produce an ANY expression.
+
+    For dialects such as that of PostgreSQL, this operator applies
+    to usage of the :class:`_types.ARRAY` datatype; for that of
+    MySQL, it may apply to a subquery.  e.g.::
+
+        # renders on PostgreSQL:
+        # '5 = ANY (somearray)'
+        expr = 5 == any_(mytable.c.somearray)
+
+        # renders on MySQL:
+        # '5 = ANY (SELECT value FROM table)'
+        expr = 5 == any_(select(table.c.value))
+
+    Comparison to NULL may work using ``None`` or :func:`_sql.null`::
+
+        None == any_(mytable.c.somearray)
+
+    The any_() / all_() operators also feature a special "operand flipping"
+    behavior such that if any_() / all_() are used on the left side of a
+    comparison using a standalone operator such as ``==``, ``!=``, etc.
+    (not including operator methods such as
+    :meth:`_sql.ColumnOperators.is_`) the rendered expression is flipped::
+
+        # would render '5 = ANY (column)'
+        any_(mytable.c.column) == 5
+
+    Or with ``None``; note this will not perform
+    the usual step of rendering "IS" as is normally the case for NULL::
+
+        # would render 'NULL = ANY(somearray)'
+        any_(mytable.c.somearray) == None
+
+    .. versionchanged:: 1.4.26  repaired the use of any_() / all_()
+       comparing to NULL on the right side to be flipped to the left.
+
+    The column-level :meth:`_sql.ColumnElement.any_` method (not to be
+    confused with :class:`_types.ARRAY` level
+    :meth:`_types.ARRAY.Comparator.any`) is shorthand for
+    ``any_(col)``::
+
+        5 == mytable.c.somearray.any_()
+
+    .. seealso::
+
+        :meth:`_sql.ColumnOperators.any_`
+
+        :func:`_expression.all_`
+
+    """
+    return CollectionAggregate._create_any(expr)
+
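# --- Editor's sketch (not part of the vendored file): the "operand
# flipping" described above; both spellings compile to the same SQL
# (PostgreSQL dialect and ARRAY column assumed for illustration).
from sqlalchemy import Integer, any_, column, table
from sqlalchemy.dialects import postgresql

t = table("mytable", column("somearray", postgresql.ARRAY(Integer)))
pg = postgresql.dialect()
print((5 == any_(t.c.somearray)).compile(dialect=pg))
print((any_(t.c.somearray) == 5).compile(dialect=pg))
# both render roughly: %(param_1)s = ANY (mytable.somearray)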
+
+def asc(
+    column: _ColumnExpressionOrStrLabelArgument[_T],
+) -> UnaryExpression[_T]:
+    """Produce an ascending ``ORDER BY`` clause element.
+
+    e.g.::
+
+        from sqlalchemy import asc
+
+        stmt = select(users_table).order_by(asc(users_table.c.name))
+
+    will produce SQL as:
+
+    .. sourcecode:: sql
+
+        SELECT id, name FROM user ORDER BY name ASC
+
+    The :func:`.asc` function is a standalone version of the
+    :meth:`_expression.ColumnElement.asc`
+    method available on all SQL expressions,
+    e.g.::
+
+
+        stmt = select(users_table).order_by(users_table.c.name.asc())
+
+    :param column: A :class:`_expression.ColumnElement` (e.g.
+     scalar SQL expression)
+     with which to apply the :func:`.asc` operation.
+
+    .. seealso::
+
+        :func:`.desc`
+
+        :func:`.nulls_first`
+
+        :func:`.nulls_last`
+
+        :meth:`_expression.Select.order_by`
+
+    """
+    return UnaryExpression._create_asc(column)
+
+
+def collate(
+    expression: _ColumnExpressionArgument[str], collation: str
+) -> BinaryExpression[str]:
+    """Return the clause ``expression COLLATE collation``.
+
+    e.g.::
+
+        collate(mycolumn, "utf8_bin")
+
+    produces:
+
+    .. sourcecode:: sql
+
+        mycolumn COLLATE utf8_bin
+
+    The collation expression is also quoted if it is a case-sensitive
+    identifier, e.g. contains uppercase characters.
+
+    .. versionchanged:: 1.2 quoting is automatically applied to COLLATE
+       expressions if they are case sensitive.
+
+    """
+    return CollationClause._create_collation_expression(expression, collation)
+
+
+def between(
+    expr: _ColumnExpressionOrLiteralArgument[_T],
+    lower_bound: Any,
+    upper_bound: Any,
+    symmetric: bool = False,
+) -> BinaryExpression[bool]:
+    """Produce a ``BETWEEN`` predicate clause.
+
+    E.g.::
+
+        from sqlalchemy import between
+
+        stmt = select(users_table).where(between(users_table.c.id, 5, 7))
+
+    Would produce SQL resembling:
+
+    .. sourcecode:: sql
+
+        SELECT id, name FROM user WHERE id BETWEEN :id_1 AND :id_2
+
+    The :func:`.between` function is a standalone version of the
+    :meth:`_expression.ColumnElement.between` method available on all
+    SQL expressions, as in::
+
+        stmt = select(users_table).where(users_table.c.id.between(5, 7))
+
+    All arguments passed to :func:`.between`, including the left side
+    column expression, are coerced from Python scalar values if a
+    the value is not a :class:`_expression.ColumnElement` subclass.
+    For example,
+    three fixed values can be compared as in::
+
+        print(between(5, 3, 7))
+
+    Which would produce::
+
+        :param_1 BETWEEN :param_2 AND :param_3
+
+    :param expr: a column expression, typically a
+     :class:`_expression.ColumnElement`
+     instance or alternatively a Python scalar expression to be coerced
+     into a column expression, serving as the left side of the ``BETWEEN``
+     expression.
+
+    :param lower_bound: a column or Python scalar expression serving as the
+     lower bound of the right side of the ``BETWEEN`` expression.
+
+    :param upper_bound: a column or Python scalar expression serving as the
+     upper bound of the right side of the ``BETWEEN`` expression.
+
+    :param symmetric: if True, will render " BETWEEN SYMMETRIC ". Note
+     that not all databases support this syntax.
+
+    .. seealso::
+
+        :meth:`_expression.ColumnElement.between`
+
+    """
+    col_expr = coercions.expect(roles.ExpressionElementRole, expr)
+    return col_expr.between(lower_bound, upper_bound, symmetric=symmetric)
+
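# --- Editor's sketch (not part of the vendored file): the symmetric=True
# variant, which the docstring above mentions but does not demonstrate.
from sqlalchemy import between, column, table

t = table("user", column("id"))
print(between(t.c.id, 5, 7, symmetric=True))
# "user".id BETWEEN SYMMETRIC :id_1 AND :id_2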
+
+def outparam(
+    key: str, type_: Optional[TypeEngine[_T]] = None
+) -> BindParameter[_T]:
+    """Create an 'OUT' parameter for usage in functions (stored procedures),
+    for databases which support them.
+
+    The ``outparam`` can be used like a regular function parameter.
+    The "output" value will be available from the
+    :class:`~sqlalchemy.engine.CursorResult` object via its ``out_parameters``
+    attribute, which returns a dictionary containing the values.
+
+    """
+    return BindParameter(key, None, type_=type_, unique=False, isoutparam=True)
+
+
+@overload
+def not_(clause: BinaryExpression[_T]) -> BinaryExpression[_T]: ...
+
+
+@overload
+def not_(clause: _ColumnExpressionArgument[_T]) -> ColumnElement[_T]: ...
+
+
+def not_(clause: _ColumnExpressionArgument[_T]) -> ColumnElement[_T]:
+    """Return a negation of the given clause, i.e. ``NOT(clause)``.
+
+    The ``~`` operator is also overloaded on all
+    :class:`_expression.ColumnElement` subclasses to produce the
+    same result.
+
+    """
+
+    return coercions.expect(roles.ExpressionElementRole, clause).__invert__()
+
+
+def bindparam(
+    key: Optional[str],
+    value: Any = _NoArg.NO_ARG,
+    type_: Optional[_TypeEngineArgument[_T]] = None,
+    unique: bool = False,
+    required: Union[bool, Literal[_NoArg.NO_ARG]] = _NoArg.NO_ARG,
+    quote: Optional[bool] = None,
+    callable_: Optional[Callable[[], Any]] = None,
+    expanding: bool = False,
+    isoutparam: bool = False,
+    literal_execute: bool = False,
+) -> BindParameter[_T]:
+    r"""Produce a "bound expression".
+
+    The return value is an instance of :class:`.BindParameter`; this
+    is a :class:`_expression.ColumnElement`
+    subclass which represents a so-called
+    "placeholder" value in a SQL expression, the value of which is
+    supplied at the point at which the statement is executed against a
+    database connection.
+
+    In SQLAlchemy, the :func:`.bindparam` construct has
+    the ability to carry along the actual value that will be ultimately
+    used at expression time.  In this way, it serves not just as
+    a "placeholder" for eventual population, but also as a means of
+    representing so-called "unsafe" values which should not be rendered
+    directly in a SQL statement, but rather should be passed along
+    to the :term:`DBAPI` as values which need to be correctly escaped
+    and potentially handled for type-safety.
+
+    When using :func:`.bindparam` explicitly, the use case is typically
+    one of traditional deferment of parameters; the :func:`.bindparam`
+    construct accepts a name which can then be referred to at execution
+    time::
+
+        from sqlalchemy import bindparam
+
+        stmt = select(users_table).where(
+            users_table.c.name == bindparam("username")
+        )
+
+    The above statement, when rendered, will produce SQL similar to:
+
+    .. sourcecode:: sql
+
+        SELECT id, name FROM user WHERE name = :username
+
+    In order to populate the value of ``:username`` above, the value
+    would typically be applied at execution time to a method
+    like :meth:`_engine.Connection.execute`::
+
+        result = connection.execute(stmt, {"username": "wendy"})
+
+    Explicit use of :func:`.bindparam` is also common when producing
+    UPDATE or DELETE statements that are to be invoked multiple times,
+    where the WHERE criterion of the statement is to change on each
+    invocation, such as::
+
+        stmt = (
+            users_table.update()
+            .where(user_table.c.name == bindparam("username"))
+            .values(fullname=bindparam("fullname"))
+        )
+
+        connection.execute(
+            stmt,
+            [
+                {"username": "wendy", "fullname": "Wendy Smith"},
+                {"username": "jack", "fullname": "Jack Jones"},
+            ],
+        )
+
+    SQLAlchemy's Core expression system makes wide use of
+    :func:`.bindparam` in an implicit sense.   It is typical that Python
+    literal values passed to virtually all SQL expression functions are
+    coerced into fixed :func:`.bindparam` constructs.  For example, given
+    a comparison operation such as::
+
+        expr = users_table.c.name == "Wendy"
+
+    The above expression will produce a :class:`.BinaryExpression`
+    construct, where the left side is the :class:`_schema.Column` object
+    representing the ``name`` column, and the right side is a
+    :class:`.BindParameter` representing the literal value::
+
+        print(repr(expr.right))
+        BindParameter("%(4327771088 name)s", "Wendy", type_=String())
+
+    The expression above will render SQL such as:
+
+    .. sourcecode:: sql
+
+        user.name = :name_1
+
+    Where the ``:name_1`` parameter name is an anonymous name.  The
+    actual string ``Wendy`` is not in the rendered string, but is carried
+    along where it is later used within statement execution.  If we
+    invoke a statement like the following::
+
+        stmt = select(users_table).where(users_table.c.name == "Wendy")
+        result = connection.execute(stmt)
+
+    We would see SQL logging output as:
+
+    .. sourcecode:: sql
+
+        SELECT "user".id, "user".name
+        FROM "user"
+        WHERE "user".name = %(name_1)s
+        {'name_1': 'Wendy'}
+
+    Above, we see that ``Wendy`` is passed as a parameter to the database,
+    while the placeholder ``:name_1`` is rendered in the appropriate form
+    for the target database, in this case the PostgreSQL database.
+
+    Similarly, :func:`.bindparam` is invoked automatically when working
+    with :term:`CRUD` statements as far as the "VALUES" portion is
+    concerned.   The :func:`_expression.insert` construct produces an
+    ``INSERT`` expression which will, at statement execution time, generate
+    bound placeholders based on the arguments passed, as in::
+
+        stmt = users_table.insert()
+        result = connection.execute(stmt, {"name": "Wendy"})
+
+    The above will produce SQL output as:
+
+    .. sourcecode:: sql
+
+        INSERT INTO "user" (name) VALUES (%(name)s)
+        {'name': 'Wendy'}
+
+    The :class:`_expression.Insert` construct, at
+    compilation/execution time, rendered a single :func:`.bindparam`
+    mirroring the column name ``name`` as a result of the single ``name``
+    parameter we passed to the :meth:`_engine.Connection.execute` method.
+
+    :param key:
+      the key (e.g. the name) for this bind param.
+      Will be used in the generated
+      SQL statement for dialects that use named parameters.  This
+      value may be modified when part of a compilation operation,
+      if other :class:`BindParameter` objects exist with the same
+      key, or if its length is too long and truncation is
+      required.
+
+      If omitted, an "anonymous" name is generated for the bound parameter;
+      when given a value to bind, the end result is equivalent to calling upon
+      the :func:`.literal` function with a value to bind, particularly
+      if the :paramref:`.bindparam.unique` parameter is also provided.
+
+    :param value:
+      Initial value for this bind param.  Will be used at statement
+      execution time as the value for this parameter passed to the
+      DBAPI, if no other value is indicated to the statement execution
+      method for this particular parameter name.  Defaults to ``None``.
+
+    :param callable\_:
+      A callable function that takes the place of "value".  The function
+      will be called at statement execution time to determine the
+      ultimate value.   Used for scenarios where the actual bind
+      value cannot be determined at the point at which the clause
+      construct is created, but embedded bind values are still desirable.
+
+    :param type\_:
+      A :class:`.TypeEngine` class or instance representing an optional
+      datatype for this :func:`.bindparam`.  If not passed, a type
+      may be determined automatically for the bind, based on the given
+      value; for example, trivial Python types such as ``str``,
+      ``int``, ``bool``
+      may result in the :class:`.String`, :class:`.Integer` or
+      :class:`.Boolean` types being automatically selected.
+
+      The type of a :func:`.bindparam` is significant especially in that
+      the type will apply pre-processing to the value before it is
+      passed to the database.  For example, a :func:`.bindparam` which
+      refers to a datetime value, and is specified as holding the
+      :class:`.DateTime` type, may apply conversion needed to the
+      value (such as stringification on SQLite) before passing the value
+      to the database.
+
+    :param unique:
+      if True, the key name of this :class:`.BindParameter` will be
+      modified if another :class:`.BindParameter` of the same name
+      already has been located within the containing
+      expression.  This flag is used generally by the internals
+      when producing so-called "anonymous" bound expressions; it
+      isn't generally applicable to explicitly-named :func:`.bindparam`
+      constructs.
+
+    :param required:
+      If ``True``, a value is required at execution time.  If not passed,
+      it defaults to ``True`` if neither :paramref:`.bindparam.value`
+      nor :paramref:`.bindparam.callable` was passed.  If either of these
+      parameters is present, then :paramref:`.bindparam.required`
+      defaults to ``False``.
+
+    :param quote:
+      True if this parameter name requires quoting and is not
+      currently known as a SQLAlchemy reserved word; this currently
+      only applies to the Oracle Database backends, where bound names must
+      sometimes be quoted.
+
+    :param isoutparam:
+      if True, the parameter should be treated like a stored procedure
+      "OUT" parameter.  This applies to backends such as Oracle Database which
+      support OUT parameters.
+
+    :param expanding:
+      if True, this parameter will be treated as an "expanding" parameter
+      at execution time; the parameter value is expected to be a sequence,
+      rather than a scalar value, and the string SQL statement will
+      be transformed on a per-execution basis to accommodate the sequence
+      with a variable number of parameter slots passed to the DBAPI.
+      This is to allow statement caching to be used in conjunction with
+      an IN clause.
+
+      .. seealso::
+
+        :meth:`.ColumnOperators.in_`
+
+        :ref:`baked_in` - with baked queries
+
+      .. note:: The "expanding" feature does not support "executemany"-
+         style parameter sets.
+
+      .. versionadded:: 1.2
+
+      .. versionchanged:: 1.3 the "expanding" bound parameter feature now
+         supports empty lists.
+
+    :param literal_execute:
+      if True, the bound parameter will be rendered in the compile phase
+      with a special "POSTCOMPILE" token, and the SQLAlchemy compiler will
+      render the final value of the parameter into the SQL statement at
+      statement execution time, omitting the value from the parameter
+      dictionary / list passed to DBAPI ``cursor.execute()``.  This
+      produces a similar effect as that of using the ``literal_binds``,
+      compilation flag,  however takes place as the statement is sent to
+      the DBAPI ``cursor.execute()`` method, rather than when the statement
+      is compiled.   The primary use of this
+      capability is for rendering LIMIT / OFFSET clauses for database
+      drivers that can't accommodate bound parameters in these
+      contexts, while allowing SQL constructs to be cacheable at the
+      compilation level.
+
+      .. versionadded:: 1.4 Added "post compile" bound parameters
+
+        .. seealso::
+
+            :ref:`change_4808`.
+
+    .. seealso::
+
+        :ref:`tutorial_sending_parameters` - in the
+        :ref:`unified_tutorial`
+
+
+    """
+    return BindParameter(
+        key,
+        value,
+        type_,
+        unique,
+        required,
+        quote,
+        callable_,
+        expanding,
+        isoutparam,
+        literal_execute,
+    )
+
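# --- Editor's sketch (not part of the vendored file): an "expanding"
# bindparam driving an IN clause, per the parameter documentation above;
# the table is illustrative and nothing is executed.
from sqlalchemy import bindparam, column, select, table

user = table("user", column("id"), column("name"))
stmt = select(user).where(user.c.id.in_(bindparam("ids", expanding=True)))
print(stmt)
# SELECT "user".id, "user".name FROM "user"
# WHERE "user".id IN (__[POSTCOMPILE_ids])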
+
+def case(
+    *whens: Union[
+        typing_Tuple[_ColumnExpressionArgument[bool], Any], Mapping[Any, Any]
+    ],
+    value: Optional[Any] = None,
+    else_: Optional[Any] = None,
+) -> Case[Any]:
+    r"""Produce a ``CASE`` expression.
+
+    The ``CASE`` construct in SQL is a conditional object that
+    acts somewhat analogously to an "if/then" construct in other
+    languages.  It returns an instance of :class:`.Case`.
+
+    :func:`.case` in its usual form is passed a series of "when"
+    constructs, that is, a list of conditions and results as tuples::
+
+        from sqlalchemy import case
+
+        stmt = select(users_table).where(
+            case(
+                (users_table.c.name == "wendy", "W"),
+                (users_table.c.name == "jack", "J"),
+                else_="E",
+            )
+        )
+
+    The above statement will produce SQL resembling:
+
+    .. sourcecode:: sql
+
+        SELECT id, name FROM user
+        WHERE CASE
+            WHEN (name = :name_1) THEN :param_1
+            WHEN (name = :name_2) THEN :param_2
+            ELSE :param_3
+        END
+
+    When simple equality expressions of several values against a single
+    parent column are needed, :func:`.case` also has a "shorthand" format
+    used via the
+    :paramref:`.case.value` parameter, which is passed a column
+    expression to be compared.  In this form, the :paramref:`.case.whens`
+    parameter is passed as a dictionary containing expressions to be
+    compared against keyed to result expressions.  The statement below is
+    equivalent to the preceding statement::
+
+        stmt = select(users_table).where(
+            case({"wendy": "W", "jack": "J"}, value=users_table.c.name, else_="E")
+        )
+
+    The values which are accepted as result values in
+    :paramref:`.case.whens` as well as with :paramref:`.case.else_` are
+    coerced from Python literals into :func:`.bindparam` constructs.
+    SQL expressions, e.g. :class:`_expression.ColumnElement` constructs,
+    are accepted
+    as well.  To coerce a literal string expression into a constant
+    expression rendered inline, use the :func:`_expression.literal_column`
+    construct,
+    as in::
+
+        from sqlalchemy import case, literal_column
+
+        case(
+            (orderline.c.qty > 100, literal_column("'greaterthan100'")),
+            (orderline.c.qty > 10, literal_column("'greaterthan10'")),
+            else_=literal_column("'lessthan10'"),
+        )
+
+    The above will render the given constants without using bound
+    parameters for the result values (but still for the comparison
+    values), as in:
+
+    .. sourcecode:: sql
+
+        CASE
+            WHEN (orderline.qty > :qty_1) THEN 'greaterthan100'
+            WHEN (orderline.qty > :qty_2) THEN 'greaterthan10'
+            ELSE 'lessthan10'
+        END
+
+    :param \*whens: The criteria to be compared against,
+     :paramref:`.case.whens` accepts two different forms, based on
+     whether or not :paramref:`.case.value` is used.
+
+     .. versionchanged:: 1.4 the :func:`_sql.case`
+        function now accepts the series of WHEN conditions positionally
+
+     In the first form, it accepts multiple 2-tuples passed as positional
+     arguments; each 2-tuple consists of ``(<sql expression>, <value>)``,
+     where the SQL expression is a boolean expression and "value" is a
+     resulting value, e.g.::
+
+        case(
+            (users_table.c.name == "wendy", "W"),
+            (users_table.c.name == "jack", "J"),
+        )
+
+     In the second form, it accepts a Python dictionary of comparison
+     values mapped to a resulting value; this form requires
+     :paramref:`.case.value` to be present, and values will be compared
+     using the ``==`` operator, e.g.::
+
+        case({"wendy": "W", "jack": "J"}, value=users_table.c.name)
+
+    :param value: An optional SQL expression which will be used as a
+      fixed "comparison point" for candidate values within a dictionary
+      passed to :paramref:`.case.whens`.
+
+    :param else\_: An optional SQL expression which will be the evaluated
+      result of the ``CASE`` construct if all expressions within
+      :paramref:`.case.whens` evaluate to false.  When omitted, most
+      databases will produce a result of NULL if none of the "when"
+      expressions evaluate to true.
+
+
+    """  # noqa: E501
+    return Case(*whens, value=value, else_=else_)
+
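# --- Editor's sketch (not part of the vendored file): the positional and
# the value= shorthand forms of case() side by side (names illustrative).
from sqlalchemy import case, column, table

user = table("user", column("name"))
positional = case((user.c.name == "wendy", "W"), else_="E")
shorthand = case({"wendy": "W"}, value=user.c.name, else_="E")
print(positional)
# CASE WHEN ("user".name = :name_1) THEN :param_1 ELSE :param_2 END
print(shorthand)
# CASE "user".name WHEN :param_1 THEN :param_2 ELSE :param_3 END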
+
+def cast(
+    expression: _ColumnExpressionOrLiteralArgument[Any],
+    type_: _TypeEngineArgument[_T],
+) -> Cast[_T]:
+    r"""Produce a ``CAST`` expression.
+
+    :func:`.cast` returns an instance of :class:`.Cast`.
+
+    E.g.::
+
+        from sqlalchemy import cast, Numeric
+
+        stmt = select(cast(product_table.c.unit_price, Numeric(10, 4)))
+
+    The above statement will produce SQL resembling:
+
+    .. sourcecode:: sql
+
+        SELECT CAST(unit_price AS NUMERIC(10, 4)) FROM product
+
+    The :func:`.cast` function performs two distinct functions when
+    used.  The first is that it renders the ``CAST`` expression within
+    the resulting SQL string.  The second is that it associates the given
+    type (e.g. :class:`.TypeEngine` class or instance) with the column
+    expression on the Python side, which means the expression will take
+    on the expression operator behavior associated with that type,
+    as well as the bound-value handling and result-row-handling behavior
+    of the type.
+
+    An alternative to :func:`.cast` is the :func:`.type_coerce` function.
+    This function performs the second task of associating an expression
+    with a specific type, but does not render the ``CAST`` expression
+    in SQL.
+
+    :param expression: A SQL expression, such as a
+     :class:`_expression.ColumnElement`
+     expression or a Python string which will be coerced into a bound
+     literal value.
+
+    :param type\_: A :class:`.TypeEngine` class or instance indicating
+     the type to which the ``CAST`` should apply.
+
+    .. seealso::
+
+        :ref:`tutorial_casts`
+
+        :func:`.try_cast` - an alternative to CAST that results in
+        NULLs when the cast fails, instead of raising an error.
+        Only supported by some dialects.
+
+        :func:`.type_coerce` - an alternative to CAST that coerces the type
+        on the Python side only, which is often sufficient to generate the
+        correct SQL and data coercion.
+
+
+    """
+    return Cast(expression, type_)
+
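# --- Editor's sketch (not part of the vendored file): cast() emits a SQL
# CAST, while type_coerce() changes only the Python-side type, per the
# paragraph above.
from sqlalchemy import String, cast, column, table, type_coerce

t = table("t", column("x"))
print(cast(t.c.x, String))  # CAST(t.x AS VARCHAR)
print(type_coerce(t.c.x, String))  # t.x  -- no CAST rendered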
+
+def try_cast(
+    expression: _ColumnExpressionOrLiteralArgument[Any],
+    type_: _TypeEngineArgument[_T],
+) -> TryCast[_T]:
+    """Produce a ``TRY_CAST`` expression for backends which support it;
+    this is a ``CAST`` which returns NULL for un-castable conversions.
+
+    In SQLAlchemy, this construct is supported **only** by the SQL Server
+    dialect, and will raise a :class:`.CompileError` if used on other
+    included backends.  However, third party backends may also support
+    this construct.
+
+    .. tip:: As :func:`_sql.try_cast` originates from the SQL Server dialect,
+       it's importable both from ``sqlalchemy`` and from
+       ``sqlalchemy.dialects.mssql``.
+
+    :func:`_sql.try_cast` returns an instance of :class:`.TryCast` and
+    generally behaves similarly to the :class:`.Cast` construct;
+    at the SQL level, the difference between ``CAST`` and ``TRY_CAST``
+    is that ``TRY_CAST`` returns NULL for an un-castable expression,
+    such as attempting to cast a string ``"hi"`` to an integer value.
+
+    E.g.::
+
+        from sqlalchemy import select, try_cast, Numeric
+
+        stmt = select(try_cast(product_table.c.unit_price, Numeric(10, 4)))
+
+    The above would render on Microsoft SQL Server as:
+
+    .. sourcecode:: sql
+
+        SELECT TRY_CAST (product_table.unit_price AS NUMERIC(10, 4))
+        FROM product_table
+
+    .. versionadded:: 2.0.14  :func:`.try_cast` has been
+       generalized from the SQL Server dialect into a general use
+       construct that may be supported by additional dialects.
+
+    """
+    return TryCast(expression, type_)
+
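# --- Editor's sketch (not part of the vendored file): compiling try_cast()
# under the SQL Server dialect, the one bundled dialect that supports it;
# assumes SQLAlchemy 2.0.14 or later.
from sqlalchemy import Numeric, column, table, try_cast
from sqlalchemy.dialects import mssql

product = table("product", column("unit_price"))
expr = try_cast(product.c.unit_price, Numeric(10, 4))
print(expr.compile(dialect=mssql.dialect()))
# TRY_CAST (product.unit_price AS NUMERIC(10, 4))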
+
+def column(
+    text: str,
+    type_: Optional[_TypeEngineArgument[_T]] = None,
+    is_literal: bool = False,
+    _selectable: Optional[FromClause] = None,
+) -> ColumnClause[_T]:
+    """Produce a :class:`.ColumnClause` object.
+
+    The :class:`.ColumnClause` is a lightweight analogue to the
+    :class:`_schema.Column` class.  The :func:`_expression.column`
+    function can
+    be invoked with just a name alone, as in::
+
+        from sqlalchemy import column
+
+        id, name = column("id"), column("name")
+        stmt = select(id, name).select_from("user")
+
+    The above statement would produce SQL like:
+
+    .. sourcecode:: sql
+
+        SELECT id, name FROM user
+
+    Once constructed, :func:`_expression.column`
+    may be used like any other SQL
+    expression element such as within :func:`_expression.select`
+    constructs::
+
+        from sqlalchemy.sql import column
+
+        id, name = column("id"), column("name")
+        stmt = select(id, name).select_from("user")
+
+    The text handled by :func:`_expression.column`
+    is assumed to be handled
+    like the name of a database column; if the string contains mixed case,
+    special characters, or matches a known reserved word on the target
+    backend, the column expression will render using the quoting
+    behavior determined by the backend.  To produce a textual SQL
+    expression that is rendered exactly without any quoting,
+    use :func:`_expression.literal_column` instead,
+    or pass ``True`` as the
+    value of :paramref:`_expression.column.is_literal`.   Additionally,
+    full SQL
+    statements are best handled using the :func:`_expression.text`
+    construct.
+
+    :func:`_expression.column` can be used in a table-like
+    fashion by combining it with the :func:`.table` function
+    (which is the lightweight analogue to :class:`_schema.Table`
+    ) to produce
+    a working table construct with minimal boilerplate::
+
+        from sqlalchemy import table, column, select
+
+        user = table(
+            "user",
+            column("id"),
+            column("name"),
+            column("description"),
+        )
+
+        stmt = select(user.c.description).where(user.c.name == "wendy")
+
+    A :func:`_expression.column` / :func:`.table`
+    construct like that illustrated
+    above can be created in an
+    ad-hoc fashion and is not associated with any
+    :class:`_schema.MetaData`, DDL, or events, unlike its
+    :class:`_schema.Table` counterpart.
+
+    :param text: the text of the element.
+
+    :param type_: :class:`_types.TypeEngine` object which can associate
+      this :class:`.ColumnClause` with a type.
+
+    :param is_literal: if True, the :class:`.ColumnClause` is assumed to
+      be an exact expression that will be delivered to the output with no
+      quoting rules applied regardless of case-sensitive settings.  The
+      :func:`_expression.literal_column()` function essentially invokes
+      :func:`_expression.column` while passing ``is_literal=True``.
+
+    .. seealso::
+
+        :class:`_schema.Column`
+
+        :func:`_expression.literal_column`
+
+        :func:`.table`
+
+        :func:`_expression.text`
+
+        :ref:`tutorial_select_arbitrary_text`
+
+    """
+    return ColumnClause(text, type_, is_literal, _selectable)
+
+
+def desc(
+    column: _ColumnExpressionOrStrLabelArgument[_T],
+) -> UnaryExpression[_T]:
+    """Produce a descending ``ORDER BY`` clause element.
+
+    e.g.::
+
+        from sqlalchemy import desc
+
+        stmt = select(users_table).order_by(desc(users_table.c.name))
+
+    will produce SQL as:
+
+    .. sourcecode:: sql
+
+        SELECT id, name FROM user ORDER BY name DESC
+
+    The :func:`.desc` function is a standalone version of the
+    :meth:`_expression.ColumnElement.desc`
+    method available on all SQL expressions,
+    e.g.::
+
+
+        stmt = select(users_table).order_by(users_table.c.name.desc())
+
+    :param column: A :class:`_expression.ColumnElement` (e.g.
+     scalar SQL expression)
+     with which to apply the :func:`.desc` operation.
+
+    .. seealso::
+
+        :func:`.asc`
+
+        :func:`.nulls_first`
+
+        :func:`.nulls_last`
+
+        :meth:`_expression.Select.order_by`
+
+    """
+    return UnaryExpression._create_desc(column)
+
+
+def distinct(expr: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]:
+    """Produce an column-expression-level unary ``DISTINCT`` clause.
+
+    This applies the ``DISTINCT`` keyword to an **individual column
+    expression** (e.g. not the whole statement), and renders **specifically
+    in that column position**; this is used for containment within
+    an aggregate function, as in::
+
+        from sqlalchemy import distinct, func
+
+        stmt = select(users_table.c.id, func.count(distinct(users_table.c.name)))
+
+    The above would produce a statement resembling:
+
+    .. sourcecode:: sql
+
+        SELECT user.id, count(DISTINCT user.name) FROM user
+
+    .. tip:: The :func:`_sql.distinct` function does **not** apply DISTINCT
+       to the full SELECT statement, instead applying a DISTINCT modifier
+       to **individual column expressions**.  For general ``SELECT DISTINCT``
+       support, use the
+       :meth:`_sql.Select.distinct` method on :class:`_sql.Select`.
+
+    The :func:`.distinct` function is also available as a column-level
+    method, e.g. :meth:`_expression.ColumnElement.distinct`, as in::
+
+        stmt = select(func.count(users_table.c.name.distinct()))
+
+    The :func:`.distinct` operator is different from the
+    :meth:`_expression.Select.distinct` method of
+    :class:`_expression.Select`,
+    which produces a ``SELECT`` statement
+    with ``DISTINCT`` applied to the result set as a whole,
+    e.g. a ``SELECT DISTINCT`` expression.  See that method for further
+    information.
+
+    .. seealso::
+
+        :meth:`_expression.ColumnElement.distinct`
+
+        :meth:`_expression.Select.distinct`
+
+        :data:`.func`
+
+    """  # noqa: E501
+    return UnaryExpression._create_distinct(expr)
+
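# --- Editor's sketch (not part of the vendored file): distinct() inside an
# aggregate versus Select.distinct() on the whole statement, per the tip
# above (names illustrative).
from sqlalchemy import column, distinct, func, select, table

user = table("user", column("id"), column("name"))
print(select(func.count(distinct(user.c.name))))
# SELECT count(DISTINCT "user".name) AS count_1 FROM "user"
print(select(user.c.name).distinct())
# SELECT DISTINCT "user".name FROM "user"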
+
+def bitwise_not(expr: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]:
+    """Produce a unary bitwise NOT clause, typically via the ``~`` operator.
+
+    Not to be confused with boolean negation :func:`_sql.not_`.
+
+    .. versionadded:: 2.0.2
+
+    .. seealso::
+
+        :ref:`operators_bitwise`
+
+
+    """
+
+    return UnaryExpression._create_bitwise_not(expr)
+
+
+def extract(field: str, expr: _ColumnExpressionArgument[Any]) -> Extract:
+    """Return a :class:`.Extract` construct.
+
+    This is typically available as :func:`.extract`
+    as well as ``func.extract`` from the
+    :data:`.func` namespace.
+
+    :param field: The field to extract.
+
+     .. warning:: This field is used as a literal SQL string.
+         **DO NOT PASS UNTRUSTED INPUT TO THIS STRING**.
+
+    :param expr: A column or Python scalar expression serving as the
+      right side of the ``EXTRACT`` expression.
+
+    E.g.::
+
+        from sqlalchemy import extract
+        from sqlalchemy import table, column
+
+        logged_table = table(
+            "user",
+            column("id"),
+            column("date_created"),
+        )
+
+        stmt = select(logged_table.c.id).where(
+            extract("YEAR", logged_table.c.date_created) == 2021
+        )
+
+    In the above example, the statement is used to select ids from the
+    database where the ``YEAR`` component matches a specific value.
+
+    Similarly, one can also select an extracted component::
+
+        stmt = select(extract("YEAR", logged_table.c.date_created)).where(
+            logged_table.c.id == 1
+        )
+
+    The implementation of ``EXTRACT`` may vary across database backends.
+    Users are reminded to consult their database documentation.
+    """
+    return Extract(field, expr)
+
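# --- Editor's sketch (not part of the vendored file): the SQL generated by
# extract() on the generic dialect (column names illustrative).
from sqlalchemy import column, extract, select, table

t = table("user", column("id"), column("date_created"))
print(select(t.c.id).where(extract("YEAR", t.c.date_created) == 2021))
# roughly: SELECT "user".id FROM "user"
#          WHERE EXTRACT(YEAR FROM "user".date_created) = :param_1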
+
+def false() -> False_:
+    """Return a :class:`.False_` construct.
+
+    E.g.:
+
+    .. sourcecode:: pycon+sql
+
+        >>> from sqlalchemy import false
+        >>> print(select(t.c.x).where(false()))
+        {printsql}SELECT x FROM t WHERE false
+
+    A backend which does not support true/false constants will render as
+    an expression against 1 or 0:
+
+    .. sourcecode:: pycon+sql
+
+        >>> print(select(t.c.x).where(false()))
+        {printsql}SELECT x FROM t WHERE 0 = 1
+
+    The :func:`.true` and :func:`.false` constants also feature
+    "short circuit" operation within an :func:`.and_` or :func:`.or_`
+    conjunction:
+
+    .. sourcecode:: pycon+sql
+
+        >>> print(select(t.c.x).where(or_(t.c.x > 5, true())))
+        {printsql}SELECT x FROM t WHERE true{stop}
+
+        >>> print(select(t.c.x).where(and_(t.c.x > 5, false())))
+        {printsql}SELECT x FROM t WHERE false{stop}
+
+    .. seealso::
+
+        :func:`.true`
+
+    """
+
+    return False_._instance()
+
+
+def funcfilter(
+    func: FunctionElement[_T], *criterion: _ColumnExpressionArgument[bool]
+) -> FunctionFilter[_T]:
+    """Produce a :class:`.FunctionFilter` object against a function.
+
+    Used against aggregate and window functions,
+    for database backends that support the "FILTER" clause.
+
+    E.g.::
+
+        from sqlalchemy import funcfilter
+
+        funcfilter(func.count(1), MyClass.name == "some name")
+
+    Would produce "COUNT(1) FILTER (WHERE myclass.name = 'some name')".
+
+    This function is also available from the :data:`~.expression.func`
+    construct itself via the :meth:`.FunctionElement.filter` method.
+
+    .. seealso::
+
+        :ref:`tutorial_functions_within_group` - in the
+        :ref:`unified_tutorial`
+
+        :meth:`.FunctionElement.filter`
+
+    """
+    return FunctionFilter(func, *criterion)
+
+
+def label(
+    name: str,
+    element: _ColumnExpressionArgument[_T],
+    type_: Optional[_TypeEngineArgument[_T]] = None,
+) -> Label[_T]:
+    """Return a :class:`Label` object for the
+    given :class:`_expression.ColumnElement`.
+
+    A label changes the name of an element in the columns clause of a
+    ``SELECT`` statement, typically via the ``AS`` SQL keyword.
+
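+    E.g., a brief sketch, assuming a hypothetical ``users_table``
+    :class:`_schema.Table` with a ``name`` column::
+
+        from sqlalchemy import label, select
+
+        stmt = select(label("username", users_table.c.name))
+
+    The above would render along the lines of
+    ``SELECT users.name AS username FROM users``.
+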
+    This functionality is more conveniently available via the
+    :meth:`_expression.ColumnElement.label` method on
+    :class:`_expression.ColumnElement`.
+
+    :param name: label name
+
+    :param element: a :class:`_expression.ColumnElement`.
+
+    :param type_: an optional :class:`.TypeEngine` class or instance to
+     associate with the label, taking the place of the type otherwise
+     derived from ``element``.
+
+    """
+    return Label(name, element, type_)
+
+
+def null() -> Null:
+    """Return a constant :class:`.Null` construct."""
+
+    return Null._instance()
+
+
+def nulls_first(column: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]:
+    """Produce the ``NULLS FIRST`` modifier for an ``ORDER BY`` expression.
+
+    :func:`.nulls_first` is intended to modify the expression produced
+    by :func:`.asc` or :func:`.desc`, and indicates how NULL values
+    should be handled when they are encountered during ordering::
+
+        from sqlalchemy import desc, nulls_first
+
+        stmt = select(users_table).order_by(nulls_first(desc(users_table.c.name)))
+
+    The SQL expression from the above would resemble:
+
+    .. sourcecode:: sql
+
+        SELECT id, name FROM user ORDER BY name DESC NULLS FIRST
+
+    Like :func:`.asc` and :func:`.desc`, :func:`.nulls_first` is typically
+    invoked from the column expression itself using
+    :meth:`_expression.ColumnElement.nulls_first`,
+    rather than as its standalone
+    function version, as in::
+
+        stmt = select(users_table).order_by(
+            users_table.c.name.desc().nulls_first()
+        )
+
+    .. versionchanged:: 1.4 :func:`.nulls_first` is renamed from
+        :func:`.nullsfirst` in previous releases.
+        The previous name remains available for backwards compatibility.
+
+    .. seealso::
+
+        :func:`.asc`
+
+        :func:`.desc`
+
+        :func:`.nulls_last`
+
+        :meth:`_expression.Select.order_by`
+
+    """  # noqa: E501
+    return UnaryExpression._create_nulls_first(column)
+
+
+def nulls_last(column: _ColumnExpressionArgument[_T]) -> UnaryExpression[_T]:
+    """Produce the ``NULLS LAST`` modifier for an ``ORDER BY`` expression.
+
+    :func:`.nulls_last` is intended to modify the expression produced
+    by :func:`.asc` or :func:`.desc`, and indicates how NULL values
+    should be handled when they are encountered during ordering::
+
+        from sqlalchemy import desc, nulls_last
+
+        stmt = select(users_table).order_by(nulls_last(desc(users_table.c.name)))
+
+    The SQL expression from the above would resemble:
+
+    .. sourcecode:: sql
+
+        SELECT id, name FROM user ORDER BY name DESC NULLS LAST
+
+    Like :func:`.asc` and :func:`.desc`, :func:`.nulls_last` is typically
+    invoked from the column expression itself using
+    :meth:`_expression.ColumnElement.nulls_last`,
+    rather than as its standalone
+    function version, as in::
+
+        stmt = select(users_table).order_by(users_table.c.name.desc().nulls_last())
+
+    .. versionchanged:: 1.4 :func:`.nulls_last` is renamed from
+        :func:`.nullslast` in previous releases.
+        The previous name remains available for backwards compatibility.
+
+    .. seealso::
+
+        :func:`.asc`
+
+        :func:`.desc`
+
+        :func:`.nulls_first`
+
+        :meth:`_expression.Select.order_by`
+
+    """  # noqa: E501
+    return UnaryExpression._create_nulls_last(column)
+
+
+def or_(  # type: ignore[empty-body]
+    initial_clause: Union[Literal[False], _ColumnExpressionArgument[bool]],
+    *clauses: _ColumnExpressionArgument[bool],
+) -> ColumnElement[bool]:
+    """Produce a conjunction of expressions joined by ``OR``.
+
+    E.g.::
+
+        from sqlalchemy import or_
+
+        stmt = select(users_table).where(
+            or_(users_table.c.name == "wendy", users_table.c.name == "jack")
+        )
+
+    The :func:`.or_` conjunction is also available using the
+    Python ``|`` operator (though note that compound expressions
+    need to be parenthesized in order to function with Python
+    operator precedence behavior)::
+
+        stmt = select(users_table).where(
+            (users_table.c.name == "wendy") | (users_table.c.name == "jack")
+        )
+
+    The :func:`.or_` construct must be given at least one positional
+    argument in order to be valid; an :func:`.or_` construct with no
+    arguments is ambiguous.  To produce an "empty" or dynamically
+    generated :func:`.or_` expression from a given list of expressions,
+    a "default" element of :func:`_sql.false` (or just ``False``) should be
+    specified::
+
+        from sqlalchemy import false
+
+        or_criteria = or_(false(), *expressions)
+
+    The above expression will compile to SQL as the expression ``false``
+    or ``0 = 1``, depending on backend, if no other expressions are
+    present.  If expressions are present, then the :func:`_sql.false` value is
+    ignored as it does not affect the outcome of an OR expression which
+    has other elements.
+
+    .. deprecated:: 1.4  The :func:`.or_` element now requires that at
+       least one argument is passed; creating the :func:`.or_` construct
+       with no arguments is deprecated, and will emit a deprecation warning
+       while continuing to produce a blank SQL string.
+
+    .. seealso::
+
+        :func:`.and_`
+
+    """
+    ...
+
+
+if not TYPE_CHECKING:
+    # handle deprecated case which allows zero arguments
+    def or_(*clauses):  # noqa: F811
+        """Produce a conjunction of expressions joined by ``OR``.
+
+        E.g.::
+
+            from sqlalchemy import or_
+
+            stmt = select(users_table).where(
+                or_(users_table.c.name == "wendy", users_table.c.name == "jack")
+            )
+
+        The :func:`.or_` conjunction is also available using the
+        Python ``|`` operator (though note that compound expressions
+        need to be parenthesized in order to function with Python
+        operator precedence behavior)::
+
+            stmt = select(users_table).where(
+                (users_table.c.name == "wendy") | (users_table.c.name == "jack")
+            )
+
+        The :func:`.or_` construct must be given at least one positional
+        argument in order to be valid; an :func:`.or_` construct with no
+        arguments is ambiguous.  To produce an "empty" or dynamically
+        generated :func:`.or_` expression from a given list of expressions,
+        a "default" element of :func:`_sql.false` (or just ``False``) should be
+        specified::
+
+            from sqlalchemy import false
+
+            or_criteria = or_(false(), *expressions)
+
+        The above expression will compile to SQL as the expression ``false``
+        or ``0 = 1``, depending on backend, if no other expressions are
+        present.  If expressions are present, then the :func:`_sql.false` value
+        is ignored as it does not affect the outcome of an OR expression which
+        has other elements.
+
+        .. deprecated:: 1.4  The :func:`.or_` element now requires that at
+           least one argument is passed; creating the :func:`.or_` construct
+           with no arguments is deprecated, and will emit a deprecation warning
+           while continuing to produce a blank SQL string.
+
+        .. seealso::
+
+            :func:`.and_`
+
+        """  # noqa: E501
+        return BooleanClauseList.or_(*clauses)
+
+
+def over(
+    element: FunctionElement[_T],
+    partition_by: Optional[_ByArgument] = None,
+    order_by: Optional[_ByArgument] = None,
+    range_: Optional[typing_Tuple[Optional[int], Optional[int]]] = None,
+    rows: Optional[typing_Tuple[Optional[int], Optional[int]]] = None,
+) -> Over[_T]:
+    r"""Produce an :class:`.Over` object against a function.
+
+    Used against aggregate or so-called "window" functions,
+    for database backends that support window functions.
+
+    :func:`_expression.over` is usually called using
+    the :meth:`.FunctionElement.over` method, e.g.::
+
+        func.row_number().over(order_by=mytable.c.some_column)
+
+    Would produce:
+
+    .. sourcecode:: sql
+
+        ROW_NUMBER() OVER(ORDER BY some_column)
+
+    Ranges are also possible using the :paramref:`.expression.over.range_`
+    and :paramref:`.expression.over.rows` parameters.  These
+    mutually-exclusive parameters each accept a 2-tuple, which contains
+    a combination of integers and None::
+
+        func.row_number().over(order_by=my_table.c.some_column, range_=(None, 0))
+
+    The above would produce:
+
+    .. sourcecode:: sql
+
+        ROW_NUMBER() OVER(ORDER BY some_column
+        RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)
+
+    A value of ``None`` indicates "unbounded", a
+    value of zero indicates "current row", and negative / positive
+    integers indicate "preceding" and "following":
+
+    * RANGE BETWEEN 5 PRECEDING AND 10 FOLLOWING::
+
+        func.row_number().over(order_by="x", range_=(-5, 10))
+
+    * ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW::
+
+        func.row_number().over(order_by="x", rows=(None, 0))
+
+    * RANGE BETWEEN 2 PRECEDING AND UNBOUNDED FOLLOWING::
+
+        func.row_number().over(order_by="x", range_=(-2, None))
+
+    * RANGE BETWEEN 1 FOLLOWING AND 3 FOLLOWING::
+
+        func.row_number().over(order_by="x", range_=(1, 3))
+
+    :param element: a :class:`.FunctionElement`, :class:`.WithinGroup`,
+     or other compatible construct.
+    :param partition_by: a column element or string, or a list
+     of such, that will be used as the PARTITION BY clause
+     of the OVER construct.
+    :param order_by: a column element or string, or a list
+     of such, that will be used as the ORDER BY clause
+     of the OVER construct.
+    :param range\_: optional range clause for the window.  This is a
+     tuple value which can contain integer values or ``None``,
+     and will render a RANGE BETWEEN PRECEDING / FOLLOWING clause.
+
+    :param rows: optional rows clause for the window.  This is a tuple
+     value which can contain integer values or None, and will render
+     a ROWS BETWEEN PRECEDING / FOLLOWING clause.
+
+    This function is also available from the :data:`~.expression.func`
+    construct itself via the :meth:`.FunctionElement.over` method.
+
+    .. seealso::
+
+        :ref:`tutorial_window_functions` - in the :ref:`unified_tutorial`
+
+        :data:`.expression.func`
+
+        :func:`_expression.within_group`
+
+    """  # noqa: E501
+    return Over(element, partition_by, order_by, range_, rows)
+
+
+@_document_text_coercion("text", ":func:`.text`", ":paramref:`.text.text`")
+def text(text: str) -> TextClause:
+    r"""Construct a new :class:`_expression.TextClause` clause,
+    representing
+    a textual SQL string directly.
+
+    E.g.::
+
+        from sqlalchemy import text
+
+        t = text("SELECT * FROM users")
+        result = connection.execute(t)
+
+    The advantages :func:`_expression.text`
+    provides over a plain string are
+    backend-neutral support for bind parameters, per-statement
+    execution options, as well as
+    bind parameter and result-column typing behavior, allowing
+    SQLAlchemy type constructs to play a role when executing
+    a statement that is specified literally.  The construct can also
+    be provided with a ``.c`` collection of column elements, allowing
+    it to be embedded in other SQL expression constructs as a subquery.
+
+    Bind parameters are specified by name, using the format ``:name``.
+    E.g.::
+
+        t = text("SELECT * FROM users WHERE id=:user_id")
+        result = connection.execute(t, {"user_id": 12})
+
+    For SQL statements where a colon is required verbatim, as within
+    an inline string, use a backslash to escape::
+
+        t = text(r"SELECT * FROM users WHERE name='\:username'")
+
+    The :class:`_expression.TextClause`
+    construct includes methods which can
+    provide information about the bound parameters as well as the column
+    values which would be returned from the textual statement, assuming
+    it's an executable SELECT type of statement.  The
+    :meth:`_expression.TextClause.bindparams`
+    method is used to provide bound
+    parameter detail, and the :meth:`_expression.TextClause.columns`
+    method allows
+    specification of return columns including names and types::
+
+        t = (
+            text("SELECT * FROM users WHERE id=:user_id")
+            .bindparams(user_id=7)
+            .columns(id=Integer, name=String)
+        )
+
+        for id, name in connection.execute(t):
+            print(id, name)
+
+    The :func:`_expression.text` construct is used in cases when
+    a literal string SQL fragment is specified as part of a larger query,
+    such as for the WHERE clause of a SELECT statement::
+
+        s = select(users.c.id, users.c.name).where(text("id=:user_id"))
+        result = connection.execute(s, {"user_id": 12})
+
+    :func:`_expression.text` is also used for the construction
+    of a full, standalone statement using plain text.
+    As such, SQLAlchemy refers
+    to it as an :class:`.Executable` object and may be used
+    like any other statement passed to an ``.execute()`` method.
+
+    :param text:
+      the text of the SQL statement to be created.  Use ``:<param>``
+      to specify bind parameters; they will be compiled to their
+      engine-specific format.
+
+    .. seealso::
+
+        :ref:`tutorial_select_arbitrary_text`
+
+    """
+    return TextClause(text)
+
+
+def true() -> True_:
+    """Return a constant :class:`.True_` construct.
+
+    E.g.:
+
+    .. sourcecode:: pycon+sql
+
+        >>> from sqlalchemy import true
+        >>> print(select(t.c.x).where(true()))
+        {printsql}SELECT x FROM t WHERE true
+
+    A backend which does not support true/false constants will instead
+    render the construct as an expression against 1 or 0:
+
+    .. sourcecode:: pycon+sql
+
+        >>> print(select(t.c.x).where(true()))
+        {printsql}SELECT x FROM t WHERE 1 = 1
+
+    The :func:`.true` and :func:`.false` constants also feature
+    "short circuit" operation within an :func:`.and_` or :func:`.or_`
+    conjunction:
+
+    .. sourcecode:: pycon+sql
+
+        >>> print(select(t.c.x).where(or_(t.c.x > 5, true())))
+        {printsql}SELECT x FROM t WHERE true{stop}
+
+        >>> print(select(t.c.x).where(and_(t.c.x > 5, false())))
+        {printsql}SELECT x FROM t WHERE false{stop}
+
+    .. seealso::
+
+        :func:`.false`
+
+    """
+
+    return True_._instance()
+
+
+def tuple_(
+    *clauses: _ColumnExpressionArgument[Any],
+    types: Optional[Sequence[_TypeEngineArgument[Any]]] = None,
+) -> Tuple:
+    """Return a :class:`.Tuple`.
+
+    Main usage is to produce a composite IN construct using
+    :meth:`.ColumnOperators.in_` ::
+
+        from sqlalchemy import tuple_
+
+        tuple_(table.c.col1, table.c.col2).in_([(1, 2), (5, 12), (10, 19)])
+
+    .. versionchanged:: 1.3.6 Added support for SQLite IN tuples.
+
+    .. warning::
+
+        The composite IN construct is not supported by all backends, and is
+        currently known to work on PostgreSQL, MySQL, and SQLite.
+        Unsupported backends will raise a subclass of
+        :class:`~sqlalchemy.exc.DBAPIError` when such an expression is
+        invoked.
+
+    """
+    return Tuple(*clauses, types=types)
+
+
+def type_coerce(
+    expression: _ColumnExpressionOrLiteralArgument[Any],
+    type_: _TypeEngineArgument[_T],
+) -> TypeCoerce[_T]:
+    r"""Associate a SQL expression with a particular type, without rendering
+    ``CAST``.
+
+    E.g.::
+
+        from sqlalchemy import type_coerce
+
+        stmt = select(type_coerce(log_table.date_string, StringDateTime()))
+
+    The above construct will produce a :class:`.TypeCoerce` object, which
+    does not modify the rendering in any way on the SQL side, with the
+    possible exception of a generated label if used in a columns clause
+    context:
+
+    .. sourcecode:: sql
+
+        SELECT date_string AS date_string FROM log
+
+    When result rows are fetched, the ``StringDateTime`` type processor
+    will be applied to result rows on behalf of the ``date_string`` column.
+
+    .. note:: the :func:`.type_coerce` construct does not render any
+       SQL syntax of its own, nor does it imply
+       parenthesization.   Please use :meth:`.TypeCoerce.self_group`
+       if explicit parenthesization is required.
+
+    In order to provide a named label for the expression, use
+    :meth:`_expression.ColumnElement.label`::
+
+        stmt = select(
+            type_coerce(log_table.date_string, StringDateTime()).label("date")
+        )
+
+    A type that features bound-value handling will also have that behavior
+    take effect when literal values or :func:`.bindparam` constructs are
+    passed to :func:`.type_coerce` as targets.
+    For example, if a type implements the
+    :meth:`.TypeEngine.bind_expression`
+    method or :meth:`.TypeEngine.bind_processor` method or equivalent,
+    these functions will take effect at statement compilation/execution
+    time when a literal value is passed, as in::
+
+        # bound-value handling of MyStringType will be applied to the
+        # literal value "some string"
+        stmt = select(type_coerce("some string", MyStringType))
+
+    When using :func:`.type_coerce` with composed expressions, note that
+    **parentheses are not applied**.   If :func:`.type_coerce` is being
+    used in an operator context where the parentheses normally present from
+    CAST are necessary, use the :meth:`.TypeCoerce.self_group` method:
+
+    .. sourcecode:: pycon+sql
+
+        >>> some_integer = column("someint", Integer)
+        >>> some_string = column("somestr", String)
+        >>> expr = type_coerce(some_integer + 5, String) + some_string
+        >>> print(expr)
+        {printsql}someint + :someint_1 || somestr{stop}
+        >>> expr = type_coerce(some_integer + 5, String).self_group() + some_string
+        >>> print(expr)
+        {printsql}(someint + :someint_1) || somestr{stop}
+
+    :param expression: A SQL expression, such as a
+     :class:`_expression.ColumnElement`
+     expression or a Python string which will be coerced into a bound
+     literal value.
+
+    :param type\_: A :class:`.TypeEngine` class or instance indicating
+     the type to which the expression is coerced.
+
+    .. seealso::
+
+        :ref:`tutorial_casts`
+
+        :func:`.cast`
+
+    """  # noqa
+    return TypeCoerce(expression, type_)
+
+
+def within_group(
+    element: FunctionElement[_T], *order_by: _ColumnExpressionArgument[Any]
+) -> WithinGroup[_T]:
+    r"""Produce a :class:`.WithinGroup` object against a function.
+
+    Used against so-called "ordered set aggregate" and "hypothetical
+    set aggregate" functions, including :class:`.percentile_cont`,
+    :class:`.rank`, :class:`.dense_rank`, etc.
+
+    :func:`_expression.within_group` is usually called using
+    the :meth:`.FunctionElement.within_group` method, e.g.::
+
+        from sqlalchemy import within_group
+
+        stmt = select(
+            department.c.id,
+            func.percentile_cont(0.5).within_group(department.c.salary.desc()),
+        )
+
+    The above statement would produce SQL similar to
+    ``SELECT department.id, percentile_cont(0.5)
+    WITHIN GROUP (ORDER BY department.salary DESC)``.
+
+    :param element: a :class:`.FunctionElement` construct, typically
+     generated by :data:`~.expression.func`.
+    :param \*order_by: one or more column elements that will be used
+     as the ORDER BY clause of the WITHIN GROUP construct.
+
+    .. seealso::
+
+        :ref:`tutorial_functions_within_group` - in the
+        :ref:`unified_tutorial`
+
+        :data:`.expression.func`
+
+        :func:`_expression.over`
+
+    """
+    return WithinGroup(element, *order_by)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_orm_types.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_orm_types.py
new file mode 100644
index 00000000..c37d805e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_orm_types.py
@@ -0,0 +1,20 @@
+# sql/_orm_types.py
+# Copyright (C) 2022-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""ORM types that need to present specifically for **documentation only** of
+the Executable.execution_options() method, which includes options that
+are meaningful to the ORM.
+
+"""
+
+
+from __future__ import annotations
+
+from ..util.typing import Literal
+
+SynchronizeSessionArgument = Literal[False, "auto", "evaluate", "fetch"]
+DMLStrategyArgument = Literal["bulk", "raw", "orm", "auto"]
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_py_util.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_py_util.py
new file mode 100644
index 00000000..9e1a084a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_py_util.py
@@ -0,0 +1,75 @@
+# sql/_py_util.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+import typing
+from typing import Any
+from typing import Dict
+from typing import Tuple
+from typing import Union
+
+from ..util.typing import Literal
+
+if typing.TYPE_CHECKING:
+    from .cache_key import CacheConst
+
+
+class prefix_anon_map(Dict[str, str]):
+    """A map that creates new keys for missing key access.
+
+    Considers keys of the form "<ident> <name>" to produce
+    new symbols "<name>_<index>", where "index" is an incrementing integer
+    corresponding to <name>.
+
+    Inlines the approach taken by :class:`sqlalchemy.util.PopulateDict` which
+    is otherwise usually used for this type of operation.
+
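+    E.g., a sketch of the key scheme::
+
+        m = prefix_anon_map()
+        m["123 mytable"]  # -> "mytable_1"; counter for "mytable" advances
+        m["123 mytable"]  # -> "mytable_1", now a plain dict hit
+        m["456 mytable"]  # -> "mytable_2"
+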
+    """
+
+    def __missing__(self, key: str) -> str:
+        (ident, derived) = key.split(" ", 1)
+        anonymous_counter = self.get(derived, 1)
+        self[derived] = anonymous_counter + 1  # type: ignore
+        value = f"{derived}_{anonymous_counter}"
+        self[key] = value
+        return value
+
+
+class cache_anon_map(
+    Dict[Union[int, "Literal[CacheConst.NO_CACHE]"], Union[Literal[True], str]]
+):
+    """A map that creates new keys for missing key access.
+
+    Produces an incrementing sequence given a series of unique keys.
+
+    This is similar to the compiler prefix_anon_map class although simpler.
+
+    Inlines the approach taken by :class:`sqlalchemy.util.PopulateDict` which
+    is otherwise usually used for this type of operation.
+
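+    E.g., a sketch::
+
+        m = cache_anon_map()
+        m.get_anon(some_obj)  # -> ("0", False) on first access
+        m.get_anon(some_obj)  # -> ("0", True) on subsequent access
+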
+    """
+
+    _index = 0
+
+    def get_anon(self, object_: Any) -> Tuple[str, bool]:
+        idself = id(object_)
+        if idself in self:
+            s_val = self[idself]
+            assert s_val is not True
+            return s_val, True
+        else:
+            # inline of __missing__
+            self[idself] = id_ = str(self._index)
+            self._index += 1
+
+            return id_, False
+
+    def __missing__(self, key: int) -> str:
+        self[key] = val = str(self._index)
+        self._index += 1
+        return val
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_selectable_constructors.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_selectable_constructors.py
new file mode 100644
index 00000000..69427334
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_selectable_constructors.py
@@ -0,0 +1,713 @@
+# sql/_selectable_constructors.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+from typing import Any
+from typing import Optional
+from typing import overload
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+from . import coercions
+from . import roles
+from ._typing import _ColumnsClauseArgument
+from ._typing import _no_kw
+from .elements import ColumnClause
+from .selectable import Alias
+from .selectable import CompoundSelect
+from .selectable import Exists
+from .selectable import FromClause
+from .selectable import Join
+from .selectable import Lateral
+from .selectable import LateralFromClause
+from .selectable import NamedFromClause
+from .selectable import Select
+from .selectable import TableClause
+from .selectable import TableSample
+from .selectable import Values
+
+if TYPE_CHECKING:
+    from ._typing import _FromClauseArgument
+    from ._typing import _OnClauseArgument
+    from ._typing import _SelectStatementForCompoundArgument
+    from ._typing import _T0
+    from ._typing import _T1
+    from ._typing import _T2
+    from ._typing import _T3
+    from ._typing import _T4
+    from ._typing import _T5
+    from ._typing import _T6
+    from ._typing import _T7
+    from ._typing import _T8
+    from ._typing import _T9
+    from ._typing import _TP
+    from ._typing import _TypedColumnClauseArgument as _TCCA
+    from .functions import Function
+    from .selectable import CTE
+    from .selectable import HasCTE
+    from .selectable import ScalarSelect
+    from .selectable import SelectBase
+
+
+def alias(
+    selectable: FromClause, name: Optional[str] = None, flat: bool = False
+) -> NamedFromClause:
+    """Return a named alias of the given :class:`.FromClause`.
+
+    For :class:`.Table` and :class:`.Join` objects, the return type is the
+    :class:`_expression.Alias` object. Other kinds of :class:`.NamedFromClause`
+    objects may be returned for other kinds of :class:`.FromClause` objects.
+
+    The named alias represents any :class:`_expression.FromClause` with an
+    alternate name assigned within SQL, typically using the ``AS`` clause when
+    generated, e.g. ``SELECT * FROM table AS aliasname``.
+
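+    E.g., a brief sketch, assuming a hypothetical ``user_table``
+    :class:`_schema.Table`::
+
+        from sqlalchemy import alias, select
+
+        user_alias = alias(user_table, name="user_2")
+        stmt = select(user_alias.c.id)
+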
+    Equivalent functionality is available via the
+    :meth:`_expression.FromClause.alias`
+    method available on all :class:`_expression.FromClause` objects.
+
+    :param selectable: any :class:`_expression.FromClause` subclass,
+        such as a table, select statement, etc.
+
+    :param name: string name to be assigned as the alias.
+        If ``None``, a name will be deterministically generated at compile
+        time. Deterministic means the name is guaranteed to be unique against
+        other constructs used in the same statement, and will also be the same
+        name for each successive compilation of the same statement object.
+
+    :param flat: Will be passed through if the given selectable
+     is an instance of :class:`_expression.Join` - see
+     :meth:`_expression.Join.alias` for details.
+
+    """
+    return Alias._factory(selectable, name=name, flat=flat)
+
+
+def cte(
+    selectable: HasCTE, name: Optional[str] = None, recursive: bool = False
+) -> CTE:
+    r"""Return a new :class:`_expression.CTE`,
+    or Common Table Expression instance.
+
+    Please see :meth:`_expression.HasCTE.cte` for detail on CTE usage.
+
+    """
+    return coercions.expect(roles.HasCTERole, selectable).cte(
+        name=name, recursive=recursive
+    )
+
+
+# TODO: mypy requires the _TypedSelectable overloads in all compound select
+# constructors since _SelectStatementForCompoundArgument includes
+# untyped args that make it return CompoundSelect[Unpack[tuple[Never, ...]]];
+# pyright does not have this issue.
+_TypedSelectable = Union["Select[_TP]", "CompoundSelect[_TP]"]
+
+
+@overload
+def except_(
+    *selects: _TypedSelectable[_TP],
+) -> CompoundSelect[_TP]: ...
+
+
+@overload
+def except_(
+    *selects: _SelectStatementForCompoundArgument[_TP],
+) -> CompoundSelect[_TP]: ...
+
+
+def except_(
+    *selects: _SelectStatementForCompoundArgument[_TP],
+) -> CompoundSelect[_TP]:
+    r"""Return an ``EXCEPT`` of multiple selectables.
+
+    The returned object is an instance of
+    :class:`_expression.CompoundSelect`.
+
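+    E.g., a sketch, assuming hypothetical ``users_table`` and
+    ``excluded_table`` tables with compatible columns::
+
+        stmt = except_(
+            select(users_table.c.name),
+            select(excluded_table.c.name),
+        )
+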
+    :param \*selects:
+      a list of :class:`_expression.Select` instances.
+
+    """
+    return CompoundSelect._create_except(*selects)
+
+
+@overload
+def except_all(
+    *selects: _TypedSelectable[_TP],
+) -> CompoundSelect[_TP]: ...
+
+
+@overload
+def except_all(
+    *selects: _SelectStatementForCompoundArgument[_TP],
+) -> CompoundSelect[_TP]: ...
+
+
+def except_all(
+    *selects: _SelectStatementForCompoundArgument[_TP],
+) -> CompoundSelect[_TP]:
+    r"""Return an ``EXCEPT ALL`` of multiple selectables.
+
+    The returned object is an instance of
+    :class:`_expression.CompoundSelect`.
+
+    :param \*selects:
+      a list of :class:`_expression.Select` instances.
+
+    """
+    return CompoundSelect._create_except_all(*selects)
+
+
+def exists(
+    __argument: Optional[
+        Union[_ColumnsClauseArgument[Any], SelectBase, ScalarSelect[Any]]
+    ] = None,
+) -> Exists:
+    """Construct a new :class:`_expression.Exists` construct.
+
+    The :func:`_sql.exists` can be invoked by itself to produce an
+    :class:`_sql.Exists` construct, which will accept simple WHERE
+    criteria::
+
+        exists_criteria = exists().where(table1.c.col1 == table2.c.col2)
+
+    However, for greater flexibility in constructing the SELECT, an
+    existing :class:`_sql.Select` construct may be converted to an
+    :class:`_sql.Exists`, most conveniently by making use of the
+    :meth:`_sql.SelectBase.exists` method::
+
+        exists_criteria = (
+            select(table2.c.col2).where(table1.c.col1 == table2.c.col2).exists()
+        )
+
+    The EXISTS criteria is then used inside of an enclosing SELECT::
+
+        stmt = select(table1.c.col1).where(exists_criteria)
+
+    The above statement will then be of the form:
+
+    .. sourcecode:: sql
+
+        SELECT col1 FROM table1 WHERE EXISTS
+        (SELECT table2.col2 FROM table2 WHERE table2.col2 = table1.col1)
+
+    .. seealso::
+
+        :ref:`tutorial_exists` - in the :term:`2.0 style` tutorial.
+
+        :meth:`_sql.SelectBase.exists` - method to transform a ``SELECT`` to an
+        ``EXISTS`` clause.
+
+    """  # noqa: E501
+
+    return Exists(__argument)
+
+
+@overload
+def intersect(
+    *selects: _TypedSelectable[_TP],
+) -> CompoundSelect[_TP]: ...
+
+
+@overload
+def intersect(
+    *selects: _SelectStatementForCompoundArgument[_TP],
+) -> CompoundSelect[_TP]: ...
+
+
+def intersect(
+    *selects: _SelectStatementForCompoundArgument[_TP],
+) -> CompoundSelect[_TP]:
+    r"""Return an ``INTERSECT`` of multiple selectables.
+
+    The returned object is an instance of
+    :class:`_expression.CompoundSelect`.
+
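+    E.g., a sketch, assuming hypothetical ``employees`` and ``managers``
+    tables with compatible columns::
+
+        stmt = intersect(
+            select(employees.c.name),
+            select(managers.c.name),
+        )
+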
+    :param \*selects:
+      a list of :class:`_expression.Select` instances.
+
+    """
+    return CompoundSelect._create_intersect(*selects)
+
+
+@overload
+def intersect_all(
+    *selects: _TypedSelectable[_TP],
+) -> CompoundSelect[_TP]: ...
+
+
+@overload
+def intersect_all(
+    *selects: _SelectStatementForCompoundArgument[_TP],
+) -> CompoundSelect[_TP]: ...
+
+
+def intersect_all(
+    *selects: _SelectStatementForCompoundArgument[_TP],
+) -> CompoundSelect[_TP]:
+    r"""Return an ``INTERSECT ALL`` of multiple selectables.
+
+    The returned object is an instance of
+    :class:`_expression.CompoundSelect`.
+
+    :param \*selects:
+      a list of :class:`_expression.Select` instances.
+
+    """
+    return CompoundSelect._create_intersect_all(*selects)
+
+
+def join(
+    left: _FromClauseArgument,
+    right: _FromClauseArgument,
+    onclause: Optional[_OnClauseArgument] = None,
+    isouter: bool = False,
+    full: bool = False,
+) -> Join:
+    """Produce a :class:`_expression.Join` object, given two
+    :class:`_expression.FromClause`
+    expressions.
+
+    E.g.::
+
+        j = join(
+            user_table, address_table, user_table.c.id == address_table.c.user_id
+        )
+        stmt = select(user_table).select_from(j)
+
+    would emit SQL along the lines of:
+
+    .. sourcecode:: sql
+
+        SELECT user.id, user.name FROM user
+        JOIN address ON user.id = address.user_id
+
+    Similar functionality is available given any
+    :class:`_expression.FromClause` object (e.g. such as a
+    :class:`_schema.Table`) using
+    the :meth:`_expression.FromClause.join` method.
+
+    :param left: The left side of the join.
+
+    :param right: the right side of the join; this is any
+     :class:`_expression.FromClause` object such as a
+     :class:`_schema.Table` object, and
+     may also be a selectable-compatible object such as an ORM-mapped
+     class.
+
+    :param onclause: a SQL expression representing the ON clause of the
+     join.  If left at ``None``, :meth:`_expression.FromClause.join`
+     will attempt to
+     join the two tables based on a foreign key relationship.
+
+    :param isouter: if True, render a LEFT OUTER JOIN, instead of JOIN.
+
+    :param full: if True, render a FULL OUTER JOIN, instead of JOIN.
+
+    .. seealso::
+
+        :meth:`_expression.FromClause.join` - method form,
+        based on a given left side.
+
+        :class:`_expression.Join` - the type of object produced.
+
+    """  # noqa: E501
+
+    return Join(left, right, onclause, isouter, full)
+
+
+def lateral(
+    selectable: Union[SelectBase, _FromClauseArgument],
+    name: Optional[str] = None,
+) -> LateralFromClause:
+    """Return a :class:`_expression.Lateral` object.
+
+    :class:`_expression.Lateral` is an :class:`_expression.Alias`
+    subclass that represents
+    a subquery with the LATERAL keyword applied to it.
+
+    The special behavior of a LATERAL subquery is that it appears in the
+    FROM clause of an enclosing SELECT, but may correlate to other
+    FROM clauses of that SELECT.   It is a special case of subquery
+    that is only supported by a small number of backends, currently
+    more recent PostgreSQL versions.
+
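+    E.g., a brief sketch, assuming hypothetical ``people`` and ``books``
+    tables where ``books`` rows refer to ``people`` rows::
+
+        subq = lateral(
+            select(books.c.title).where(books.c.owner_id == people.c.id)
+        )
+        stmt = select(people.c.name, subq.c.title)
+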
+    .. seealso::
+
+        :ref:`tutorial_lateral_correlation` -  overview of usage.
+
+    """
+    return Lateral._factory(selectable, name=name)
+
+
+def outerjoin(
+    left: _FromClauseArgument,
+    right: _FromClauseArgument,
+    onclause: Optional[_OnClauseArgument] = None,
+    full: bool = False,
+) -> Join:
+    """Return an ``OUTER JOIN`` clause element.
+
+    The returned object is an instance of :class:`_expression.Join`.
+
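+    E.g., assuming ``user_table`` and ``address_table`` as in the
+    :func:`_expression.join` example above::
+
+        j = outerjoin(
+            user_table, address_table, user_table.c.id == address_table.c.user_id
+        )
+        stmt = select(user_table).select_from(j)
+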
+    Similar functionality is also available via the
+    :meth:`_expression.FromClause.outerjoin` method on any
+    :class:`_expression.FromClause`.
+
+    :param left: The left side of the join.
+
+    :param right: The right side of the join.
+
+    :param onclause:  Optional criterion for the ``ON`` clause, is
+      derived from foreign key relationships established between
+      left and right otherwise.
+
+    To chain joins together, use the :meth:`_expression.FromClause.join`
+    or
+    :meth:`_expression.FromClause.outerjoin` methods on the resulting
+    :class:`_expression.Join` object.
+
+    """
+    return Join(left, right, onclause, isouter=True, full=full)
+
+
+# START OVERLOADED FUNCTIONS select Select 1-10
+
+# code within this block is **programmatically,
+# statically generated** by tools/generate_tuple_map_overloads.py
+
+
+@overload
+def select(__ent0: _TCCA[_T0]) -> Select[Tuple[_T0]]: ...
+
+
+@overload
+def select(
+    __ent0: _TCCA[_T0], __ent1: _TCCA[_T1]
+) -> Select[Tuple[_T0, _T1]]: ...
+
+
+@overload
+def select(
+    __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2]
+) -> Select[Tuple[_T0, _T1, _T2]]: ...
+
+
+@overload
+def select(
+    __ent0: _TCCA[_T0],
+    __ent1: _TCCA[_T1],
+    __ent2: _TCCA[_T2],
+    __ent3: _TCCA[_T3],
+) -> Select[Tuple[_T0, _T1, _T2, _T3]]: ...
+
+
+@overload
+def select(
+    __ent0: _TCCA[_T0],
+    __ent1: _TCCA[_T1],
+    __ent2: _TCCA[_T2],
+    __ent3: _TCCA[_T3],
+    __ent4: _TCCA[_T4],
+) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4]]: ...
+
+
+@overload
+def select(
+    __ent0: _TCCA[_T0],
+    __ent1: _TCCA[_T1],
+    __ent2: _TCCA[_T2],
+    __ent3: _TCCA[_T3],
+    __ent4: _TCCA[_T4],
+    __ent5: _TCCA[_T5],
+) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ...
+
+
+@overload
+def select(
+    __ent0: _TCCA[_T0],
+    __ent1: _TCCA[_T1],
+    __ent2: _TCCA[_T2],
+    __ent3: _TCCA[_T3],
+    __ent4: _TCCA[_T4],
+    __ent5: _TCCA[_T5],
+    __ent6: _TCCA[_T6],
+) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ...
+
+
+@overload
+def select(
+    __ent0: _TCCA[_T0],
+    __ent1: _TCCA[_T1],
+    __ent2: _TCCA[_T2],
+    __ent3: _TCCA[_T3],
+    __ent4: _TCCA[_T4],
+    __ent5: _TCCA[_T5],
+    __ent6: _TCCA[_T6],
+    __ent7: _TCCA[_T7],
+) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: ...
+
+
+@overload
+def select(
+    __ent0: _TCCA[_T0],
+    __ent1: _TCCA[_T1],
+    __ent2: _TCCA[_T2],
+    __ent3: _TCCA[_T3],
+    __ent4: _TCCA[_T4],
+    __ent5: _TCCA[_T5],
+    __ent6: _TCCA[_T6],
+    __ent7: _TCCA[_T7],
+    __ent8: _TCCA[_T8],
+) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8]]: ...
+
+
+@overload
+def select(
+    __ent0: _TCCA[_T0],
+    __ent1: _TCCA[_T1],
+    __ent2: _TCCA[_T2],
+    __ent3: _TCCA[_T3],
+    __ent4: _TCCA[_T4],
+    __ent5: _TCCA[_T5],
+    __ent6: _TCCA[_T6],
+    __ent7: _TCCA[_T7],
+    __ent8: _TCCA[_T8],
+    __ent9: _TCCA[_T9],
+) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8, _T9]]: ...
+
+
+# END OVERLOADED FUNCTIONS select
+
+
+@overload
+def select(
+    *entities: _ColumnsClauseArgument[Any], **__kw: Any
+) -> Select[Any]: ...
+
+
+def select(*entities: _ColumnsClauseArgument[Any], **__kw: Any) -> Select[Any]:
+    r"""Construct a new :class:`_expression.Select`.
+
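+    E.g., a brief example, assuming a hypothetical ``users_table``
+    :class:`_schema.Table`::
+
+        from sqlalchemy import select
+
+        stmt = select(users_table).where(users_table.c.name == "wendy")
+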
+    .. versionadded:: 1.4 - The :func:`_sql.select` function now accepts
+       column arguments positionally.   The top-level :func:`_sql.select`
+       function will automatically use the 1.x or 2.x style API based on
+       the incoming arguments; using :func:`_sql.select` from the
+       ``sqlalchemy.future`` module will enforce that only the 2.x style
+       constructor is used.
+
+    Similar functionality is also available via the
+    :meth:`_expression.FromClause.select` method on any
+    :class:`_expression.FromClause`.
+
+    .. seealso::
+
+        :ref:`tutorial_selecting_data` - in the :ref:`unified_tutorial`
+
+    :param \*entities:
+      Entities to SELECT from.  For Core usage, this is typically a series
+      of :class:`_expression.ColumnElement` and / or
+      :class:`_expression.FromClause`
+      objects which will form the columns clause of the resulting
+      statement.   For those objects that are instances of
+      :class:`_expression.FromClause` (typically :class:`_schema.Table`
+      or :class:`_expression.Alias`
+      objects), the :attr:`_expression.FromClause.c`
+      collection is extracted
+      to form a collection of :class:`_expression.ColumnElement` objects.
+
+      This parameter will also accept :class:`_expression.TextClause`
+      constructs as
+      given, as well as ORM-mapped classes.
+
+    """
+    # the keyword args are a necessary element in order for the typing
+    # to work out w/ the varargs vs. having named "keyword" arguments that
+    # aren't always present.
+    if __kw:
+        raise _no_kw()
+    return Select(*entities)
+
+
+def table(name: str, *columns: ColumnClause[Any], **kw: Any) -> TableClause:
+    """Produce a new :class:`_expression.TableClause`.
+
+    The object returned is an instance of
+    :class:`_expression.TableClause`, which
+    represents the "syntactical" portion of the schema-level
+    :class:`_schema.Table` object.
+    It may be used to construct lightweight table constructs.
+
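+    E.g.::
+
+        from sqlalchemy import column, select, table
+
+        user = table("user", column("id"), column("name"))
+        stmt = select(user.c.id).where(user.c.name == "wendy")
+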
+    :param name: Name of the table.
+
+    :param columns: A collection of :func:`_expression.column` constructs.
+
+    :param schema: The schema name for this table.
+
+        .. versionadded:: 1.3.18 :func:`_expression.table` can now
+           accept a ``schema`` argument.
+    """
+
+    return TableClause(name, *columns, **kw)
+
+
+def tablesample(
+    selectable: _FromClauseArgument,
+    sampling: Union[float, Function[Any]],
+    name: Optional[str] = None,
+    seed: Optional[roles.ExpressionElementRole[Any]] = None,
+) -> TableSample:
+    """Return a :class:`_expression.TableSample` object.
+
+    :class:`_expression.TableSample` is an :class:`_expression.Alias`
+    subclass that represents
+    a table with the TABLESAMPLE clause applied to it.
+    :func:`_expression.tablesample`
+    is also available from the :class:`_expression.FromClause`
+    class via the
+    :meth:`_expression.FromClause.tablesample` method.
+
+    The TABLESAMPLE clause allows an approximate percentage of a table's
+    rows to be selected at random. It supports multiple sampling methods,
+    most commonly BERNOULLI and SYSTEM.
+
+    e.g.::
+
+        from sqlalchemy import func
+
+        selectable = people.tablesample(
+            func.bernoulli(1), name="alias", seed=func.random()
+        )
+        stmt = select(selectable.c.people_id)
+
+    Assuming ``people`` with a column ``people_id``, the above
+    statement would render as:
+
+    .. sourcecode:: sql
+
+        SELECT alias.people_id FROM
+        people AS alias TABLESAMPLE bernoulli(:bernoulli_1)
+        REPEATABLE (random())
+
+    :param sampling: a ``float`` percentage between 0 and 100, or a
+        :class:`_functions.Function`.
+
+    :param name: optional alias name
+
+    :param seed: any real-valued SQL expression.  When specified, the
+     REPEATABLE sub-clause is also rendered.
+
+    """
+    return TableSample._factory(selectable, sampling, name=name, seed=seed)
+
+
+@overload
+def union(
+    *selects: _TypedSelectable[_TP],
+) -> CompoundSelect[_TP]: ...
+
+
+@overload
+def union(
+    *selects: _SelectStatementForCompoundArgument[_TP],
+) -> CompoundSelect[_TP]: ...
+
+
+def union(
+    *selects: _SelectStatementForCompoundArgument[_TP],
+) -> CompoundSelect[_TP]:
+    r"""Return a ``UNION`` of multiple selectables.
+
+    The returned object is an instance of
+    :class:`_expression.CompoundSelect`.
+
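+    E.g., a sketch, assuming hypothetical ``users_table`` and
+    ``customers_table`` tables with compatible columns::
+
+        stmt = union(
+            select(users_table.c.name),
+            select(customers_table.c.name),
+        )
+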
+    A similar :meth:`_expression.Select.union` method is available on the
+    :class:`_expression.Select` construct itself.
+
+    :param \*selects:
+      a list of :class:`_expression.Select` instances.
+
+    """
+    return CompoundSelect._create_union(*selects)
+
+
+@overload
+def union_all(
+    *selects: _TypedSelectable[_TP],
+) -> CompoundSelect[_TP]: ...
+
+
+@overload
+def union_all(
+    *selects: _SelectStatementForCompoundArgument[_TP],
+) -> CompoundSelect[_TP]: ...
+
+
+def union_all(
+    *selects: _SelectStatementForCompoundArgument[_TP],
+) -> CompoundSelect[_TP]:
+    r"""Return a ``UNION ALL`` of multiple selectables.
+
+    The returned object is an instance of
+    :class:`_expression.CompoundSelect`.
+
+    A similar :meth:`_expression.Select.union_all` method is available on
+    the :class:`_expression.Select` construct itself.
+
+    :param \*selects:
+      a list of :class:`_expression.Select` instances.
+
+    """
+    return CompoundSelect._create_union_all(*selects)
+
+
+def values(
+    *columns: ColumnClause[Any],
+    name: Optional[str] = None,
+    literal_binds: bool = False,
+) -> Values:
+    r"""Construct a :class:`_expression.Values` construct.
+
+    The column expressions and the actual data for
+    :class:`_expression.Values` are given in two separate steps.  The
+    constructor receives the column expressions typically as
+    :func:`_expression.column` constructs,
+    and the data is then passed via the
+    :meth:`_expression.Values.data` method as a list,
+    which can be called multiple
+    times to add more data, e.g.::
+
+        from sqlalchemy import column
+        from sqlalchemy import values
+
+        value_expr = values(
+            column("id", Integer),
+            column("name", String),
+            name="my_values",
+        ).data([(1, "name1"), (2, "name2"), (3, "name3")])
+
+    :param \*columns: column expressions, typically composed using
+     :func:`_expression.column` objects.
+
+    :param name: the name for this VALUES construct.  If omitted, the
+     VALUES construct will be unnamed in a SQL expression.   Different
+     backends may have different requirements here.
+
+    :param literal_binds: Defaults to False.  Whether or not to render
+     the data values inline in the SQL output, rather than using bound
+     parameters.
+
+    """
+    return Values(*columns, literal_binds=literal_binds, name=name)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_typing.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_typing.py
new file mode 100644
index 00000000..b1af53f7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/_typing.py
@@ -0,0 +1,463 @@
+# sql/_typing.py
+# Copyright (C) 2022-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+import operator
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Generic
+from typing import Iterable
+from typing import Mapping
+from typing import NoReturn
+from typing import Optional
+from typing import overload
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import roles
+from .. import exc
+from .. import util
+from ..inspection import Inspectable
+from ..util.typing import Literal
+from ..util.typing import Protocol
+from ..util.typing import TypeAlias
+
+if TYPE_CHECKING:
+    from datetime import date
+    from datetime import datetime
+    from datetime import time
+    from datetime import timedelta
+    from decimal import Decimal
+    from uuid import UUID
+
+    from .base import Executable
+    from .compiler import Compiled
+    from .compiler import DDLCompiler
+    from .compiler import SQLCompiler
+    from .dml import UpdateBase
+    from .dml import ValuesBase
+    from .elements import ClauseElement
+    from .elements import ColumnElement
+    from .elements import KeyedColumnElement
+    from .elements import quoted_name
+    from .elements import SQLCoreOperations
+    from .elements import TextClause
+    from .lambdas import LambdaElement
+    from .roles import FromClauseRole
+    from .schema import Column
+    from .selectable import Alias
+    from .selectable import CompoundSelect
+    from .selectable import CTE
+    from .selectable import FromClause
+    from .selectable import Join
+    from .selectable import NamedFromClause
+    from .selectable import ReturnsRows
+    from .selectable import Select
+    from .selectable import Selectable
+    from .selectable import SelectBase
+    from .selectable import Subquery
+    from .selectable import TableClause
+    from .sqltypes import TableValueType
+    from .sqltypes import TupleType
+    from .type_api import TypeEngine
+    from ..engine import Dialect
+    from ..util.typing import TypeGuard
+
+_T = TypeVar("_T", bound=Any)
+_T_co = TypeVar("_T_co", bound=Any, covariant=True)
+
+
+_CE = TypeVar("_CE", bound="ColumnElement[Any]")
+
+_CLE = TypeVar("_CLE", bound="ClauseElement")
+
+
+class _HasClauseElement(Protocol, Generic[_T_co]):
+    """indicates a class that has a __clause_element__() method"""
+
+    def __clause_element__(self) -> roles.ExpressionElementRole[_T_co]: ...
+
+
+class _CoreAdapterProto(Protocol):
+    """protocol for the ClauseAdapter/ColumnAdapter.traverse() method."""
+
+    def __call__(self, obj: _CE) -> _CE: ...
+
+
+class _HasDialect(Protocol):
+    """protocol for Engine/Connection-like objects that have dialect
+    attribute.
+    """
+
+    @property
+    def dialect(self) -> Dialect: ...
+
+
+# match column types that are not ORM entities
+_NOT_ENTITY = TypeVar(
+    "_NOT_ENTITY",
+    int,
+    str,
+    bool,
+    "datetime",
+    "date",
+    "time",
+    "timedelta",
+    "UUID",
+    float,
+    "Decimal",
+)
+
+_StarOrOne = Literal["*", 1]
+
+_MAYBE_ENTITY = TypeVar(
+    "_MAYBE_ENTITY",
+    roles.ColumnsClauseRole,
+    _StarOrOne,
+    Type[Any],
+    Inspectable[_HasClauseElement[Any]],
+    _HasClauseElement[Any],
+)
+
+
+# convention:
+# XYZArgument - something that the end user is passing to a public API method
+# XYZElement - the internal representation that we use for the thing.
+# the coercions system is responsible for converting from XYZArgument to
+# XYZElement.
+
+_TextCoercedExpressionArgument = Union[
+    str,
+    "TextClause",
+    "ColumnElement[_T]",
+    _HasClauseElement[_T],
+    roles.ExpressionElementRole[_T],
+]
+
+_ColumnsClauseArgument = Union[
+    roles.TypedColumnsClauseRole[_T],
+    roles.ColumnsClauseRole,
+    "SQLCoreOperations[_T]",
+    _StarOrOne,
+    Type[_T],
+    Inspectable[_HasClauseElement[_T]],
+    _HasClauseElement[_T],
+]
+"""open-ended SELECT columns clause argument.
+
+Includes column expressions, tables, ORM mapped entities, and a few
+literal values.
+
+This type is used for lists of columns / entities to be returned in result
+sets; select(...), insert().returning(...), etc.
+
+"""
+
+_TypedColumnClauseArgument = Union[
+    roles.TypedColumnsClauseRole[_T],
+    "SQLCoreOperations[_T]",
+    Type[_T],
+]
+
+_TP = TypeVar("_TP", bound=Tuple[Any, ...])
+
+_T0 = TypeVar("_T0", bound=Any)
+_T1 = TypeVar("_T1", bound=Any)
+_T2 = TypeVar("_T2", bound=Any)
+_T3 = TypeVar("_T3", bound=Any)
+_T4 = TypeVar("_T4", bound=Any)
+_T5 = TypeVar("_T5", bound=Any)
+_T6 = TypeVar("_T6", bound=Any)
+_T7 = TypeVar("_T7", bound=Any)
+_T8 = TypeVar("_T8", bound=Any)
+_T9 = TypeVar("_T9", bound=Any)
+
+
+_ColumnExpressionArgument = Union[
+    "ColumnElement[_T]",
+    _HasClauseElement[_T],
+    "SQLCoreOperations[_T]",
+    roles.ExpressionElementRole[_T],
+    roles.TypedColumnsClauseRole[_T],
+    Callable[[], "ColumnElement[_T]"],
+    "LambdaElement",
+]
+"See docs in public alias ColumnExpressionArgument."
+
+ColumnExpressionArgument: TypeAlias = _ColumnExpressionArgument[_T]
+"""Narrower "column expression" argument.
+
+This type is used for all the other "column" kinds of expressions that
+typically represent a single SQL column expression, not a set of columns the
+way a table or ORM entity does.
+
+This includes ColumnElement, as well as ORM-mapped attributes that have a
+``__clause_element__()`` method; it also includes the ExpressionElementRole
+overall, which brings in the TextClause object as well.
+
+.. versionadded:: 2.0.13
+
+"""
+
+_ColumnExpressionOrLiteralArgument = Union[Any, _ColumnExpressionArgument[_T]]
+
+_ColumnExpressionOrStrLabelArgument = Union[str, _ColumnExpressionArgument[_T]]
+
+_ByArgument = Union[
+    Iterable[_ColumnExpressionOrStrLabelArgument[Any]],
+    _ColumnExpressionOrStrLabelArgument[Any],
+]
+"""Used for keyword-based ``order_by`` and ``partition_by`` parameters."""
+
+
+_InfoType = Dict[Any, Any]
+"""the .info dictionary accepted and used throughout Core /ORM"""
+
+_FromClauseArgument = Union[
+    roles.FromClauseRole,
+    Type[Any],
+    Inspectable[_HasClauseElement[Any]],
+    _HasClauseElement[Any],
+]
+"""A FROM clause, like we would send to select().select_from().
+
+Also accommodates ORM entities and related constructs.
+
+"""
+
+_JoinTargetArgument = Union[_FromClauseArgument, roles.JoinTargetRole]
+"""target for join() builds on _FromClauseArgument to include additional
+join target roles such as those which come from the ORM.
+
+"""
+
+_OnClauseArgument = Union[_ColumnExpressionArgument[Any], roles.OnClauseRole]
+"""target for an ON clause, includes additional roles such as those which
+come from the ORM.
+
+"""
+
+_SelectStatementForCompoundArgument = Union[
+    "Select[_TP]",
+    "CompoundSelect[_TP]",
+    roles.CompoundElementRole,
+]
+"""SELECT statement acceptable by ``union()`` and other SQL set operations"""
+
+_DMLColumnArgument = Union[
+    str,
+    _HasClauseElement[Any],
+    roles.DMLColumnRole,
+    "SQLCoreOperations[Any]",
+]
+"""A DML column expression.  This is a "key" inside of insert().values(),
+update().values(), and related.
+
+These are usually strings or SQL table columns.
+
+There are also edge cases, such as JSON expression assignment, which we
+would want the DMLColumnRole to be able to accommodate.
+
+"""
+
+_DMLKey = TypeVar("_DMLKey", bound=_DMLColumnArgument)
+_DMLColumnKeyMapping = Mapping[_DMLKey, Any]
+
+
+_DDLColumnArgument = Union[str, "Column[Any]", roles.DDLConstraintColumnRole]
+"""DDL column.
+
+used for :class:`.PrimaryKeyConstraint`, :class:`.UniqueConstraint`, etc.
+
+"""
+
+_DMLTableArgument = Union[
+    "TableClause",
+    "Join",
+    "Alias",
+    "CTE",
+    Type[Any],
+    Inspectable[_HasClauseElement[Any]],
+    _HasClauseElement[Any],
+]
+
+_PropagateAttrsType = util.immutabledict[str, Any]
+
+_TypeEngineArgument = Union[Type["TypeEngine[_T]"], "TypeEngine[_T]"]
+
+_EquivalentColumnMap = Dict["ColumnElement[Any]", Set["ColumnElement[Any]"]]
+
+_LimitOffsetType = Union[int, _ColumnExpressionArgument[int], None]
+
+_AutoIncrementType = Union[bool, Literal["auto", "ignore_fk"]]
+
+if TYPE_CHECKING:
+
+    def is_sql_compiler(c: Compiled) -> TypeGuard[SQLCompiler]: ...
+
+    def is_ddl_compiler(c: Compiled) -> TypeGuard[DDLCompiler]: ...
+
+    def is_named_from_clause(
+        t: FromClauseRole,
+    ) -> TypeGuard[NamedFromClause]: ...
+
+    def is_column_element(
+        c: ClauseElement,
+    ) -> TypeGuard[ColumnElement[Any]]: ...
+
+    def is_keyed_column_element(
+        c: ClauseElement,
+    ) -> TypeGuard[KeyedColumnElement[Any]]: ...
+
+    def is_text_clause(c: ClauseElement) -> TypeGuard[TextClause]: ...
+
+    def is_from_clause(c: ClauseElement) -> TypeGuard[FromClause]: ...
+
+    def is_tuple_type(t: TypeEngine[Any]) -> TypeGuard[TupleType]: ...
+
+    def is_table_value_type(
+        t: TypeEngine[Any],
+    ) -> TypeGuard[TableValueType]: ...
+
+    def is_selectable(t: Any) -> TypeGuard[Selectable]: ...
+
+    def is_select_base(
+        t: Union[Executable, ReturnsRows]
+    ) -> TypeGuard[SelectBase]: ...
+
+    def is_select_statement(
+        t: Union[Executable, ReturnsRows]
+    ) -> TypeGuard[Select[Any]]: ...
+
+    def is_table(t: FromClause) -> TypeGuard[TableClause]: ...
+
+    def is_subquery(t: FromClause) -> TypeGuard[Subquery]: ...
+
+    def is_dml(c: ClauseElement) -> TypeGuard[UpdateBase]: ...
+
+else:
+    is_sql_compiler = operator.attrgetter("is_sql")
+    is_ddl_compiler = operator.attrgetter("is_ddl")
+    is_named_from_clause = operator.attrgetter("named_with_column")
+    is_column_element = operator.attrgetter("_is_column_element")
+    is_keyed_column_element = operator.attrgetter("_is_keyed_column_element")
+    is_text_clause = operator.attrgetter("_is_text_clause")
+    is_from_clause = operator.attrgetter("_is_from_clause")
+    is_tuple_type = operator.attrgetter("_is_tuple_type")
+    is_table_value_type = operator.attrgetter("_is_table_value")
+    is_selectable = operator.attrgetter("is_selectable")
+    is_select_base = operator.attrgetter("_is_select_base")
+    is_select_statement = operator.attrgetter("_is_select_statement")
+    is_table = operator.attrgetter("_is_table")
+    is_subquery = operator.attrgetter("_is_subquery")
+    is_dml = operator.attrgetter("is_dml")
+
+
+def has_schema_attr(t: FromClauseRole) -> TypeGuard[TableClause]:
+    return hasattr(t, "schema")
+
+
+def is_quoted_name(s: str) -> TypeGuard[quoted_name]:
+    return hasattr(s, "quote")
+
+
+def is_has_clause_element(s: object) -> TypeGuard[_HasClauseElement[Any]]:
+    return hasattr(s, "__clause_element__")
+
+
+def is_insert_update(c: ClauseElement) -> TypeGuard[ValuesBase]:
+    return c.is_dml and (c.is_insert or c.is_update)  # type: ignore
+
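+# Illustrative use of the TypeGuard helpers above (the helper names are
+# real; the assertions are a sketch, not part of the module)::
+#
+#     from sqlalchemy import column, insert, table
+#
+#     t = table("t", column("x"))
+#     stmt = insert(t).values(x=5)
+#     assert is_dml(stmt)
+#     assert is_insert_update(stmt)  # narrows to ValuesBase for typing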
+
+def _no_kw() -> exc.ArgumentError:
+    return exc.ArgumentError(
+        "Additional keyword arguments are not accepted by this "
+        "function/method.  The presence of **kw is for pep-484 typing purposes"
+    )
+
+
+def _unexpected_kw(methname: str, kw: Dict[str, Any]) -> NoReturn:
+    k = list(kw)[0]
+    raise TypeError(f"{methname} got an unexpected keyword argument '{k}'")
+
+
+@overload
+def Nullable(
+    val: "SQLCoreOperations[_T]",
+) -> "SQLCoreOperations[Optional[_T]]": ...
+
+
+@overload
+def Nullable(
+    val: roles.ExpressionElementRole[_T],
+) -> roles.ExpressionElementRole[Optional[_T]]: ...
+
+
+@overload
+def Nullable(val: Type[_T]) -> Type[Optional[_T]]: ...
+
+
+def Nullable(
+    val: _TypedColumnClauseArgument[_T],
+) -> _TypedColumnClauseArgument[Optional[_T]]:
+    """Types a column or ORM class as nullable.
+
+    This can be used in select and other contexts to express that the value of
+    a column can be null, for example due to an outer join::
+
+        stmt1 = select(A, Nullable(B)).outerjoin(A.bs)
+        stmt2 = select(A.data, Nullable(B.data)).outerjoin(A.bs)
+
+    At runtime this method returns the input unchanged.
+
+    .. versionadded:: 2.0.20
+    """
+    return val
+
+
+@overload
+def NotNullable(
+    val: "SQLCoreOperations[Optional[_T]]",
+) -> "SQLCoreOperations[_T]": ...
+
+
+@overload
+def NotNullable(
+    val: roles.ExpressionElementRole[Optional[_T]],
+) -> roles.ExpressionElementRole[_T]: ...
+
+
+@overload
+def NotNullable(val: Type[Optional[_T]]) -> Type[_T]: ...
+
+
+@overload
+def NotNullable(val: Optional[Type[_T]]) -> Type[_T]: ...
+
+
+def NotNullable(
+    val: Union[_TypedColumnClauseArgument[Optional[_T]], Optional[Type[_T]]],
+) -> _TypedColumnClauseArgument[_T]:
+    """Types a column or ORM class as not nullable.
+
+    This can be used in select and other contexts to express that the value of
+    a column cannot be null, for example due to a where condition on a
+    nullable column::
+
+        stmt = select(NotNullable(A.value)).where(A.value.is_not(None))
+
+    At runtime this method returns the input unchanged.
+
+    .. versionadded:: 2.0.20
+    """
+    return val  # type: ignore
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/annotation.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/annotation.py
new file mode 100644
index 00000000..bf445ff3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/annotation.py
@@ -0,0 +1,585 @@
+# sql/annotation.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""The :class:`.Annotated` class and related routines; creates hash-equivalent
+copies of SQL constructs which contain context-specific markers and
+associations.
+
+Note that the :class:`.Annotated` concept as implemented in this module is not
+related in any way to the pep-593 concept of "Annotated".
+
+
+"""
+
+from __future__ import annotations
+
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import FrozenSet
+from typing import Mapping
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+
+from . import operators
+from .cache_key import HasCacheKey
+from .visitors import anon_map
+from .visitors import ExternallyTraversible
+from .visitors import InternalTraversal
+from .. import util
+from ..util.typing import Literal
+from ..util.typing import Self
+
+if TYPE_CHECKING:
+    from .base import _EntityNamespace
+    from .visitors import _TraverseInternalsType
+
+_AnnotationDict = Mapping[str, Any]
+
+EMPTY_ANNOTATIONS: util.immutabledict[str, Any] = util.EMPTY_DICT
+
+
+class SupportsAnnotations(ExternallyTraversible):
+    __slots__ = ()
+
+    _annotations: util.immutabledict[str, Any] = EMPTY_ANNOTATIONS
+
+    proxy_set: util.generic_fn_descriptor[FrozenSet[Any]]
+
+    _is_immutable: bool
+
+    def _annotate(self, values: _AnnotationDict) -> Self:
+        raise NotImplementedError()
+
+    @overload
+    def _deannotate(
+        self,
+        values: Literal[None] = ...,
+        clone: bool = ...,
+    ) -> Self: ...
+
+    @overload
+    def _deannotate(
+        self,
+        values: Sequence[str] = ...,
+        clone: bool = ...,
+    ) -> SupportsAnnotations: ...
+
+    def _deannotate(
+        self,
+        values: Optional[Sequence[str]] = None,
+        clone: bool = False,
+    ) -> SupportsAnnotations:
+        raise NotImplementedError()
+
+    @util.memoized_property
+    def _annotations_cache_key(self) -> Tuple[Any, ...]:
+        anon_map_ = anon_map()
+
+        return self._gen_annotations_cache_key(anon_map_)
+
+    def _gen_annotations_cache_key(
+        self, anon_map: anon_map
+    ) -> Tuple[Any, ...]:
+        return (
+            "_annotations",
+            tuple(
+                (
+                    key,
+                    (
+                        value._gen_cache_key(anon_map, [])
+                        if isinstance(value, HasCacheKey)
+                        else value
+                    ),
+                )
+                for key, value in [
+                    (key, self._annotations[key])
+                    for key in sorted(self._annotations)
+                ]
+            ),
+        )
+
+
+class SupportsWrappingAnnotations(SupportsAnnotations):
+    __slots__ = ()
+
+    _constructor: Callable[..., SupportsWrappingAnnotations]
+
+    if TYPE_CHECKING:
+
+        @util.ro_non_memoized_property
+        def entity_namespace(self) -> _EntityNamespace: ...
+
+    def _annotate(self, values: _AnnotationDict) -> Self:
+        """return a copy of this ClauseElement with annotations
+        updated by the given dictionary.
+
+        """
+        return Annotated._as_annotated_instance(self, values)  # type: ignore
+
+    def _with_annotations(self, values: _AnnotationDict) -> Self:
+        """return a copy of this ClauseElement with annotations
+        replaced by the given dictionary.
+
+        """
+        return Annotated._as_annotated_instance(self, values)  # type: ignore
+
+    @overload
+    def _deannotate(
+        self,
+        values: Literal[None] = ...,
+        clone: bool = ...,
+    ) -> Self: ...
+
+    @overload
+    def _deannotate(
+        self,
+        values: Sequence[str] = ...,
+        clone: bool = ...,
+    ) -> SupportsAnnotations: ...
+
+    def _deannotate(
+        self,
+        values: Optional[Sequence[str]] = None,
+        clone: bool = False,
+    ) -> SupportsAnnotations:
+        """return a copy of this :class:`_expression.ClauseElement`
+        with annotations
+        removed.
+
+        :param values: optional tuple of individual values
+         to remove.
+
+        """
+        if clone:
+            s = self._clone()
+            return s
+        else:
+            return self
+
+
+class SupportsCloneAnnotations(SupportsWrappingAnnotations):
+    # SupportsCloneAnnotations extends from SupportsWrappingAnnotations
+    # to support the structure of having the base ClauseElement
+    # be a subclass of SupportsWrappingAnnotations.  Any ClauseElement
+    # subclass that wants to extend from SupportsCloneAnnotations
+    # will inherently also be subclassing SupportsWrappingAnnotations, so
+    # make that specific here.
+
+    if not typing.TYPE_CHECKING:
+        __slots__ = ()
+
+    _clone_annotations_traverse_internals: _TraverseInternalsType = [
+        ("_annotations", InternalTraversal.dp_annotations_key)
+    ]
+
+    def _annotate(self, values: _AnnotationDict) -> Self:
+        """return a copy of this ClauseElement with annotations
+        updated by the given dictionary.
+
+        """
+        new = self._clone()
+        new._annotations = new._annotations.union(values)
+        new.__dict__.pop("_annotations_cache_key", None)
+        new.__dict__.pop("_generate_cache_key", None)
+        return new
+
+    def _with_annotations(self, values: _AnnotationDict) -> Self:
+        """return a copy of this ClauseElement with annotations
+        replaced by the given dictionary.
+
+        """
+        new = self._clone()
+        new._annotations = util.immutabledict(values)
+        new.__dict__.pop("_annotations_cache_key", None)
+        new.__dict__.pop("_generate_cache_key", None)
+        return new
+
+    @overload
+    def _deannotate(
+        self,
+        values: Literal[None] = ...,
+        clone: bool = ...,
+    ) -> Self: ...
+
+    @overload
+    def _deannotate(
+        self,
+        values: Sequence[str] = ...,
+        clone: bool = ...,
+    ) -> SupportsAnnotations: ...
+
+    def _deannotate(
+        self,
+        values: Optional[Sequence[str]] = None,
+        clone: bool = False,
+    ) -> SupportsAnnotations:
+        """return a copy of this :class:`_expression.ClauseElement`
+        with annotations
+        removed.
+
+        :param values: optional tuple of individual values
+         to remove.
+
+        """
+        if clone or self._annotations:
+            # clone is used when we are also copying
+            # the expression for a deep deannotation
+            new = self._clone()
+            new._annotations = util.immutabledict()
+            new.__dict__.pop("_annotations_cache_key", None)
+            return new
+        else:
+            return self
+
+
+class Annotated(SupportsAnnotations):
+    """clones a SupportsAnnotations and applies an 'annotations' dictionary.
+
+    Unlike regular clones, this clone also mimics __hash__() and
+    __eq__() of the original element so that it takes its place
+    in hashed collections.
+
+    A reference to the original element is maintained, for the important
+    reason of keeping its hash value current.  If the original were
+    garbage collected, its hash value could be reused, causing conflicts.
+
+    .. note::  The rationale for Annotated producing a brand new class,
+       rather than placing the functionality directly within ClauseElement,
+       is **performance**.  The __hash__() method is absent on plain
+       ClauseElement which leads to significantly reduced function call
+       overhead, as the use of sets and dictionaries against ClauseElement
+       objects is prevalent, but most are not "annotated".
+
+    """
+
+    _is_column_operators = False
+
+    @classmethod
+    def _as_annotated_instance(
+        cls, element: SupportsWrappingAnnotations, values: _AnnotationDict
+    ) -> Annotated:
+        try:
+            cls = annotated_classes[element.__class__]
+        except KeyError:
+            cls = _new_annotation_type(element.__class__, cls)
+        return cls(element, values)
+
+    _annotations: util.immutabledict[str, Any]
+    __element: SupportsWrappingAnnotations
+    _hash: int
+
+    def __new__(cls: Type[Self], *args: Any) -> Self:
+        return object.__new__(cls)
+
+    def __init__(
+        self, element: SupportsWrappingAnnotations, values: _AnnotationDict
+    ):
+        self.__dict__ = element.__dict__.copy()
+        self.__dict__.pop("_annotations_cache_key", None)
+        self.__dict__.pop("_generate_cache_key", None)
+        self.__element = element
+        self._annotations = util.immutabledict(values)
+        self._hash = hash(element)
+
+    def _annotate(self, values: _AnnotationDict) -> Self:
+        _values = self._annotations.union(values)
+        new = self._with_annotations(_values)
+        return new
+
+    def _with_annotations(self, values: _AnnotationDict) -> Self:
+        clone = self.__class__.__new__(self.__class__)
+        clone.__dict__ = self.__dict__.copy()
+        clone.__dict__.pop("_annotations_cache_key", None)
+        clone.__dict__.pop("_generate_cache_key", None)
+        clone._annotations = util.immutabledict(values)
+        return clone
+
+    @overload
+    def _deannotate(
+        self,
+        values: Literal[None] = ...,
+        clone: bool = ...,
+    ) -> Self: ...
+
+    @overload
+    def _deannotate(
+        self,
+        values: Sequence[str] = ...,
+        clone: bool = ...,
+    ) -> Annotated: ...
+
+    def _deannotate(
+        self,
+        values: Optional[Sequence[str]] = None,
+        clone: bool = True,
+    ) -> SupportsAnnotations:
+        if values is None:
+            return self.__element
+        else:
+            return self._with_annotations(
+                util.immutabledict(
+                    {
+                        key: value
+                        for key, value in self._annotations.items()
+                        if key not in values
+                    }
+                )
+            )
+
+    if not typing.TYPE_CHECKING:
+        # manually proxy some methods that need extra attention
+        def _compiler_dispatch(self, visitor: Any, **kw: Any) -> Any:
+            return self.__element.__class__._compiler_dispatch(
+                self, visitor, **kw
+            )
+
+        @property
+        def _constructor(self):
+            return self.__element._constructor
+
+    def _clone(self, **kw: Any) -> Self:
+        clone = self.__element._clone(**kw)
+        if clone is self.__element:
+            # detect immutable, don't change anything
+            return self
+        else:
+            # update the clone with any changes that have occurred
+            # to this object's __dict__.
+            clone.__dict__.update(self.__dict__)
+            return self.__class__(clone, self._annotations)
+
+    def __reduce__(self) -> Tuple[Type[Annotated], Tuple[Any, ...]]:
+        return self.__class__, (self.__element, self._annotations)
+
+    def __hash__(self) -> int:
+        return self._hash
+
+    def __eq__(self, other: Any) -> bool:
+        if self._is_column_operators:
+            return self.__element.__class__.__eq__(self, other)
+        else:
+            return hash(other) == hash(self)
+
+    @util.ro_non_memoized_property
+    def entity_namespace(self) -> _EntityNamespace:
+        if "entity_namespace" in self._annotations:
+            return cast(
+                SupportsWrappingAnnotations,
+                self._annotations["entity_namespace"],
+            ).entity_namespace
+        else:
+            return self.__element.entity_namespace
+
+
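+# A minimal sketch of the annotation round trip (these are private
+# APIs; for illustration only)::
+#
+#     from sqlalchemy import column
+#
+#     c = column("x")
+#     annotated = c._annotate({"entity": "A"})
+#     assert hash(annotated) == hash(c)    # takes c's place in sets/dicts
+#     assert annotated._annotations["entity"] == "A"
+#     assert annotated._deannotate() is c  # full deannotation -> original
+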
+# hard-generate Annotated subclasses.  this technique
+# is used instead of on-the-fly types (i.e. type.__new__())
+# so that the resulting objects are pickleable; additionally, other
+# decisions can be made up front about the type of object being annotated
+# just once per class rather than per-instance.
+annotated_classes: Dict[Type[SupportsWrappingAnnotations], Type[Annotated]] = (
+    {}
+)
+
+_SA = TypeVar("_SA", bound="SupportsAnnotations")
+
+
+def _safe_annotate(to_annotate: _SA, annotations: _AnnotationDict) -> _SA:
+    try:
+        _annotate = to_annotate._annotate
+    except AttributeError:
+        # skip objects that don't actually have an `_annotate`
+        # attribute, namely QueryableAttribute inside of a join
+        # condition
+        return to_annotate
+    else:
+        return _annotate(annotations)
+
+
+def _deep_annotate(
+    element: _SA,
+    annotations: _AnnotationDict,
+    exclude: Optional[Sequence[SupportsAnnotations]] = None,
+    *,
+    detect_subquery_cols: bool = False,
+    ind_cols_on_fromclause: bool = False,
+    annotate_callable: Optional[
+        Callable[[SupportsAnnotations, _AnnotationDict], SupportsAnnotations]
+    ] = None,
+) -> _SA:
+    """Deep copy the given ClauseElement, annotating each element
+    with the given annotations dictionary.
+
+    Elements within the exclude collection will be cloned but not annotated.
+
+    """
+
+    # annotated objects hack the __hash__() method so if we want to
+    # uniquely process them we have to use id()
+
+    cloned_ids: Dict[int, SupportsAnnotations] = {}
+
+    def clone(elem: SupportsAnnotations, **kw: Any) -> SupportsAnnotations:
+        # ind_cols_on_fromclause means make sure an AnnotatedFromClause
+        # has its own .c collection independent of that which it's proxying.
+        # this is used specifically by orm.LoaderCriteriaOption to break
+        # a reference cycle that it's otherwise prone to building,
+        # see test_relationship_criteria->
+        # test_loader_criteria_subquery_w_same_entity.  logic here was
+        # changed for #8796 and made explicit; previously it occurred
+        # by accident
+
+        kw["detect_subquery_cols"] = detect_subquery_cols
+        id_ = id(elem)
+
+        if id_ in cloned_ids:
+            return cloned_ids[id_]
+
+        if (
+            exclude
+            and hasattr(elem, "proxy_set")
+            and elem.proxy_set.intersection(exclude)
+        ):
+            newelem = elem._clone(clone=clone, **kw)
+        elif annotations != elem._annotations:
+            if detect_subquery_cols and elem._is_immutable:
+                to_annotate = elem._clone(clone=clone, **kw)
+            else:
+                to_annotate = elem
+            if annotate_callable:
+                newelem = annotate_callable(to_annotate, annotations)
+            else:
+                newelem = _safe_annotate(to_annotate, annotations)
+        else:
+            newelem = elem
+
+        newelem._copy_internals(
+            clone=clone, ind_cols_on_fromclause=ind_cols_on_fromclause
+        )
+
+        cloned_ids[id_] = newelem
+        return newelem
+
+    if element is not None:
+        element = cast(_SA, clone(element))
+    clone = None  # type: ignore  # remove gc cycles
+    return element
+
+
+@overload
+def _deep_deannotate(
+    element: Literal[None], values: Optional[Sequence[str]] = None
+) -> Literal[None]: ...
+
+
+@overload
+def _deep_deannotate(
+    element: _SA, values: Optional[Sequence[str]] = None
+) -> _SA: ...
+
+
+def _deep_deannotate(
+    element: Optional[_SA], values: Optional[Sequence[str]] = None
+) -> Optional[_SA]:
+    """Deep copy the given element, removing annotations."""
+
+    cloned: Dict[Any, SupportsAnnotations] = {}
+
+    def clone(elem: SupportsAnnotations, **kw: Any) -> SupportsAnnotations:
+        key: Any
+        if values:
+            key = id(elem)
+        else:
+            key = elem
+
+        if key not in cloned:
+            newelem = elem._deannotate(values=values, clone=True)
+            newelem._copy_internals(clone=clone)
+            cloned[key] = newelem
+            return newelem
+        else:
+            return cloned[key]
+
+    if element is not None:
+        element = cast(_SA, clone(element))
+    clone = None  # type: ignore  # remove gc cycles
+    return element
+
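+# Illustrative sketch of the deep variants (private APIs, shown here
+# for demonstration only)::
+#
+#     from sqlalchemy import column
+#
+#     expr = column("a") > column("b")
+#     annotated = _deep_annotate(expr, {"tag": 1})
+#     assert annotated.left._annotations["tag"] == 1
+#     plain = _deep_deannotate(annotated)
+#     assert not plain.left._annotations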
+
+def _shallow_annotate(element: _SA, annotations: _AnnotationDict) -> _SA:
+    """Annotate the given ClauseElement and copy its internals so that
+    internal objects refer to the new annotated object.
+
+    Basically used to apply a "don't traverse" annotation to a
+    selectable, without digging throughout the whole
+    structure wasting time.
+    """
+    element = element._annotate(annotations)
+    element._copy_internals()
+    return element
+
+
+def _new_annotation_type(
+    cls: Type[SupportsWrappingAnnotations], base_cls: Type[Annotated]
+) -> Type[Annotated]:
+    """Generates a new class that subclasses Annotated and proxies a given
+    element type.
+
+    """
+    if issubclass(cls, Annotated):
+        return cls
+    elif cls in annotated_classes:
+        return annotated_classes[cls]
+
+    for super_ in cls.__mro__:
+        # check if an Annotated subclass more specific than
+        # the given base_cls is already registered, such
+        # as AnnotatedColumnElement.
+        if super_ in annotated_classes:
+            base_cls = annotated_classes[super_]
+            break
+
+    annotated_classes[cls] = anno_cls = cast(
+        Type[Annotated],
+        type("Annotated%s" % cls.__name__, (base_cls, cls), {}),
+    )
+    globals()["Annotated%s" % cls.__name__] = anno_cls
+
+    if "_traverse_internals" in cls.__dict__:
+        anno_cls._traverse_internals = list(cls._traverse_internals) + [
+            ("_annotations", InternalTraversal.dp_annotations_key)
+        ]
+    elif cls.__dict__.get("inherit_cache", False):
+        anno_cls._traverse_internals = list(cls._traverse_internals) + [
+            ("_annotations", InternalTraversal.dp_annotations_key)
+        ]
+
+    # some classes include this even if they have traverse_internals
+    # e.g. BindParameter, add it if present.
+    if cls.__dict__.get("inherit_cache", False):
+        anno_cls.inherit_cache = True  # type: ignore
+    elif "inherit_cache" in cls.__dict__:
+        anno_cls.inherit_cache = cls.__dict__["inherit_cache"]  # type: ignore
+
+    anno_cls._is_column_operators = issubclass(cls, operators.ColumnOperators)
+
+    return anno_cls
+
+
+def _prepare_annotations(
+    target_hierarchy: Type[SupportsWrappingAnnotations],
+    base_cls: Type[Annotated],
+) -> None:
+    for cls in util.walk_subclasses(target_hierarchy):
+        _new_annotation_type(cls, base_cls)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/base.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/base.py
new file mode 100644
index 00000000..7ccef84e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/base.py
@@ -0,0 +1,2185 @@
+# sql/base.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+"""Foundational utilities common to many sql modules.
+
+"""
+
+
+from __future__ import annotations
+
+import collections
+from enum import Enum
+import itertools
+from itertools import zip_longest
+import operator
+import re
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import FrozenSet
+from typing import Generic
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import MutableMapping
+from typing import NamedTuple
+from typing import NoReturn
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import roles
+from . import visitors
+from .cache_key import HasCacheKey  # noqa
+from .cache_key import MemoizedHasCacheKey  # noqa
+from .traversals import HasCopyInternals  # noqa
+from .visitors import ClauseVisitor
+from .visitors import ExtendedInternalTraversal
+from .visitors import ExternallyTraversible
+from .visitors import InternalTraversal
+from .. import event
+from .. import exc
+from .. import util
+from ..util import HasMemoized as HasMemoized
+from ..util import hybridmethod
+from ..util import typing as compat_typing
+from ..util.typing import Protocol
+from ..util.typing import Self
+from ..util.typing import TypeGuard
+
+if TYPE_CHECKING:
+    from . import coercions
+    from . import elements
+    from . import type_api
+    from ._orm_types import DMLStrategyArgument
+    from ._orm_types import SynchronizeSessionArgument
+    from ._typing import _CLE
+    from .compiler import SQLCompiler
+    from .elements import BindParameter
+    from .elements import ClauseList
+    from .elements import ColumnClause  # noqa
+    from .elements import ColumnElement
+    from .elements import NamedColumn
+    from .elements import SQLCoreOperations
+    from .elements import TextClause
+    from .schema import Column
+    from .schema import DefaultGenerator
+    from .selectable import _JoinTargetElement
+    from .selectable import _SelectIterable
+    from .selectable import FromClause
+    from ..engine import Connection
+    from ..engine import CursorResult
+    from ..engine.interfaces import _CoreMultiExecuteParams
+    from ..engine.interfaces import _ExecuteOptions
+    from ..engine.interfaces import _ImmutableExecuteOptions
+    from ..engine.interfaces import CacheStats
+    from ..engine.interfaces import Compiled
+    from ..engine.interfaces import CompiledCacheType
+    from ..engine.interfaces import CoreExecuteOptionsParameter
+    from ..engine.interfaces import Dialect
+    from ..engine.interfaces import IsolationLevel
+    from ..engine.interfaces import SchemaTranslateMapType
+    from ..event import dispatcher
+
+if not TYPE_CHECKING:
+    coercions = None  # noqa
+    elements = None  # noqa
+    type_api = None  # noqa
+
+
+class _NoArg(Enum):
+    NO_ARG = 0
+
+    def __repr__(self):
+        return f"_NoArg.{self.name}"
+
+
+NO_ARG = _NoArg.NO_ARG
+
+
+class _NoneName(Enum):
+    NONE_NAME = 0
+    """indicate a 'deferred' name that was ultimately the value None."""
+
+
+_NONE_NAME = _NoneName.NONE_NAME
+
+_T = TypeVar("_T", bound=Any)
+
+_Fn = TypeVar("_Fn", bound=Callable[..., Any])
+
+_AmbiguousTableNameMap = MutableMapping[str, str]
+
+
+class _DefaultDescriptionTuple(NamedTuple):
+    arg: Any
+    is_scalar: Optional[bool]
+    is_callable: Optional[bool]
+    is_sentinel: Optional[bool]
+
+    @classmethod
+    def _from_column_default(
+        cls, default: Optional[DefaultGenerator]
+    ) -> _DefaultDescriptionTuple:
+        return (
+            _DefaultDescriptionTuple(
+                default.arg,  # type: ignore
+                default.is_scalar,
+                default.is_callable,
+                default.is_sentinel,
+            )
+            if default
+            and (
+                default.has_arg
+                or (not default.for_update and default.is_sentinel)
+            )
+            else _DefaultDescriptionTuple(None, None, None, None)
+        )
+
+
+_never_select_column = operator.attrgetter("_omit_from_statements")
+
+
+class _EntityNamespace(Protocol):
+    def __getattr__(self, key: str) -> SQLCoreOperations[Any]: ...
+
+
+class _HasEntityNamespace(Protocol):
+    @util.ro_non_memoized_property
+    def entity_namespace(self) -> _EntityNamespace: ...
+
+
+def _is_has_entity_namespace(element: Any) -> TypeGuard[_HasEntityNamespace]:
+    return hasattr(element, "entity_namespace")
+
+
+# Remove when https://github.com/python/mypy/issues/14640 will be fixed
+_Self = TypeVar("_Self", bound=Any)
+
+
+class Immutable:
+    """mark a ClauseElement as 'immutable' when expressions are cloned.
+
+    "immutable" objects refers to the "mutability" of an object in the
+    context of SQL DQL and DML generation.   Such as, in DQL, one can
+    compose a SELECT or subquery of varied forms, but one cannot modify
+    the structure of a specific table or column within DQL.
+    :class:`.Immutable` is mostly intended to follow this concept, and as
+    such the primary "immutable" objects are :class:`.ColumnClause`,
+    :class:`.Column`, :class:`.TableClause`, :class:`.Table`.
+
+    """
+
+    __slots__ = ()
+
+    _is_immutable = True
+
+    def unique_params(self, *optionaldict, **kwargs):
+        raise NotImplementedError("Immutable objects do not support copying")
+
+    def params(self, *optionaldict, **kwargs):
+        raise NotImplementedError("Immutable objects do not support copying")
+
+    def _clone(self: _Self, **kw: Any) -> _Self:
+        return self
+
+    def _copy_internals(
+        self, *, omit_attrs: Iterable[str] = (), **kw: Any
+    ) -> None:
+        pass
+
+
+class SingletonConstant(Immutable):
+    """Represent SQL constants like NULL, TRUE, FALSE"""
+
+    _is_singleton_constant = True
+
+    _singleton: SingletonConstant
+
+    def __new__(cls: _T, *arg: Any, **kw: Any) -> _T:
+        return cast(_T, cls._singleton)
+
+    @util.non_memoized_property
+    def proxy_set(self) -> FrozenSet[ColumnElement[Any]]:
+        raise NotImplementedError()
+
+    @classmethod
+    def _create_singleton(cls):
+        obj = object.__new__(cls)
+        obj.__init__()  # type: ignore
+
+        # for a long time this was an empty frozenset, meaning
+        # a SingletonConstant would never be a "corresponding column" in
+        # a statement.  This referred to #6259.  However, in #7154 we see
+        # that we do in fact need "correspondence" to work when matching cols
+        # in result sets, so the non-correspondence was moved to a more
+        # specific level when we are actually adapting expressions for SQL
+        # render only.
+        obj.proxy_set = frozenset([obj])
+        cls._singleton = obj
+
+
+def _from_objects(
+    *elements: Union[
+        ColumnElement[Any], FromClause, TextClause, _JoinTargetElement
+    ]
+) -> Iterator[FromClause]:
+    return itertools.chain.from_iterable(
+        [element._from_objects for element in elements]
+    )
+
+
+def _select_iterables(
+    elements: Iterable[roles.ColumnsClauseRole],
+) -> _SelectIterable:
+    """expand tables into individual columns in the
+    given list of column expressions.
+
+    """
+    return itertools.chain.from_iterable(
+        [c._select_iterable for c in elements]
+    )
+
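+# e.g. (illustrative): a Table in the given list expands in place to its
+# individual columns::
+#
+#     from sqlalchemy import Column, Integer, MetaData, Table
+#
+#     t = Table("t", MetaData(), Column("x", Integer), Column("y", Integer))
+#     cols = list(_select_iterables([t]))
+#     assert cols[0] is t.c.x and cols[1] is t.c.y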
+
+_SelfGenerativeType = TypeVar("_SelfGenerativeType", bound="_GenerativeType")
+
+
+class _GenerativeType(compat_typing.Protocol):
+    def _generate(self) -> Self: ...
+
+
+def _generative(fn: _Fn) -> _Fn:
+    """non-caching _generative() decorator.
+
+    This is basically the legacy decorator that copies the object and
+    runs a method on the new copy.
+
+    """
+
+    @util.decorator
+    def _generative(
+        fn: _Fn, self: _SelfGenerativeType, *args: Any, **kw: Any
+    ) -> _SelfGenerativeType:
+        """Mark a method as generative."""
+
+        self = self._generate()
+        x = fn(self, *args, **kw)
+        assert x is self, "generative methods must return self"
+        return self
+
+    decorated = _generative(fn)
+    decorated.non_generative = fn  # type: ignore
+    return decorated
+
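+# Usage sketch (illustrative): a @_generative method mutates a copy of
+# the object and must return self::
+#
+#     class MyStatement(Generative):
+#         _limit = None
+#
+#         @_generative
+#         def limit(self, n):
+#             self._limit = n
+#             return self
+#
+#     s = MyStatement()
+#     s2 = s.limit(5)  # s is unchanged; s2 carries _limit=5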
+
+def _exclusive_against(*names: str, **kw: Any) -> Callable[[_Fn], _Fn]:
+    msgs = kw.pop("msgs", {})
+
+    defaults = kw.pop("defaults", {})
+
+    getters = [
+        (name, operator.attrgetter(name), defaults.get(name, None))
+        for name in names
+    ]
+
+    @util.decorator
+    def check(fn, *args, **kw):
+        # make pylance happy by not including "self" in the argument
+        # list
+        self = args[0]
+        args = args[1:]
+        for name, getter, default_ in getters:
+            if getter(self) is not default_:
+                msg = msgs.get(
+                    name,
+                    "Method %s() has already been invoked on this %s construct"
+                    % (fn.__name__, self.__class__),
+                )
+                raise exc.InvalidRequestError(msg)
+        return fn(self, *args, **kw)
+
+    return check
+
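+# Usage sketch (illustrative): guard a generative method against being
+# invoked twice on the same construct::
+#
+#     @_exclusive_against(
+#         "_limit",
+#         msgs={"_limit": "limit() has already been applied"},
+#     )
+#     def limit(self, n):
+#         ...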
+
+def _clone(element, **kw):
+    return element._clone(**kw)
+
+
+def _expand_cloned(
+    elements: Iterable[_CLE],
+) -> Iterable[_CLE]:
+    """expand the given set of ClauseElements to be the set of all 'cloned'
+    predecessors.
+
+    """
+    # TODO: cython candidate
+    return itertools.chain(*[x._cloned_set for x in elements])
+
+
+def _de_clone(
+    elements: Iterable[_CLE],
+) -> Iterable[_CLE]:
+    for x in elements:
+        while x._is_clone_of is not None:
+            x = x._is_clone_of
+        yield x
+
+
+def _cloned_intersection(a: Iterable[_CLE], b: Iterable[_CLE]) -> Set[_CLE]:
+    """return the intersection of sets a and b, counting
+    any overlap between 'cloned' predecessors.
+
+    The returned set is in terms of the entities present within 'a'.
+
+    """
+    all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
+    return {elem for elem in a if all_overlap.intersection(elem._cloned_set)}
+
+
+def _cloned_difference(a: Iterable[_CLE], b: Iterable[_CLE]) -> Set[_CLE]:
+    all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
+    return {
+        elem for elem in a if not all_overlap.intersection(elem._cloned_set)
+    }
+
+
+class _DialectArgView(MutableMapping[str, Any]):
+    """A dictionary view of dialect-level arguments in the form
+    <dialectname>_<argument_name>.
+
+    """
+
+    def __init__(self, obj):
+        self.obj = obj
+
+    def _key(self, key):
+        try:
+            dialect, value_key = key.split("_", 1)
+        except ValueError as err:
+            raise KeyError(key) from err
+        else:
+            return dialect, value_key
+
+    def __getitem__(self, key):
+        dialect, value_key = self._key(key)
+
+        try:
+            opt = self.obj.dialect_options[dialect]
+        except exc.NoSuchModuleError as err:
+            raise KeyError(key) from err
+        else:
+            return opt[value_key]
+
+    def __setitem__(self, key, value):
+        try:
+            dialect, value_key = self._key(key)
+        except KeyError as err:
+            raise exc.ArgumentError(
+                "Keys must be of the form <dialectname>_<argname>"
+            ) from err
+        else:
+            self.obj.dialect_options[dialect][value_key] = value
+
+    def __delitem__(self, key):
+        dialect, value_key = self._key(key)
+        del self.obj.dialect_options[dialect][value_key]
+
+    def __len__(self):
+        return sum(
+            len(args._non_defaults)
+            for args in self.obj.dialect_options.values()
+        )
+
+    def __iter__(self):
+        return (
+            "%s_%s" % (dialect_name, value_name)
+            for dialect_name in self.obj.dialect_options
+            for value_name in self.obj.dialect_options[
+                dialect_name
+            ]._non_defaults
+        )
+
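+# Illustrative sketch: the flat view exposes dialect options under the
+# combined key form (here using the bundled postgresql dialect)::
+#
+#     from sqlalchemy import Column, Index, Integer, MetaData, Table
+#
+#     t = Table("t", MetaData(), Column("x", Integer))
+#     ix = Index("ix_t_x", t.c.x, postgresql_where=t.c.x > 5)
+#     assert "postgresql_where" in ix.dialect_kwargs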
+
+class _DialectArgDict(MutableMapping[str, Any]):
+    """A dictionary view of dialect-level arguments for a specific
+    dialect.
+
+    Maintains a separate collection of user-specified arguments
+    and dialect-specified default arguments.
+
+    """
+
+    def __init__(self):
+        self._non_defaults = {}
+        self._defaults = {}
+
+    def __len__(self):
+        return len(set(self._non_defaults).union(self._defaults))
+
+    def __iter__(self):
+        return iter(set(self._non_defaults).union(self._defaults))
+
+    def __getitem__(self, key):
+        if key in self._non_defaults:
+            return self._non_defaults[key]
+        else:
+            return self._defaults[key]
+
+    def __setitem__(self, key, value):
+        self._non_defaults[key] = value
+
+    def __delitem__(self, key):
+        del self._non_defaults[key]
+
+
+@util.preload_module("sqlalchemy.dialects")
+def _kw_reg_for_dialect(dialect_name):
+    dialect_cls = util.preloaded.dialects.registry.load(dialect_name)
+    if dialect_cls.construct_arguments is None:
+        return None
+    return dict(dialect_cls.construct_arguments)
+
+
+class DialectKWArgs:
+    """Establish the ability for a class to have dialect-specific arguments
+    with defaults and constructor validation.
+
+    The :class:`.DialectKWArgs` interacts with the
+    :attr:`.DefaultDialect.construct_arguments` present on a dialect.
+
+    .. seealso::
+
+        :attr:`.DefaultDialect.construct_arguments`
+
+    """
+
+    __slots__ = ()
+
+    _dialect_kwargs_traverse_internals = [
+        ("dialect_options", InternalTraversal.dp_dialect_options)
+    ]
+
+    @classmethod
+    def argument_for(cls, dialect_name, argument_name, default):
+        """Add a new kind of dialect-specific keyword argument for this class.
+
+        E.g.::
+
+            Index.argument_for("mydialect", "length", None)
+
+            some_index = Index("a", "b", mydialect_length=5)
+
+        The :meth:`.DialectKWArgs.argument_for` method is a per-argument
+        way of adding extra arguments to the
+        :attr:`.DefaultDialect.construct_arguments` dictionary. This
+        dictionary provides a list of argument names accepted by various
+        schema-level constructs on behalf of a dialect.
+
+        New dialects should typically specify this dictionary all at once as a
+        data member of the dialect class.  The use case for ad-hoc addition of
+        argument names is typically for end-user code that is also using
+        a custom compilation scheme which consumes the additional arguments.
+
+        :param dialect_name: name of a dialect.  The dialect must be
+         locatable, else a :class:`.NoSuchModuleError` is raised.   The
+         dialect must also include an existing
+         :attr:`.DefaultDialect.construct_arguments` collection, indicating
+         that it participates in the keyword-argument validation and default
+         system, else :class:`.ArgumentError` is raised.  If the dialect does
+         not include this collection, then any keyword argument can be
+         specified on behalf of this dialect already.  All dialects packaged
+         within SQLAlchemy include this collection, however for third party
+         dialects, support may vary.
+
+        :param argument_name: name of the parameter.
+
+        :param default: default value of the parameter.
+
+        """
+
+        construct_arg_dictionary = DialectKWArgs._kw_registry[dialect_name]
+        if construct_arg_dictionary is None:
+            raise exc.ArgumentError(
+                "Dialect '%s' does have keyword-argument "
+                "validation and defaults enabled configured" % dialect_name
+            )
+        if cls not in construct_arg_dictionary:
+            construct_arg_dictionary[cls] = {}
+        construct_arg_dictionary[cls][argument_name] = default
+
+    @util.memoized_property
+    def dialect_kwargs(self):
+        """A collection of keyword arguments specified as dialect-specific
+        options to this construct.
+
+        The arguments are present here in their original ``<dialect>_<kwarg>``
+        format.  Only arguments that were actually passed are included,
+        unlike the :attr:`.DialectKWArgs.dialect_options` collection, which
+        contains all options known by this dialect, including defaults.
+
+        The collection is also writable; keys are accepted of the
+        form ``<dialect>_<kwarg>`` where the value will be assembled
+        into the list of options.
+
+        .. seealso::
+
+            :attr:`.DialectKWArgs.dialect_options` - nested dictionary form
+
+        """
+        return _DialectArgView(self)
+
+    @property
+    def kwargs(self):
+        """A synonym for :attr:`.DialectKWArgs.dialect_kwargs`."""
+        return self.dialect_kwargs
+
+    _kw_registry = util.PopulateDict(_kw_reg_for_dialect)
+
+    def _kw_reg_for_dialect_cls(self, dialect_name):
+        construct_arg_dictionary = DialectKWArgs._kw_registry[dialect_name]
+        d = _DialectArgDict()
+
+        if construct_arg_dictionary is None:
+            d._defaults.update({"*": None})
+        else:
+            for cls in reversed(self.__class__.__mro__):
+                if cls in construct_arg_dictionary:
+                    d._defaults.update(construct_arg_dictionary[cls])
+        return d
+
+    @util.memoized_property
+    def dialect_options(self):
+        """A collection of keyword arguments specified as dialect-specific
+        options to this construct.
+
+        This is a two-level nested registry, keyed to ``<dialect_name>``
+        and ``<argument_name>``.  For example, the ``postgresql_where``
+        argument would be locatable as::
+
+            arg = my_object.dialect_options["postgresql"]["where"]
+
+        .. versionadded:: 0.9.2
+
+        .. seealso::
+
+            :attr:`.DialectKWArgs.dialect_kwargs` - flat dictionary form
+
+        """
+
+        return util.PopulateDict(
+            util.portable_instancemethod(self._kw_reg_for_dialect_cls)
+        )
+
+    def _validate_dialect_kwargs(self, kwargs: Dict[str, Any]) -> None:
+        # validate remaining kwargs that they all specify DB prefixes
+
+        if not kwargs:
+            return
+
+        for k in kwargs:
+            m = re.match("^(.+?)_(.+)$", k)
+            if not m:
+                raise TypeError(
+                    "Additional arguments should be "
+                    "named <dialectname>_<argument>, got '%s'" % k
+                )
+            dialect_name, arg_name = m.group(1, 2)
+
+            try:
+                construct_arg_dictionary = self.dialect_options[dialect_name]
+            except exc.NoSuchModuleError:
+                util.warn(
+                    "Can't validate argument %r; can't "
+                    "locate any SQLAlchemy dialect named %r"
+                    % (k, dialect_name)
+                )
+                self.dialect_options[dialect_name] = d = _DialectArgDict()
+                d._defaults.update({"*": None})
+                d._non_defaults[arg_name] = kwargs[k]
+            else:
+                if (
+                    "*" not in construct_arg_dictionary
+                    and arg_name not in construct_arg_dictionary
+                ):
+                    raise exc.ArgumentError(
+                        "Argument %r is not accepted by "
+                        "dialect %r on behalf of %r"
+                        % (k, dialect_name, self.__class__)
+                    )
+                else:
+                    construct_arg_dictionary[arg_name] = kwargs[k]
+
+
+class CompileState:
+    """Produces additional object state necessary for a statement to be
+    compiled.
+
+    the :class:`.CompileState` class is at the base of classes that assemble
+    state for a particular statement object that is then used by the
+    compiler.   This process is essentially an extension of the process that
+    the SQLCompiler.visit_XYZ() method takes; however, there is an emphasis
+    on converting raw user intent into more organized structures rather than
+    producing string output.   The top-level :class:`.CompileState` for the
+    statement being executed is also accessible to the execution context
+    when it invokes the statement and collects results.
+
+    The production of :class:`.CompileState` is specific to the compiler,  such
+    as within the :meth:`.SQLCompiler.visit_insert`,
+    :meth:`.SQLCompiler.visit_select` etc. methods.  These methods are also
+    responsible for associating the :class:`.CompileState` with the
+    :class:`.SQLCompiler` itself, if the statement is the "toplevel" statement,
+    i.e. the outermost SQL statement that's actually being executed.
+    There can be other :class:`.CompileState` objects that are not the
+    toplevel, such as when a SELECT subquery or CTE-nested
+    INSERT/UPDATE/DELETE is generated.
+
+    .. versionadded:: 1.4
+
+    """
+
+    __slots__ = ("statement", "_ambiguous_table_name_map")
+
+    plugins: Dict[Tuple[str, str], Type[CompileState]] = {}
+
+    _ambiguous_table_name_map: Optional[_AmbiguousTableNameMap]
+
+    @classmethod
+    def create_for_statement(
+        cls, statement: Executable, compiler: SQLCompiler, **kw: Any
+    ) -> CompileState:
+        # factory construction.
+
+        if statement._propagate_attrs:
+            plugin_name = statement._propagate_attrs.get(
+                "compile_state_plugin", "default"
+            )
+            klass = cls.plugins.get(
+                (plugin_name, statement._effective_plugin_target), None
+            )
+            if klass is None:
+                klass = cls.plugins[
+                    ("default", statement._effective_plugin_target)
+                ]
+
+        else:
+            klass = cls.plugins[
+                ("default", statement._effective_plugin_target)
+            ]
+
+        if klass is cls:
+            return cls(statement, compiler, **kw)
+        else:
+            return klass.create_for_statement(statement, compiler, **kw)
+
+    def __init__(self, statement, compiler, **kw):
+        self.statement = statement
+
+    @classmethod
+    def get_plugin_class(
+        cls, statement: Executable
+    ) -> Optional[Type[CompileState]]:
+        plugin_name = statement._propagate_attrs.get(
+            "compile_state_plugin", None
+        )
+
+        if plugin_name:
+            key = (plugin_name, statement._effective_plugin_target)
+            if key in cls.plugins:
+                return cls.plugins[key]
+
+        # there's no case where we call upon get_plugin_class() and want
+        # to get None back; there should always be a default.  return that
+        # if there was no plugin-specific class  (e.g. Insert with "orm"
+        # plugin)
+        try:
+            return cls.plugins[("default", statement._effective_plugin_target)]
+        except KeyError:
+            return None
+
+    @classmethod
+    def _get_plugin_class_for_plugin(
+        cls, statement: Executable, plugin_name: str
+    ) -> Optional[Type[CompileState]]:
+        try:
+            return cls.plugins[
+                (plugin_name, statement._effective_plugin_target)
+            ]
+        except KeyError:
+            return None
+
+    @classmethod
+    def plugin_for(
+        cls, plugin_name: str, visit_name: str
+    ) -> Callable[[_Fn], _Fn]:
+        def decorate(cls_to_decorate):
+            cls.plugins[(plugin_name, visit_name)] = cls_to_decorate
+            return cls_to_decorate
+
+        return decorate
+
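+# Registration sketch (plugin and class names illustrative)::
+#
+#     @CompileState.plugin_for("my_plugin", "select")
+#     class MySelectCompileState(CompileState):
+#         ...
+#
+# A statement whose _propagate_attrs carry
+# compile_state_plugin="my_plugin" is then routed to this class by
+# create_for_statement().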
+
+class Generative(HasMemoized):
+    """Provide a method-chaining pattern in conjunction with the
+    @_generative decorator."""
+
+    def _generate(self) -> Self:
+        skip = self._memoized_keys
+        cls = self.__class__
+        s = cls.__new__(cls)
+        if skip:
+            # ensure this iteration remains atomic
+            s.__dict__ = {
+                k: v for k, v in self.__dict__.copy().items() if k not in skip
+            }
+        else:
+            s.__dict__ = self.__dict__.copy()
+        return s
+
+
+class InPlaceGenerative(HasMemoized):
+    """Provide a method-chaining pattern in conjunction with the
+    @_generative decorator that mutates in place."""
+
+    __slots__ = ()
+
+    def _generate(self):
+        skip = self._memoized_keys
+        # note __dict__ needs to be in __slots__ if this is used
+        for k in skip:
+            self.__dict__.pop(k, None)
+        return self
+
+
+class HasCompileState(Generative):
+    """A class that has a :class:`.CompileState` associated with it."""
+
+    _compile_state_plugin: Optional[Type[CompileState]] = None
+
+    _attributes: util.immutabledict[str, Any] = util.EMPTY_DICT
+
+    _compile_state_factory = CompileState.create_for_statement
+
+
+class _MetaOptions(type):
+    """metaclass for the Options class.
+
+    This metaclass is actually necessary despite the availability of the
+    ``__init_subclass__()`` hook as this type also provides custom class-level
+    behavior for the ``__add__()`` method.
+
+    """
+
+    _cache_attrs: Tuple[str, ...]
+
+    def __add__(self, other):
+        o1 = self()
+
+        if set(other).difference(self._cache_attrs):
+            raise TypeError(
+                "dictionary contains attributes not covered by "
+                "Options class %s: %r"
+                % (self, set(other).difference(self._cache_attrs))
+            )
+
+        o1.__dict__.update(other)
+        return o1
+
+    if TYPE_CHECKING:
+
+        def __getattr__(self, key: str) -> Any: ...
+
+        def __setattr__(self, key: str, value: Any) -> None: ...
+
+        def __delattr__(self, key: str) -> None: ...
+
+
+class Options(metaclass=_MetaOptions):
+    """A cacheable option dictionary with defaults."""
+
+    __slots__ = ()
+
+    _cache_attrs: Tuple[str, ...]
+
+    def __init_subclass__(cls) -> None:
+        dict_ = cls.__dict__
+        cls._cache_attrs = tuple(
+            sorted(
+                d
+                for d in dict_
+                if not d.startswith("__")
+                and d not in ("_cache_key_traversal",)
+            )
+        )
+        super().__init_subclass__()
+
+    def __init__(self, **kw):
+        self.__dict__.update(kw)
+
+    def __add__(self, other):
+        o1 = self.__class__.__new__(self.__class__)
+        o1.__dict__.update(self.__dict__)
+
+        if set(other).difference(self._cache_attrs):
+            raise TypeError(
+                "dictionary contains attributes not covered by "
+                "Options class %s: %r"
+                % (self, set(other).difference(self._cache_attrs))
+            )
+
+        o1.__dict__.update(other)
+        return o1
+
+    def __eq__(self, other):
+        # TODO: very inefficient.  This is used only in test suites
+        # right now.
+        for a, b in zip_longest(self._cache_attrs, other._cache_attrs):
+            if getattr(self, a) != getattr(other, b):
+                return False
+        return True
+
+    def __repr__(self):
+        # TODO: fairly inefficient, used only in debugging right now.
+
+        return "%s(%s)" % (
+            self.__class__.__name__,
+            ", ".join(
+                "%s=%r" % (k, self.__dict__[k])
+                for k in self._cache_attrs
+                if k in self.__dict__
+            ),
+        )
+
+    @classmethod
+    def isinstance(cls, klass: Type[Any]) -> bool:
+        return issubclass(cls, klass)
+
+    @hybridmethod
+    def add_to_element(self, name, value):
+        return self + {name: getattr(self, name) + value}
+
+    @hybridmethod
+    def _state_dict_inst(self) -> Mapping[str, Any]:
+        return self.__dict__
+
+    _state_dict_const: util.immutabledict[str, Any] = util.EMPTY_DICT
+
+    @_state_dict_inst.classlevel
+    def _state_dict(cls) -> Mapping[str, Any]:
+        return cls._state_dict_const
+
+    @classmethod
+    def safe_merge(cls, other):
+        d = other._state_dict()
+
+        # only support a merge with another object of our class
+        # and which does not have attrs that we don't.   otherwise
+        # we risk having state that might not be part of our cache
+        # key strategy
+
+        if (
+            cls is not other.__class__
+            and other._cache_attrs
+            and set(other._cache_attrs).difference(cls._cache_attrs)
+        ):
+            raise TypeError(
+                "other element %r is not empty, is not of type %s, "
+                "and contains attributes not covered here %r"
+                % (
+                    other,
+                    cls,
+                    set(other._cache_attrs).difference(cls._cache_attrs),
+                )
+            )
+        return cls + d
+
+    @classmethod
+    def from_execution_options(
+        cls, key, attrs, exec_options, statement_exec_options
+    ):
+        """process Options argument in terms of execution options.
+
+
+        e.g.::
+
+            (
+                load_options,
+                execution_options,
+            ) = QueryContext.default_load_options.from_execution_options(
+                "_sa_orm_load_options",
+                {"populate_existing", "autoflush", "yield_per"},
+                execution_options,
+                statement._execution_options,
+            )
+
+        This gets back the Options and also refreshes "_sa_orm_load_options"
+        in the exec options dict with the new Options.
+
+        """
+
+        # the common case is that none of the options we are looking for
+        # are in either dictionary, so check for that first and bail early
+        check_argnames = attrs.intersection(
+            set(exec_options).union(statement_exec_options)
+        )
+
+        existing_options = exec_options.get(key, cls)
+
+        if check_argnames:
+            result = {}
+            for argname in check_argnames:
+                local = "_" + argname
+                if argname in exec_options:
+                    result[local] = exec_options[argname]
+                elif argname in statement_exec_options:
+                    result[local] = statement_exec_options[argname]
+
+            new_options = existing_options + result
+            exec_options = util.immutabledict().merge_with(
+                exec_options, {key: new_options}
+            )
+            return new_options, exec_options
+
+        else:
+            return existing_options, exec_options
+
+    if TYPE_CHECKING:
+
+        def __getattr__(self, key: str) -> Any: ...
+
+        def __setattr__(self, key: str, value: Any) -> None: ...
+
+        def __delattr__(self, key: str) -> None: ...
+
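+# Illustrative sketch: Options subclasses hold cacheable defaults, and
+# "adding" a dictionary returns a new instance with those attributes
+# overridden::
+#
+#     class MyOpts(Options):
+#         _flag = False
+#         _limit = None
+#
+#     o1 = MyOpts + {"_flag": True}  # class-level __add__ (_MetaOptions)
+#     o2 = o1 + {"_limit": 10}       # instance-level __add__
+#     assert o2._flag and o2._limit == 10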
+
+class CacheableOptions(Options, HasCacheKey):
+    __slots__ = ()
+
+    @hybridmethod
+    def _gen_cache_key_inst(self, anon_map, bindparams):
+        return HasCacheKey._gen_cache_key(self, anon_map, bindparams)
+
+    @_gen_cache_key_inst.classlevel
+    def _gen_cache_key(cls, anon_map, bindparams):
+        return (cls, ())
+
+    @hybridmethod
+    def _generate_cache_key(self):
+        return HasCacheKey._generate_cache_key_for_object(self)
+
+
+class ExecutableOption(HasCopyInternals):
+    __slots__ = ()
+
+    _annotations = util.EMPTY_DICT
+
+    __visit_name__ = "executable_option"
+
+    _is_has_cache_key = False
+
+    _is_core = True
+
+    def _clone(self, **kw):
+        """Create a shallow copy of this ExecutableOption."""
+        c = self.__class__.__new__(self.__class__)
+        c.__dict__ = dict(self.__dict__)  # type: ignore
+        return c
+
+
+class Executable(roles.StatementRole):
+    """Mark a :class:`_expression.ClauseElement` as supporting execution.
+
+    :class:`.Executable` is a superclass for all "statement" types
+    of objects, including :func:`select`, :func:`delete`, :func:`update`,
+    :func:`insert`, :func:`text`.
+
+    """
+
+    supports_execution: bool = True
+    _execution_options: _ImmutableExecuteOptions = util.EMPTY_DICT
+    _is_default_generator = False
+    _with_options: Tuple[ExecutableOption, ...] = ()
+    _with_context_options: Tuple[
+        Tuple[Callable[[CompileState], None], Any], ...
+    ] = ()
+    _compile_options: Optional[Union[Type[CacheableOptions], CacheableOptions]]
+
+    _executable_traverse_internals = [
+        ("_with_options", InternalTraversal.dp_executable_options),
+        (
+            "_with_context_options",
+            ExtendedInternalTraversal.dp_with_context_options,
+        ),
+        ("_propagate_attrs", ExtendedInternalTraversal.dp_propagate_attrs),
+    ]
+
+    is_select = False
+    is_from_statement = False
+    is_update = False
+    is_insert = False
+    is_text = False
+    is_delete = False
+    is_dml = False
+
+    if TYPE_CHECKING:
+        __visit_name__: str
+
+        def _compile_w_cache(
+            self,
+            dialect: Dialect,
+            *,
+            compiled_cache: Optional[CompiledCacheType],
+            column_keys: List[str],
+            for_executemany: bool = False,
+            schema_translate_map: Optional[SchemaTranslateMapType] = None,
+            **kw: Any,
+        ) -> Tuple[
+            Compiled, Optional[Sequence[BindParameter[Any]]], CacheStats
+        ]: ...
+
+        def _execute_on_connection(
+            self,
+            connection: Connection,
+            distilled_params: _CoreMultiExecuteParams,
+            execution_options: CoreExecuteOptionsParameter,
+        ) -> CursorResult[Any]: ...
+
+        def _execute_on_scalar(
+            self,
+            connection: Connection,
+            distilled_params: _CoreMultiExecuteParams,
+            execution_options: CoreExecuteOptionsParameter,
+        ) -> Any: ...
+
+    @util.ro_non_memoized_property
+    def _all_selected_columns(self):
+        raise NotImplementedError()
+
+    @property
+    def _effective_plugin_target(self) -> str:
+        return self.__visit_name__
+
+    @_generative
+    def options(self, *options: ExecutableOption) -> Self:
+        """Apply options to this statement.
+
+        In the general sense, options are any kind of Python object
+        that can be interpreted by the SQL compiler for the statement.
+        These options can be consumed by specific dialects or specific kinds
+        of compilers.
+
+        The most commonly known kinds of options are the ORM-level options
+        that apply "eager load" and other loading behaviors to an ORM
+        query.   However, options can theoretically be used for many other
+        purposes.
+
+        For background on specific kinds of options for specific kinds of
+        statements, refer to the documentation for those option objects.
+
+        .. versionchanged:: 1.4 - added :meth:`.Executable.options` to
+           Core statement objects towards the goal of allowing unified
+           Core / ORM querying capabilities.
+
+        .. seealso::
+
+            :ref:`loading_columns` - options which control column loading
+            in ORM queries
+
+            :ref:`relationship_loader_options` - options which control
+            relationship loading in ORM queries
+
+        """
+        self._with_options += tuple(
+            coercions.expect(roles.ExecutableOptionRole, opt)
+            for opt in options
+        )
+        return self
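+
+    # usage sketch: assuming an ORM mapping ``User`` with a
+    # ``User.addresses`` relationship, loader options are applied
+    # generatively::
+    #
+    #     from sqlalchemy import select
+    #     from sqlalchemy.orm import selectinload
+    #
+    #     stmt = select(User).options(selectinload(User.addresses))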
+
+    @_generative
+    def _set_compile_options(self, compile_options: CacheableOptions) -> Self:
+        """Assign the compile options to a new value.
+
+        :param compile_options: appropriate CacheableOptions structure
+
+        """
+
+        self._compile_options = compile_options
+        return self
+
+    @_generative
+    def _update_compile_options(self, options: CacheableOptions) -> Self:
+        """update the _compile_options with new keys."""
+
+        assert self._compile_options is not None
+        self._compile_options += options
+        return self
+
+    @_generative
+    def _add_context_option(
+        self,
+        callable_: Callable[[CompileState], None],
+        cache_args: Any,
+    ) -> Self:
+        """Add a context option to this statement.
+
+        These are callable functions that will
+        be given the CompileState object upon compilation.
+
+        A second argument cache_args is required, which will be combined with
+        the ``__code__`` identity of the function itself in order to produce a
+        cache key.
+
+        """
+        self._with_context_options += ((callable_, cache_args),)
+        return self
+
+    @overload
+    def execution_options(
+        self,
+        *,
+        compiled_cache: Optional[CompiledCacheType] = ...,
+        logging_token: str = ...,
+        isolation_level: IsolationLevel = ...,
+        no_parameters: bool = ...,
+        stream_results: bool = ...,
+        max_row_buffer: int = ...,
+        yield_per: int = ...,
+        insertmanyvalues_page_size: int = ...,
+        schema_translate_map: Optional[SchemaTranslateMapType] = ...,
+        populate_existing: bool = ...,
+        autoflush: bool = ...,
+        synchronize_session: SynchronizeSessionArgument = ...,
+        dml_strategy: DMLStrategyArgument = ...,
+        render_nulls: bool = ...,
+        is_delete_using: bool = ...,
+        is_update_from: bool = ...,
+        preserve_rowcount: bool = ...,
+        **opt: Any,
+    ) -> Self: ...
+
+    @overload
+    def execution_options(self, **opt: Any) -> Self: ...
+
+    @_generative
+    def execution_options(self, **kw: Any) -> Self:
+        """Set non-SQL options for the statement which take effect during
+        execution.
+
+        Execution options can be set at many scopes, including per-statement,
+        per-connection, or per execution, using methods such as
+        :meth:`_engine.Connection.execution_options` and parameters which
+        accept a dictionary of options such as
+        :paramref:`_engine.Connection.execute.execution_options` and
+        :paramref:`_orm.Session.execute.execution_options`.
+
+        The primary characteristic of an execution option, as opposed to
+        other kinds of options such as ORM loader options, is that
+        **execution options never affect the compiled SQL of a query, only
+        things that affect how the SQL statement itself is invoked or how
+        results are fetched**.  That is, execution options are not part of
+        what's accommodated by SQL compilation nor are they considered part of
+        the cached state of a statement.
+
+        The :meth:`_sql.Executable.execution_options` method is
+        :term:`generative`, as
+        is the case for the method as applied to the :class:`_engine.Engine`
+        and :class:`_orm.Query` objects, which means when the method is called,
+        a copy of the object is returned, which applies the given parameters to
+        that new copy, but leaves the original unchanged::
+
+            statement = select(table.c.x, table.c.y)
+            new_statement = statement.execution_options(my_option=True)
+
+        An exception to this behavior is the :class:`_engine.Connection`
+        object, where the :meth:`_engine.Connection.execution_options` method
+        is explicitly **not** generative.
+
+        The kinds of options that may be passed to
+        :meth:`_sql.Executable.execution_options` and other related methods and
+        parameter dictionaries include parameters that are explicitly consumed
+        by SQLAlchemy Core or ORM, as well as arbitrary keyword arguments not
+        defined by SQLAlchemy.  This means the methods and/or parameter
+        dictionaries may be used for user-defined parameters that interact with
+        custom code, which may access the parameters using methods such as
+        :meth:`_sql.Executable.get_execution_options` and
+        :meth:`_engine.Connection.get_execution_options`, or within selected
+        event hooks using a dedicated ``execution_options`` event parameter
+        such as
+        :paramref:`_events.ConnectionEvents.before_execute.execution_options`
+        or :attr:`_orm.ORMExecuteState.execution_options`, e.g.::
+
+             from sqlalchemy import event
+
+
+             @event.listens_for(some_engine, "before_execute")
+             def _process_opt(conn, statement, multiparams, params, execution_options):
+                 "run a SQL function before invoking a statement"
+
+                 if execution_options.get("do_special_thing", False):
+                     conn.exec_driver_sql("run_special_function()")
+
+        Within the scope of options that are explicitly recognized by
+        SQLAlchemy, most apply to specific classes of objects and not others.
+        The most common execution options include:
+
+        * :paramref:`_engine.Connection.execution_options.isolation_level` -
+          sets the isolation level for a connection or a class of connections
+          via an :class:`_engine.Engine`.  This option is accepted only
+          by :class:`_engine.Connection` or :class:`_engine.Engine`.
+
+        * :paramref:`_engine.Connection.execution_options.stream_results` -
+          indicates results should be fetched using a server side cursor;
+          this option is accepted by :class:`_engine.Connection`, by the
+          :paramref:`_engine.Connection.execute.execution_options` parameter
+          on :meth:`_engine.Connection.execute`, and additionally by
+          :meth:`_sql.Executable.execution_options` on a SQL statement object,
+          as well as by ORM constructs like :meth:`_orm.Session.execute`.
+
+        * :paramref:`_engine.Connection.execution_options.compiled_cache` -
+          indicates a dictionary that will serve as the
+          :ref:`SQL compilation cache <sql_caching>`
+          for a :class:`_engine.Connection` or :class:`_engine.Engine`, as
+          well as for ORM methods like :meth:`_orm.Session.execute`.
+          Can be passed as ``None`` to disable caching for statements.
+          This option is not accepted by
+          :meth:`_sql.Executable.execution_options` as it is inadvisable to
+          carry along a compilation cache within a statement object.
+
+        * :paramref:`_engine.Connection.execution_options.schema_translate_map`
+          - a mapping of schema names used by the
+          :ref:`Schema Translate Map <schema_translating>` feature, accepted
+          by :class:`_engine.Connection`, :class:`_engine.Engine`,
+          :class:`_sql.Executable`, as well as by ORM constructs
+          like :meth:`_orm.Session.execute`.
+
+        .. seealso::
+
+            :meth:`_engine.Connection.execution_options`
+
+            :paramref:`_engine.Connection.execute.execution_options`
+
+            :paramref:`_orm.Session.execute.execution_options`
+
+            :ref:`orm_queryguide_execution_options` - documentation on all
+            ORM-specific execution options
+
+        """  # noqa: E501
+        if "isolation_level" in kw:
+            raise exc.ArgumentError(
+                "'isolation_level' execution option may only be specified "
+                "on Connection.execution_options(), or "
+                "per-engine using the isolation_level "
+                "argument to create_engine()."
+            )
+        if "compiled_cache" in kw:
+            raise exc.ArgumentError(
+                "'compiled_cache' execution option may only be specified "
+                "on Connection.execution_options(), not per statement."
+            )
+        self._execution_options = self._execution_options.union(kw)
+        return self
+
+    def get_execution_options(self) -> _ExecuteOptions:
+        """Get the non-SQL options which will take effect during execution.
+
+        .. versionadded:: 1.3
+
+        .. seealso::
+
+            :meth:`.Executable.execution_options`
+        """
+        return self._execution_options
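+
+    # usage sketch: options set via execution_options() are carried on the
+    # new (copied) statement and can be read back (``table`` here is an
+    # assumed Table object)::
+    #
+    #     stmt = select(table.c.x).execution_options(stream_results=True)
+    #     assert stmt.get_execution_options()["stream_results"] is True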
+
+
+class SchemaEventTarget(event.EventTarget):
+    """Base class for elements that are the targets of :class:`.DDLEvents`
+    events.
+
+    This includes :class:`.SchemaItem` as well as :class:`.SchemaType`.
+
+    """
+
+    dispatch: dispatcher[SchemaEventTarget]
+
+    def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None:
+        """Associate with this SchemaEvent's parent object."""
+
+    def _set_parent_with_dispatch(
+        self, parent: SchemaEventTarget, **kw: Any
+    ) -> None:
+        self.dispatch.before_parent_attach(self, parent)
+        self._set_parent(parent, **kw)
+        self.dispatch.after_parent_attach(self, parent)
+
+
+class SchemaVisitor(ClauseVisitor):
+    """Define the visiting for ``SchemaItem`` objects."""
+
+    __traverse_options__ = {"schema_visitor": True}
+
+
+class _SentinelDefaultCharacterization(Enum):
+    NONE = "none"
+    UNKNOWN = "unknown"
+    CLIENTSIDE = "clientside"
+    SENTINEL_DEFAULT = "sentinel_default"
+    SERVERSIDE = "serverside"
+    IDENTITY = "identity"
+    SEQUENCE = "sequence"
+
+
+class _SentinelColumnCharacterization(NamedTuple):
+    columns: Optional[Sequence[Column[Any]]] = None
+    is_explicit: bool = False
+    is_autoinc: bool = False
+    default_characterization: _SentinelDefaultCharacterization = (
+        _SentinelDefaultCharacterization.NONE
+    )
+
+
+_COLKEY = TypeVar("_COLKEY", Union[None, str], str)
+
+_COL_co = TypeVar("_COL_co", bound="ColumnElement[Any]", covariant=True)
+_COL = TypeVar("_COL", bound="ColumnElement[Any]")
+
+
+class _ColumnMetrics(Generic[_COL_co]):
+    __slots__ = ("column",)
+
+    column: _COL_co
+
+    def __init__(
+        self, collection: ColumnCollection[Any, _COL_co], col: _COL_co
+    ):
+        self.column = col
+
+        # proxy_index being non-empty means it was initialized.
+        # so we need to update it
+        pi = collection._proxy_index
+        if pi:
+            for eps_col in col._expanded_proxy_set:
+                pi[eps_col].add(self)
+
+    def get_expanded_proxy_set(self):
+        return self.column._expanded_proxy_set
+
+    def dispose(self, collection):
+        pi = collection._proxy_index
+        if not pi:
+            return
+        for col in self.column._expanded_proxy_set:
+            colset = pi.get(col, None)
+            if colset:
+                colset.discard(self)
+            if colset is not None and not colset:
+                del pi[col]
+
+    def embedded(
+        self,
+        target_set: Union[
+            Set[ColumnElement[Any]], FrozenSet[ColumnElement[Any]]
+        ],
+    ) -> bool:
+        expanded_proxy_set = self.column._expanded_proxy_set
+        for t in target_set.difference(expanded_proxy_set):
+            if not expanded_proxy_set.intersection(_expand_cloned([t])):
+                return False
+        return True
+
+
+class ColumnCollection(Generic[_COLKEY, _COL_co]):
+    """Collection of :class:`_expression.ColumnElement` instances,
+    typically for
+    :class:`_sql.FromClause` objects.
+
+    The :class:`_sql.ColumnCollection` object is most commonly available
+    as the :attr:`_schema.Table.c` or :attr:`_schema.Table.columns` collection
+    on the :class:`_schema.Table` object, introduced at
+    :ref:`metadata_tables_and_columns`.
+
+    The :class:`_expression.ColumnCollection` has both mapping-like and
+    sequence-like behaviors.  A :class:`_expression.ColumnCollection` usually
+    stores :class:`_schema.Column` objects, which are then accessible both via
+    mapping-style access as well as attribute-style access.
+
+    To access :class:`_schema.Column` objects using ordinary attribute-style
+    access, specify the name like any other object attribute, as below where
+    a column named ``employee_name`` is accessed::
+
+        >>> employee_table.c.employee_name
+
+    To access columns that have names with special characters or spaces,
+    index-style access is used, as below where a column named
+    ``employee ' payment`` is accessed::
+
+        >>> employee_table.c["employee ' payment"]
+
+    As the :class:`_sql.ColumnCollection` object provides a Python dictionary
+    interface, common dictionary method names like
+    :meth:`_sql.ColumnCollection.keys`, :meth:`_sql.ColumnCollection.values`,
+    and :meth:`_sql.ColumnCollection.items` are available, which means that
+    database columns that are keyed under these names also need to use indexed
+    access::
+
+        >>> employee_table.c["values"]
+
+
+    The name under which a :class:`_schema.Column` is present is normally
+    that of the :paramref:`_schema.Column.key` parameter.  In some contexts,
+    such as a :class:`_sql.Select` object that uses a label style set
+    using the :meth:`_sql.Select.set_label_style` method, a column of a certain
+    key may instead be represented under a particular label name such
+    as ``tablename_columnname``::
+
+        >>> from sqlalchemy import select, column, table
+        >>> from sqlalchemy import LABEL_STYLE_TABLENAME_PLUS_COL
+        >>> t = table("t", column("c"))
+        >>> stmt = select(t).set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
+        >>> subq = stmt.subquery()
+        >>> subq.c.t_c
+        <sqlalchemy.sql.elements.ColumnClause at 0x7f59dcf04fa0; t_c>
+
+    :class:`.ColumnCollection` also indexes the columns in order and allows
+    them to be accessible by their integer position::
+
+        >>> cc[0]
+        Column('x', Integer(), table=None)
+        >>> cc[1]
+        Column('y', Integer(), table=None)
+
+    .. versionadded:: 1.4 :class:`_expression.ColumnCollection`
+       allows integer-based
+       index access to the collection.
+
+    Iterating the collection yields the column expressions in order::
+
+        >>> list(cc)
+        [Column('x', Integer(), table=None),
+         Column('y', Integer(), table=None)]
+
+    The base :class:`_expression.ColumnCollection` object can store
+    duplicates, which can
+    mean either two columns with the same key, in which case the column
+    returned by key access is **arbitrary**::
+
+        >>> x1, x2 = Column("x", Integer), Column("x", Integer)
+        >>> cc = ColumnCollection(columns=[(x1.name, x1), (x2.name, x2)])
+        >>> list(cc)
+        [Column('x', Integer(), table=None),
+         Column('x', Integer(), table=None)]
+        >>> cc["x"] is x1
+        False
+        >>> cc["x"] is x2
+        True
+
+    Or it can mean the same column multiple times.  These cases are
+    supported as :class:`_expression.ColumnCollection`
+    is used to represent the columns in
+    a SELECT statement which may include duplicates.
+
+    A special subclass :class:`.DedupeColumnCollection` exists which instead
+    maintains SQLAlchemy's older behavior of not allowing duplicates; this
+    collection is used for schema level objects like :class:`_schema.Table`
+    and
+    :class:`.PrimaryKeyConstraint` where this deduping is helpful.  The
+    :class:`.DedupeColumnCollection` class also has additional mutation methods
+    as the schema constructs have more use cases that require removal and
+    replacement of columns.
+
+    .. versionchanged:: 1.4 :class:`_expression.ColumnCollection`
+       now stores duplicate
+       column keys as well as the same column in multiple positions.  The
+       :class:`.DedupeColumnCollection` class is added to maintain the
+       former behavior in those cases where deduplication as well as
+       additional replace/remove operations are needed.
+
+
+    """
+
+    __slots__ = "_collection", "_index", "_colset", "_proxy_index"
+
+    _collection: List[Tuple[_COLKEY, _COL_co, _ColumnMetrics[_COL_co]]]
+    _index: Dict[Union[None, str, int], Tuple[_COLKEY, _COL_co]]
+    _proxy_index: Dict[ColumnElement[Any], Set[_ColumnMetrics[_COL_co]]]
+    _colset: Set[_COL_co]
+
+    def __init__(
+        self, columns: Optional[Iterable[Tuple[_COLKEY, _COL_co]]] = None
+    ):
+        object.__setattr__(self, "_colset", set())
+        object.__setattr__(self, "_index", {})
+        object.__setattr__(
+            self, "_proxy_index", collections.defaultdict(util.OrderedSet)
+        )
+        object.__setattr__(self, "_collection", [])
+        if columns:
+            self._initial_populate(columns)
+
+    @util.preload_module("sqlalchemy.sql.elements")
+    def __clause_element__(self) -> ClauseList:
+        elements = util.preloaded.sql_elements
+
+        return elements.ClauseList(
+            _literal_as_text_role=roles.ColumnsClauseRole,
+            group=False,
+            *self._all_columns,
+        )
+
+    def _initial_populate(
+        self, iter_: Iterable[Tuple[_COLKEY, _COL_co]]
+    ) -> None:
+        self._populate_separate_keys(iter_)
+
+    @property
+    def _all_columns(self) -> List[_COL_co]:
+        return [col for (_, col, _) in self._collection]
+
+    def keys(self) -> List[_COLKEY]:
+        """Return a sequence of string key names for all columns in this
+        collection."""
+        return [k for (k, _, _) in self._collection]
+
+    def values(self) -> List[_COL_co]:
+        """Return a sequence of :class:`_sql.ColumnClause` or
+        :class:`_schema.Column` objects for all columns in this
+        collection."""
+        return [col for (_, col, _) in self._collection]
+
+    def items(self) -> List[Tuple[_COLKEY, _COL_co]]:
+        """Return a sequence of (key, column) tuples for all columns in this
+        collection each consisting of a string key name and a
+        :class:`_sql.ColumnClause` or
+        :class:`_schema.Column` object.
+        """
+
+        return [(k, col) for (k, col, _) in self._collection]
+
+    def __bool__(self) -> bool:
+        return bool(self._collection)
+
+    def __len__(self) -> int:
+        return len(self._collection)
+
+    def __iter__(self) -> Iterator[_COL_co]:
+        # materialize to a list first, so that iteration remains stable
+        # even if the collection is changed
+        return iter([col for _, col, _ in self._collection])
+
+    @overload
+    def __getitem__(self, key: Union[str, int]) -> _COL_co: ...
+
+    @overload
+    def __getitem__(
+        self, key: Tuple[Union[str, int], ...]
+    ) -> ReadOnlyColumnCollection[_COLKEY, _COL_co]: ...
+
+    @overload
+    def __getitem__(
+        self, key: slice
+    ) -> ReadOnlyColumnCollection[_COLKEY, _COL_co]: ...
+
+    def __getitem__(
+        self, key: Union[str, int, slice, Tuple[Union[str, int], ...]]
+    ) -> Union[ReadOnlyColumnCollection[_COLKEY, _COL_co], _COL_co]:
+        try:
+            if isinstance(key, (tuple, slice)):
+                if isinstance(key, slice):
+                    cols = (
+                        (sub_key, col)
+                        for (sub_key, col, _) in self._collection[key]
+                    )
+                else:
+                    cols = (self._index[sub_key] for sub_key in key)
+
+                return ColumnCollection(cols).as_readonly()
+            else:
+                return self._index[key][1]
+        except KeyError as err:
+            if isinstance(err.args[0], int):
+                raise IndexError(err.args[0]) from err
+            else:
+                raise
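+
+    # usage sketch: beyond single string or integer keys, a tuple of keys
+    # or a slice returns a read-only sub-collection (``t`` here is an
+    # assumed Table with columns "x" and "y")::
+    #
+    #     sub = t.c["x", "y"]    # ReadOnlyColumnCollection of both columns
+    #     first_two = t.c[0:2]   # the same columns, selected by position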
+
+    def __getattr__(self, key: str) -> _COL_co:
+        try:
+            return self._index[key][1]
+        except KeyError as err:
+            raise AttributeError(key) from err
+
+    def __contains__(self, key: str) -> bool:
+        if key not in self._index:
+            if not isinstance(key, str):
+                raise exc.ArgumentError(
+                    "__contains__ requires a string argument"
+                )
+            return False
+        else:
+            return True
+
+    def compare(self, other: ColumnCollection[Any, Any]) -> bool:
+        """Compare this :class:`_expression.ColumnCollection` to another
+        based on the names of the keys"""
+
+        for l, r in zip_longest(self, other):
+            if l is not r:
+                return False
+        else:
+            return True
+
+    def __eq__(self, other: Any) -> bool:
+        return self.compare(other)
+
+    @overload
+    def get(self, key: str, default: None = None) -> Optional[_COL_co]: ...
+
+    @overload
+    def get(self, key: str, default: _COL) -> Union[_COL_co, _COL]: ...
+
+    def get(
+        self, key: str, default: Optional[_COL] = None
+    ) -> Optional[Union[_COL_co, _COL]]:
+        """Get a :class:`_sql.ColumnClause` or :class:`_schema.Column` object
+        based on a string key name from this
+        :class:`_expression.ColumnCollection`."""
+
+        if key in self._index:
+            return self._index[key][1]
+        else:
+            return default
+
+    def __str__(self) -> str:
+        return "%s(%s)" % (
+            self.__class__.__name__,
+            ", ".join(str(c) for c in self),
+        )
+
+    def __setitem__(self, key: str, value: Any) -> NoReturn:
+        raise NotImplementedError()
+
+    def __delitem__(self, key: str) -> NoReturn:
+        raise NotImplementedError()
+
+    def __setattr__(self, key: str, obj: Any) -> NoReturn:
+        raise NotImplementedError()
+
+    def clear(self) -> NoReturn:
+        """Dictionary clear() is not implemented for
+        :class:`_sql.ColumnCollection`."""
+        raise NotImplementedError()
+
+    def remove(self, column: Any) -> None:
+        raise NotImplementedError()
+
+    def update(self, iter_: Any) -> NoReturn:
+        """Dictionary update() is not implemented for
+        :class:`_sql.ColumnCollection`."""
+        raise NotImplementedError()
+
+    # https://github.com/python/mypy/issues/4266
+    __hash__ = None  # type: ignore
+
+    def _populate_separate_keys(
+        self, iter_: Iterable[Tuple[_COLKEY, _COL_co]]
+    ) -> None:
+        """populate from an iterator of (key, column)"""
+
+        self._collection[:] = collection = [
+            (k, c, _ColumnMetrics(self, c)) for k, c in iter_
+        ]
+        self._colset.update(c._deannotate() for _, c, _ in collection)
+        self._index.update(
+            {idx: (k, c) for idx, (k, c, _) in enumerate(collection)}
+        )
+        self._index.update({k: (k, col) for k, col, _ in reversed(collection)})
+
+    def add(
+        self, column: ColumnElement[Any], key: Optional[_COLKEY] = None
+    ) -> None:
+        """Add a column to this :class:`_sql.ColumnCollection`.
+
+        .. note::
+
+            This method is **not normally used by user-facing code**, as the
+            :class:`_sql.ColumnCollection` is usually part of an existing
+            object such as a :class:`_schema.Table`. To add a
+            :class:`_schema.Column` to an existing :class:`_schema.Table`
+            object, use the :meth:`_schema.Table.append_column` method.
+
+        """
+        colkey: _COLKEY
+
+        if key is None:
+            colkey = column.key  # type: ignore
+        else:
+            colkey = key
+
+        l = len(self._collection)
+
+        # don't really know how this part is supposed to work w/ the
+        # covariant thing
+
+        _column = cast(_COL_co, column)
+
+        self._collection.append(
+            (colkey, _column, _ColumnMetrics(self, _column))
+        )
+        self._colset.add(_column._deannotate())
+        self._index[l] = (colkey, _column)
+        if colkey not in self._index:
+            self._index[colkey] = (colkey, _column)
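+
+    # usage sketch: user-facing code normally adds columns through the
+    # owning object rather than calling add() directly, e.g. for an
+    # assumed Table ``t``::
+    #
+    #     t.append_column(Column("q", Integer))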
+
+    def __getstate__(self) -> Dict[str, Any]:
+        return {
+            "_collection": [(k, c) for k, c, _ in self._collection],
+            "_index": self._index,
+        }
+
+    def __setstate__(self, state: Dict[str, Any]) -> None:
+        object.__setattr__(self, "_index", state["_index"])
+        object.__setattr__(
+            self, "_proxy_index", collections.defaultdict(util.OrderedSet)
+        )
+        object.__setattr__(
+            self,
+            "_collection",
+            [
+                (k, c, _ColumnMetrics(self, c))
+                for (k, c) in state["_collection"]
+            ],
+        )
+        object.__setattr__(
+            self, "_colset", {col for k, col, _ in self._collection}
+        )
+
+    def contains_column(self, col: ColumnElement[Any]) -> bool:
+        """Checks if a column object exists in this collection"""
+        if col not in self._colset:
+            if isinstance(col, str):
+                raise exc.ArgumentError(
+                    "contains_column cannot be used with string arguments. "
+                    "Use ``col_name in table.c`` instead."
+                )
+            return False
+        else:
+            return True
+
+    def as_readonly(self) -> ReadOnlyColumnCollection[_COLKEY, _COL_co]:
+        """Return a "read only" form of this
+        :class:`_sql.ColumnCollection`."""
+
+        return ReadOnlyColumnCollection(self)
+
+    def _init_proxy_index(self):
+        """populate the "proxy index", if empty.
+
+        proxy index is added in 2.0 to provide more efficient operation
+        for the corresponding_column() method.
+
+        For reasons of both time to construct new .c collections as well as
+        memory conservation for large numbers of large .c collections, the
+        proxy_index is only filled if corresponding_column() is called. once
+        filled it stays that way, and new _ColumnMetrics objects created after
+        that point will populate it with new data. Note this case would be
+        unusual, if not nonexistent, as it means a .c collection is being
+        mutated after corresponding_column() were used, however it is tested in
+        test/base/test_utils.py.
+
+        """
+        pi = self._proxy_index
+        if pi:
+            return
+
+        for _, _, metrics in self._collection:
+            eps = metrics.column._expanded_proxy_set
+
+            for eps_col in eps:
+                pi[eps_col].add(metrics)
+
+    def corresponding_column(
+        self, column: _COL, require_embedded: bool = False
+    ) -> Optional[Union[_COL, _COL_co]]:
+        """Given a :class:`_expression.ColumnElement`, return the exported
+        :class:`_expression.ColumnElement` object from this
+        :class:`_expression.ColumnCollection`
+        which corresponds to that original :class:`_expression.ColumnElement`
+        via a common
+        ancestor column.
+
+        :param column: the target :class:`_expression.ColumnElement`
+                      to be matched.
+
+        :param require_embedded: only return corresponding columns for
+         the given :class:`_expression.ColumnElement`, if the given
+         :class:`_expression.ColumnElement`
+         is actually present within a sub-element
+         of this :class:`_expression.Selectable`.
+         Normally the column will match if
+         it merely shares a common ancestor with one of the exported
+         columns of this :class:`_expression.Selectable`.
+
+        .. seealso::
+
+            :meth:`_expression.Selectable.corresponding_column`
+            - invokes this method
+            against the collection returned by
+            :attr:`_expression.Selectable.exported_columns`.
+
+        .. versionchanged:: 1.4 the implementation for ``corresponding_column``
+           was moved onto the :class:`_expression.ColumnCollection` itself.
+
+        """
+        # TODO: cython candidate
+
+        # don't dig around if the column is locally present
+        if column in self._colset:
+            return column
+
+        selected_intersection, selected_metrics = None, None
+        target_set = column.proxy_set
+
+        pi = self._proxy_index
+        if not pi:
+            self._init_proxy_index()
+
+        for current_metrics in (
+            mm for ts in target_set if ts in pi for mm in pi[ts]
+        ):
+            if not require_embedded or current_metrics.embedded(target_set):
+                if selected_metrics is None:
+                    # no corresponding column yet, pick this one.
+                    selected_metrics = current_metrics
+                    continue
+
+                current_intersection = target_set.intersection(
+                    current_metrics.column._expanded_proxy_set
+                )
+                if selected_intersection is None:
+                    selected_intersection = target_set.intersection(
+                        selected_metrics.column._expanded_proxy_set
+                    )
+
+                if len(current_intersection) > len(selected_intersection):
+                    # 'current' has a larger field of correspondence than
+                    # 'selected'. i.e. selectable.c.a1_x->a1.c.x->table.c.x
+                    # matches a1.c.x->table.c.x better than
+                    # selectable.c.x->table.c.x does.
+
+                    selected_metrics = current_metrics
+                    selected_intersection = current_intersection
+                elif current_intersection == selected_intersection:
+                    # they have the same field of correspondence. see
+                    # which proxy_set has fewer columns in it, which
+                    # indicates a closer relationship with the root
+                    # column. Also take into account the "weight"
+                    # attribute which CompoundSelect() uses to give
+                    # higher precedence to columns based on vertical
+                    # position in the compound statement, and discard
+                    # columns that have no reference to the target
+                    # column (also occurs with CompoundSelect)
+
+                    selected_col_distance = sum(
+                        [
+                            sc._annotations.get("weight", 1)
+                            for sc in (
+                                selected_metrics.column._uncached_proxy_list()
+                            )
+                            if sc.shares_lineage(column)
+                        ],
+                    )
+                    current_col_distance = sum(
+                        [
+                            sc._annotations.get("weight", 1)
+                            for sc in (
+                                current_metrics.column._uncached_proxy_list()
+                            )
+                            if sc.shares_lineage(column)
+                        ],
+                    )
+                    if current_col_distance < selected_col_distance:
+                        selected_metrics = current_metrics
+                        selected_intersection = current_intersection
+
+        return selected_metrics.column if selected_metrics else None
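+
+    # usage sketch: corresponding_column() is what allows a subquery's
+    # exported columns to be located from the base table's columns
+    # (``t`` here is an assumed Table with a column "x")::
+    #
+    #     subq = select(t).subquery()
+    #     assert subq.c.corresponding_column(t.c.x) is subq.c.x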
+
+
+_NAMEDCOL = TypeVar("_NAMEDCOL", bound="NamedColumn[Any]")
+
+
+class DedupeColumnCollection(ColumnCollection[str, _NAMEDCOL]):
+    """A :class:`_expression.ColumnCollection`
+    that maintains deduplicating behavior.
+
+    This is used by schema-level objects such as :class:`_schema.Table` and
+    :class:`.PrimaryKeyConstraint`.  The collection includes more
+    sophisticated mutator methods as well, to suit schema objects which
+    require mutable column collections.
+
+    .. versionadded:: 1.4
+
+    """
+
+    def add(  # type: ignore[override]
+        self, column: _NAMEDCOL, key: Optional[str] = None
+    ) -> None:
+        if key is not None and column.key != key:
+            raise exc.ArgumentError(
+                "DedupeColumnCollection requires columns be under "
+                "the same key as their .key"
+            )
+        key = column.key
+
+        if key is None:
+            raise exc.ArgumentError(
+                "Can't add unnamed column to column collection"
+            )
+
+        if key in self._index:
+            existing = self._index[key][1]
+
+            if existing is column:
+                return
+
+            self.replace(column)
+
+            # pop out memoized proxy_set as this
+            # operation may very well be occurring
+            # in a _make_proxy operation
+            util.memoized_property.reset(column, "proxy_set")
+        else:
+            self._append_new_column(key, column)
+
+    def _append_new_column(self, key: str, named_column: _NAMEDCOL) -> None:
+        l = len(self._collection)
+        self._collection.append(
+            (key, named_column, _ColumnMetrics(self, named_column))
+        )
+        self._colset.add(named_column._deannotate())
+        self._index[l] = (key, named_column)
+        self._index[key] = (key, named_column)
+
+    def _populate_separate_keys(
+        self, iter_: Iterable[Tuple[str, _NAMEDCOL]]
+    ) -> None:
+        """populate from an iterator of (key, column)"""
+        cols = list(iter_)
+
+        replace_col = []
+        for k, col in cols:
+            if col.key != k:
+                raise exc.ArgumentError(
+                    "DedupeColumnCollection requires columns be under "
+                    "the same key as their .key"
+                )
+            if col.name in self._index and col.key != col.name:
+                replace_col.append(col)
+            elif col.key in self._index:
+                replace_col.append(col)
+            else:
+                self._index[k] = (k, col)
+                self._collection.append((k, col, _ColumnMetrics(self, col)))
+        self._colset.update(c._deannotate() for (k, c, _) in self._collection)
+
+        self._index.update(
+            (idx, (k, c)) for idx, (k, c, _) in enumerate(self._collection)
+        )
+        for col in replace_col:
+            self.replace(col)
+
+    def extend(self, iter_: Iterable[_NAMEDCOL]) -> None:
+        self._populate_separate_keys((col.key, col) for col in iter_)
+
+    def remove(self, column: _NAMEDCOL) -> None:
+        if column not in self._colset:
+            raise ValueError(
+                "Can't remove column %r; column is not in this collection"
+                % column
+            )
+        del self._index[column.key]
+        self._colset.remove(column)
+        self._collection[:] = [
+            (k, c, metrics)
+            for (k, c, metrics) in self._collection
+            if c is not column
+        ]
+        for metrics in self._proxy_index.get(column, ()):
+            metrics.dispose(self)
+
+        self._index.update(
+            {idx: (k, col) for idx, (k, col, _) in enumerate(self._collection)}
+        )
+        # delete the stale integer index entry left at the previous
+        # maximum position
+        del self._index[len(self._collection)]
+
+    def replace(
+        self,
+        column: _NAMEDCOL,
+        extra_remove: Optional[Iterable[_NAMEDCOL]] = None,
+    ) -> None:
+        """add the given column to this collection, removing unaliased
+        versions of this column as well as existing columns with the
+        same key.
+
+        e.g.::
+
+            t = Table("sometable", metadata, Column("col1", Integer))
+            t.columns.replace(Column("col1", Integer, key="columnone"))
+
+        will remove the original 'col1' from the collection, and add
+        the new column under the key 'columnone'.
+
+        Used by schema.Column to override columns during table reflection.
+
+        """
+
+        if extra_remove:
+            remove_col = set(extra_remove)
+        else:
+            remove_col = set()
+        # remove up to two columns based on matches of name as well as key
+        if column.name in self._index and column.key != column.name:
+            other = self._index[column.name][1]
+            if other.name == other.key:
+                remove_col.add(other)
+
+        if column.key in self._index:
+            remove_col.add(self._index[column.key][1])
+
+        if not remove_col:
+            self._append_new_column(column.key, column)
+            return
+        new_cols: List[Tuple[str, _NAMEDCOL, _ColumnMetrics[_NAMEDCOL]]] = []
+        replaced = False
+        for k, col, metrics in self._collection:
+            if col in remove_col:
+                if not replaced:
+                    replaced = True
+                    new_cols.append(
+                        (column.key, column, _ColumnMetrics(self, column))
+                    )
+            else:
+                new_cols.append((k, col, metrics))
+
+        if remove_col:
+            self._colset.difference_update(remove_col)
+
+            for rc in remove_col:
+                for metrics in self._proxy_index.get(rc, ()):
+                    metrics.dispose(self)
+
+        if not replaced:
+            new_cols.append((column.key, column, _ColumnMetrics(self, column)))
+
+        self._colset.add(column._deannotate())
+        self._collection[:] = new_cols
+
+        self._index.clear()
+
+        self._index.update(
+            {idx: (k, col) for idx, (k, col, _) in enumerate(self._collection)}
+        )
+        self._index.update({k: (k, col) for (k, col, _) in self._collection})
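+
+    # usage sketch: adding a second column under an existing key replaces
+    # the first entry rather than storing a duplicate::
+    #
+    #     cc = DedupeColumnCollection()
+    #     cc.add(Column("x", Integer))
+    #     cc.add(Column("x", Integer))  # replaces the prior "x"
+    #     assert len(cc) == 1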
+
+
+class ReadOnlyColumnCollection(
+    util.ReadOnlyContainer, ColumnCollection[_COLKEY, _COL_co]
+):
+    __slots__ = ("_parent",)
+
+    def __init__(self, collection):
+        object.__setattr__(self, "_parent", collection)
+        object.__setattr__(self, "_colset", collection._colset)
+        object.__setattr__(self, "_index", collection._index)
+        object.__setattr__(self, "_collection", collection._collection)
+        object.__setattr__(self, "_proxy_index", collection._proxy_index)
+
+    def __getstate__(self):
+        return {"_parent": self._parent}
+
+    def __setstate__(self, state):
+        parent = state["_parent"]
+        self.__init__(parent)  # type: ignore
+
+    def add(self, column: Any, key: Any = ...) -> Any:
+        self._readonly()
+
+    def extend(self, elements: Any) -> NoReturn:
+        self._readonly()
+
+    def remove(self, item: Any) -> NoReturn:
+        self._readonly()
+
+
+class ColumnSet(util.OrderedSet["ColumnClause[Any]"]):
+    def contains_column(self, col):
+        return col in self
+
+    def extend(self, cols):
+        for col in cols:
+            self.add(col)
+
+    def __eq__(self, other):
+        l = []
+        for c in other:
+            for local in self:
+                if c.shares_lineage(local):
+                    l.append(c == local)
+        return elements.and_(*l)
+
+    def __hash__(self):  # type: ignore[override]
+        return hash(tuple(x for x in self))
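+
+# usage sketch: ColumnSet.__eq__ produces a SQL conjunction rather than a
+# boolean; comparing two related column sets yields an expression such as
+# ``and_(a.c.id == b.c.a_id, ...)`` (``pk_cols`` / ``fk_cols`` here are
+# assumed ColumnSet instances)::
+#
+#     onclause = pk_cols == fk_cols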
+
+
+def _entity_namespace(
+    entity: Union[_HasEntityNamespace, ExternallyTraversible]
+) -> _EntityNamespace:
+    """Return the nearest .entity_namespace for the given entity.
+
+    If not immediately available, iterates the element to find a
+    sub-element that has one, if any.
+
+    """
+    try:
+        return cast(_HasEntityNamespace, entity).entity_namespace
+    except AttributeError:
+        for elem in visitors.iterate(cast(ExternallyTraversible, entity)):
+            if _is_has_entity_namespace(elem):
+                return elem.entity_namespace
+        else:
+            raise
+
+
+def _entity_namespace_key(
+    entity: Union[_HasEntityNamespace, ExternallyTraversible],
+    key: str,
+    default: Union[SQLCoreOperations[Any], _NoArg] = NO_ARG,
+) -> SQLCoreOperations[Any]:
+    """Return an entry from an entity_namespace.
+
+    Raises :class:`_exc.InvalidRequestError` rather than ``AttributeError``
+    when the key is not found.
+
+    """
+
+    try:
+        ns = _entity_namespace(entity)
+        if default is not NO_ARG:
+            return getattr(ns, key, default)
+        else:
+            return getattr(ns, key)  # type: ignore
+    except AttributeError as err:
+        raise exc.InvalidRequestError(
+            'Entity namespace for "%s" has no property "%s"' % (entity, key)
+        ) from err
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/cache_key.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/cache_key.py
new file mode 100644
index 00000000..1f562f2e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/cache_key.py
@@ -0,0 +1,1057 @@
+# sql/cache_key.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+import enum
+from itertools import zip_longest
+import typing
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import MutableMapping
+from typing import NamedTuple
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+from typing import Union
+
+from .visitors import anon_map
+from .visitors import HasTraversalDispatch
+from .visitors import HasTraverseInternals
+from .visitors import InternalTraversal
+from .visitors import prefix_anon_map
+from .. import util
+from ..inspection import inspect
+from ..util import HasMemoized
+from ..util.typing import Literal
+from ..util.typing import Protocol
+
+if typing.TYPE_CHECKING:
+    from .elements import BindParameter
+    from .elements import ClauseElement
+    from .elements import ColumnElement
+    from .visitors import _TraverseInternalsType
+    from ..engine.interfaces import _CoreSingleExecuteParams
+
+
+class _CacheKeyTraversalDispatchType(Protocol):
+    def __call__(
+        s, self: HasCacheKey, visitor: _CacheKeyTraversal
+    ) -> _CacheKeyTraversalDispatchTypeReturn: ...
+
+
+class CacheConst(enum.Enum):
+    NO_CACHE = 0
+
+
+NO_CACHE = CacheConst.NO_CACHE
+
+
+_CacheKeyTraversalType = Union[
+    "_TraverseInternalsType", Literal[CacheConst.NO_CACHE], Literal[None]
+]
+
+
+class CacheTraverseTarget(enum.Enum):
+    CACHE_IN_PLACE = 0
+    CALL_GEN_CACHE_KEY = 1
+    STATIC_CACHE_KEY = 2
+    PROPAGATE_ATTRS = 3
+    ANON_NAME = 4
+
+
+(
+    CACHE_IN_PLACE,
+    CALL_GEN_CACHE_KEY,
+    STATIC_CACHE_KEY,
+    PROPAGATE_ATTRS,
+    ANON_NAME,
+) = tuple(CacheTraverseTarget)
+
+_CacheKeyTraversalDispatchTypeReturn = Sequence[
+    Tuple[
+        str,
+        Any,
+        Union[
+            Callable[..., Tuple[Any, ...]],
+            CacheTraverseTarget,
+            InternalTraversal,
+        ],
+    ]
+]
+
+
+class HasCacheKey:
+    """Mixin for objects which can produce a cache key.
+
+    This class is usually in a hierarchy that starts with the
+    :class:`.HasTraverseInternals` base, but this is optional.  Currently,
+    the class should be able to work on its own without including
+    :class:`.HasTraverseInternals`.
+
+    .. seealso::
+
+        :class:`.CacheKey`
+
+        :ref:`sql_caching`
+
+    """
+
+    __slots__ = ()
+
+    _cache_key_traversal: _CacheKeyTraversalType = NO_CACHE
+
+    _is_has_cache_key = True
+
+    _hierarchy_supports_caching = True
+    """private attribute which may be set to False to prevent the
+    inherit_cache warning from being emitted for a hierarchy of subclasses.
+
+    Currently applies to the :class:`.ExecutableDDLElement` hierarchy which
+    does not implement caching.
+
+    """
+
+    inherit_cache: Optional[bool] = None
+    """Indicate if this :class:`.HasCacheKey` instance should make use of the
+    cache key generation scheme used by its immediate superclass.
+
+    The attribute defaults to ``None``, which indicates that a construct has
+    not yet taken into account whether or not it is appropriate for it to
+    participate in caching; this is functionally equivalent to setting the
+    value to ``False``, except that a warning is also emitted.
+
+    This flag can be set to ``True`` on a particular class, if the SQL that
+    corresponds to the object does not change based on attributes which
+    are local to this class, and not its superclass.
+
+    .. seealso::
+
+        :ref:`compilerext_caching` - General guidelines for setting the
+        :attr:`.HasCacheKey.inherit_cache` attribute for third-party or user
+        defined SQL constructs.
+
+    """
+
+    _generated_cache_key_traversal: Any
+
+    @classmethod
+    def _generate_cache_attrs(
+        cls,
+    ) -> Union[_CacheKeyTraversalDispatchType, Literal[CacheConst.NO_CACHE]]:
+        """generate cache key dispatcher for a new class.
+
+        This sets the _generated_cache_key_traversal attribute once called
+        so should only be called once per class.
+
+        """
+        inherit_cache = cls.__dict__.get("inherit_cache", None)
+        inherit = bool(inherit_cache)
+
+        if inherit:
+            _cache_key_traversal = getattr(cls, "_cache_key_traversal", None)
+            if _cache_key_traversal is None:
+                try:
+                    assert issubclass(cls, HasTraverseInternals)
+                    _cache_key_traversal = cls._traverse_internals
+                except AttributeError:
+                    cls._generated_cache_key_traversal = NO_CACHE
+                    return NO_CACHE
+
+            assert _cache_key_traversal is not NO_CACHE, (
+                f"class {cls} has _cache_key_traversal=NO_CACHE, "
+                "which conflicts with inherit_cache=True"
+            )
+
+            # TODO: wouldn't we instead get this from our superclass?
+            # also, our superclass may not have this yet, but in any case,
+            # we'd generate for the superclass that has it.   this is a little
+            # more complicated, so for the moment this is a little less
+            # efficient on startup but simpler.
+            return _cache_key_traversal_visitor.generate_dispatch(
+                cls,
+                _cache_key_traversal,
+                "_generated_cache_key_traversal",
+            )
+        else:
+            _cache_key_traversal = cls.__dict__.get(
+                "_cache_key_traversal", None
+            )
+            if _cache_key_traversal is None:
+                _cache_key_traversal = cls.__dict__.get(
+                    "_traverse_internals", None
+                )
+                if _cache_key_traversal is None:
+                    cls._generated_cache_key_traversal = NO_CACHE
+                    if (
+                        inherit_cache is None
+                        and cls._hierarchy_supports_caching
+                    ):
+                        util.warn(
+                            "Class %s will not make use of SQL compilation "
+                            "caching as it does not set the 'inherit_cache' "
+                            "attribute to ``True``.  This can have "
+                            "significant performance implications including "
+                            "some performance degradations in comparison to "
+                            "prior SQLAlchemy versions.  Set this attribute "
+                            "to True if this object can make use of the cache "
+                            "key generated by the superclass.  Alternatively, "
+                            "this attribute may be set to False which will "
+                            "disable this warning." % (cls.__name__),
+                            code="cprf",
+                        )
+                    return NO_CACHE
+
+            return _cache_key_traversal_visitor.generate_dispatch(
+                cls,
+                _cache_key_traversal,
+                "_generated_cache_key_traversal",
+            )
+
+    @util.preload_module("sqlalchemy.sql.elements")
+    def _gen_cache_key(
+        self, anon_map: anon_map, bindparams: List[BindParameter[Any]]
+    ) -> Optional[Tuple[Any, ...]]:
+        """return an optional cache key.
+
+        The cache key is a tuple which can contain any series of
+        objects that are hashable and also identifies
+        this object uniquely within the presence of a larger SQL expression
+        or statement, for the purposes of caching the resulting query.
+
+        The cache key should be based on the SQL compiled structure that would
+        ultimately be produced.   That is, two structures that are composed in
+        exactly the same way should produce the same cache key; any difference
+        in the structures that would affect the SQL string or the type handlers
+        should result in a different cache key.
+
+        If a structure cannot produce a useful cache key, the NO_CACHE
+        symbol should be added to the anon_map and the method should
+        return None.
+
+        """
+
+        cls = self.__class__
+
+        id_, found = anon_map.get_anon(self)
+        if found:
+            return (id_, cls)
+
+        dispatcher: Union[
+            Literal[CacheConst.NO_CACHE],
+            _CacheKeyTraversalDispatchType,
+        ]
+
+        try:
+            dispatcher = cls.__dict__["_generated_cache_key_traversal"]
+        except KeyError:
+            # traversals.py -> _preconfigure_traversals()
+            # may be used to run these ahead of time, but
+            # is not enabled right now.
+            # this block will generate any remaining dispatchers.
+            dispatcher = cls._generate_cache_attrs()
+
+        if dispatcher is NO_CACHE:
+            anon_map[NO_CACHE] = True
+            return None
+
+        result: Tuple[Any, ...] = (id_, cls)
+
+        # inline of _cache_key_traversal_visitor.run_generated_dispatch()
+
+        for attrname, obj, meth in dispatcher(
+            self, _cache_key_traversal_visitor
+        ):
+            if obj is not None:
+                # TODO: see if C code can help here as Python lacks an
+                # efficient switch construct
+
+                if meth is STATIC_CACHE_KEY:
+                    sck = obj._static_cache_key
+                    if sck is NO_CACHE:
+                        anon_map[NO_CACHE] = True
+                        return None
+                    result += (attrname, sck)
+                elif meth is ANON_NAME:
+                    elements = util.preloaded.sql_elements
+                    if isinstance(obj, elements._anonymous_label):
+                        obj = obj.apply_map(anon_map)  # type: ignore
+                    result += (attrname, obj)
+                elif meth is CALL_GEN_CACHE_KEY:
+                    result += (
+                        attrname,
+                        obj._gen_cache_key(anon_map, bindparams),
+                    )
+
+                # remaining cache functions are against
+                # Python tuples, dicts, lists, etc. so we can skip
+                # if they are empty
+                elif obj:
+                    if meth is CACHE_IN_PLACE:
+                        result += (attrname, obj)
+                    elif meth is PROPAGATE_ATTRS:
+                        result += (
+                            attrname,
+                            obj["compile_state_plugin"],
+                            (
+                                obj["plugin_subject"]._gen_cache_key(
+                                    anon_map, bindparams
+                                )
+                                if obj["plugin_subject"]
+                                else None
+                            ),
+                        )
+                    elif meth is InternalTraversal.dp_annotations_key:
+                        # obj here is the _annotations dict.  Table uses
+                        # a memoized version of it.  however in other cases,
+                        # we generate it given anon_map as we may be from a
+                        # Join, Aliased, etc.
+                        # see #8790
+
+                        if self._gen_static_annotations_cache_key:  # type: ignore  # noqa: E501
+                            result += self._annotations_cache_key  # type: ignore  # noqa: E501
+                        else:
+                            result += self._gen_annotations_cache_key(anon_map)  # type: ignore  # noqa: E501
+
+                    elif (
+                        meth is InternalTraversal.dp_clauseelement_list
+                        or meth is InternalTraversal.dp_clauseelement_tuple
+                        or meth
+                        is InternalTraversal.dp_memoized_select_entities
+                    ):
+                        result += (
+                            attrname,
+                            tuple(
+                                [
+                                    elem._gen_cache_key(anon_map, bindparams)
+                                    for elem in obj
+                                ]
+                            ),
+                        )
+                    else:
+                        result += meth(  # type: ignore
+                            attrname, obj, self, anon_map, bindparams
+                        )
+        return result
+
+    def _generate_cache_key(self) -> Optional[CacheKey]:
+        """return a cache key.
+
+        The cache key is a tuple which can contain any series of
+        objects that are hashable and also identifies
+        this object uniquely within the presence of a larger SQL expression
+        or statement, for the purposes of caching the resulting query.
+
+        The cache key should be based on the SQL compiled structure that would
+        ultimately be produced.   That is, two structures that are composed in
+        exactly the same way should produce the same cache key; any difference
+        in the structures that would affect the SQL string or the type handlers
+        should result in a different cache key.
+
+        The cache key returned by this method is an instance of
+        :class:`.CacheKey`, which consists of a tuple representing the
+        cache key, as well as a list of :class:`.BindParameter` objects
+        which are extracted from the expression.   While two expressions
+        that produce identical cache key tuples will themselves generate
+        identical SQL strings, the list of :class:`.BindParameter` objects
+        indicates the bound values which may have different values in
+        each one; these bound parameters must be consulted in order to
+        execute the statement with the correct parameters.
+
+        A :class:`_expression.ClauseElement` structure that does not implement
+        a :meth:`._gen_cache_key` method and does not implement a
+        :attr:`._traverse_internals` attribute will not be cacheable; when
+        such an element is embedded into a larger structure, this method
+        will return None, indicating no cache key is available.
+
+        """
+
+        bindparams: List[BindParameter[Any]] = []
+
+        _anon_map = anon_map()
+        key = self._gen_cache_key(_anon_map, bindparams)
+        if NO_CACHE in _anon_map:
+            return None
+        else:
+            assert key is not None
+            return CacheKey(key, bindparams)
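+
+    # usage sketch: two structurally identical statements produce equal
+    # cache keys even when their bound values differ, as literal values
+    # are carried by the .bindparams sequence, not the key itself
+    # (``t`` here is an assumed Table)::
+    #
+    #     k1 = select(t).where(t.c.x == 5)._generate_cache_key()
+    #     k2 = select(t).where(t.c.x == 10)._generate_cache_key()
+    #     assert k1 == k2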
+
+    @classmethod
+    def _generate_cache_key_for_object(
+        cls, obj: HasCacheKey
+    ) -> Optional[CacheKey]:
+        bindparams: List[BindParameter[Any]] = []
+
+        _anon_map = anon_map()
+        key = obj._gen_cache_key(_anon_map, bindparams)
+        if NO_CACHE in _anon_map:
+            return None
+        else:
+            assert key is not None
+            return CacheKey(key, bindparams)
+
+
+class HasCacheKeyTraverse(HasTraverseInternals, HasCacheKey):
+    pass
+
+
+class MemoizedHasCacheKey(HasCacheKey, HasMemoized):
+    __slots__ = ()
+
+    @HasMemoized.memoized_instancemethod
+    def _generate_cache_key(self) -> Optional[CacheKey]:
+        return HasCacheKey._generate_cache_key(self)
+
+
+class SlotsMemoizedHasCacheKey(HasCacheKey, util.MemoizedSlots):
+    __slots__ = ()
+
+    def _memoized_method__generate_cache_key(self) -> Optional[CacheKey]:
+        return HasCacheKey._generate_cache_key(self)
+
+
+class CacheKey(NamedTuple):
+    """The key used to identify a SQL statement construct in the
+    SQL compilation cache.
+
+    .. seealso::
+
+        :ref:`sql_caching`
+
+    """
+
+    key: Tuple[Any, ...]
+    bindparams: Sequence[BindParameter[Any]]
+
+    # can't set __hash__ attribute because it interferes
+    # with namedtuple
+    # can't use "if not TYPE_CHECKING" because mypy rejects it
+    # inside of a NamedTuple
+    def __hash__(self) -> Optional[int]:  # type: ignore
+        """CacheKey itself is not hashable - hash the .key portion"""
+        return None
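+
+    # Illustrative note (not part of the library): since hash() of a
+    # CacheKey raises, a CacheKey can't serve directly as a dict key;
+    # caches hash the ``.key`` tuple instead, e.g. (names hypothetical)::
+    #
+    #     ck = stmt._generate_cache_key()
+    #     compiled_cache[ck.key] = compiled_form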
+
+    def to_offline_string(
+        self,
+        statement_cache: MutableMapping[Any, str],
+        statement: ClauseElement,
+        parameters: _CoreSingleExecuteParams,
+    ) -> str:
+        """Generate an "offline string" form of this :class:`.CacheKey`
+
+        The "offline string" is basically the string SQL for the
+        statement plus a repr of the bound parameter values in series.
+        Whereas the :class:`.CacheKey` object is dependent on in-memory
+        identities in order to work as a cache key, the "offline" version
+        is suitable for a cache that will work for other processes as well.
+
+        The given ``statement_cache`` is a dictionary-like object where the
+        string form of the statement itself will be cached.  This dictionary
+        should be in a longer-lived scope in order to reduce the time spent
+        stringifying statements.
+
+        """
+        if self.key not in statement_cache:
+            statement_cache[self.key] = sql_str = str(statement)
+        else:
+            sql_str = statement_cache[self.key]
+
+        if not self.bindparams:
+            param_tuple = tuple(parameters[key] for key in sorted(parameters))
+        else:
+            param_tuple = tuple(
+                parameters.get(bindparam.key, bindparam.value)
+                for bindparam in self.bindparams
+            )
+
+        return repr((sql_str, param_tuple))
+
+    def __eq__(self, other: Any) -> bool:
+        return bool(self.key == other.key)
+
+    def __ne__(self, other: Any) -> bool:
+        return not (self.key == other.key)
+
+    @classmethod
+    def _diff_tuples(cls, left: CacheKey, right: CacheKey) -> str:
+        ck1 = CacheKey(left, [])
+        ck2 = CacheKey(right, [])
+        return ck1._diff(ck2)
+
+    def _whats_different(self, other: CacheKey) -> Iterator[str]:
+        k1 = self.key
+        k2 = other.key
+
+        stack: List[int] = []
+        pickup_index = 0
+        while True:
+            s1, s2 = k1, k2
+            for idx in stack:
+                s1 = s1[idx]
+                s2 = s2[idx]
+
+            for idx, (e1, e2) in enumerate(zip_longest(s1, s2)):
+                if idx < pickup_index:
+                    continue
+                if e1 != e2:
+                    if isinstance(e1, tuple) and isinstance(e2, tuple):
+                        stack.append(idx)
+                        break
+                    else:
+                        yield "key%s[%d]:  %s != %s" % (
+                            "".join("[%d]" % id_ for id_ in stack),
+                            idx,
+                            e1,
+                            e2,
+                        )
+            else:
+                pickup_index = stack.pop(-1)
+                break
+
+    def _diff(self, other: CacheKey) -> str:
+        return ", ".join(self._whats_different(other))
+
+    def __str__(self) -> str:
+        stack: List[Union[Tuple[Any, ...], HasCacheKey]] = [self.key]
+
+        output = []
+        sentinel = object()
+        indent = -1
+        while stack:
+            elem = stack.pop(0)
+            if elem is sentinel:
+                output.append((" " * (indent * 2)) + "),")
+                indent -= 1
+            elif isinstance(elem, tuple):
+                if not elem:
+                    output.append((" " * ((indent + 1) * 2)) + "()")
+                else:
+                    indent += 1
+                    stack = list(elem) + [sentinel] + stack
+                    output.append((" " * (indent * 2)) + "(")
+            else:
+                if isinstance(elem, HasCacheKey):
+                    repr_ = "<%s object at %s>" % (
+                        type(elem).__name__,
+                        hex(id(elem)),
+                    )
+                else:
+                    repr_ = repr(elem)
+                output.append((" " * (indent * 2)) + "  " + repr_ + ", ")
+
+        return "CacheKey(key=%s)" % ("\n".join(output),)
+
+    def _generate_param_dict(self) -> Dict[str, Any]:
+        """used for testing"""
+
+        _anon_map = prefix_anon_map()
+        return {b.key % _anon_map: b.effective_value for b in self.bindparams}
+
+    @util.preload_module("sqlalchemy.sql.elements")
+    def _apply_params_to_element(
+        self, original_cache_key: CacheKey, target_element: ColumnElement[Any]
+    ) -> ColumnElement[Any]:
+        if target_element._is_immutable or original_cache_key is self:
+            return target_element
+
+        elements = util.preloaded.sql_elements
+        return elements._OverrideBinds(
+            target_element, self.bindparams, original_cache_key.bindparams
+        )
+
+
+def _ad_hoc_cache_key_from_args(
+    tokens: Tuple[Any, ...],
+    traverse_args: Iterable[Tuple[str, InternalTraversal]],
+    args: Iterable[Any],
+) -> Tuple[Any, ...]:
+    """a quick cache key generator used by reflection.flexi_cache."""
+    bindparams: List[BindParameter[Any]] = []
+
+    _anon_map = anon_map()
+
+    tup = tokens
+
+    for (attrname, sym), arg in zip(traverse_args, args):
+        key = sym.name
+        visit_key = key.replace("dp_", "visit_")
+
+        if arg is None:
+            tup += (attrname, None)
+            continue
+
+        meth = getattr(_cache_key_traversal_visitor, visit_key)
+        if meth is CACHE_IN_PLACE:
+            tup += (attrname, arg)
+        elif meth in (
+            CALL_GEN_CACHE_KEY,
+            STATIC_CACHE_KEY,
+            ANON_NAME,
+            PROPAGATE_ATTRS,
+        ):
+            raise NotImplementedError(
+                f"Haven't implemented symbol {meth} for ad-hoc key from args"
+            )
+        else:
+            tup += meth(attrname, arg, None, _anon_map, bindparams)
+    return tup
+
+
+class _CacheKeyTraversal(HasTraversalDispatch):
+    # very common elements are inlined into the main _gen_cache_key() method
+    # to produce dramatic savings in Python function call overhead
+
+    visit_has_cache_key = visit_clauseelement = CALL_GEN_CACHE_KEY
+    visit_clauseelement_list = InternalTraversal.dp_clauseelement_list
+    visit_annotations_key = InternalTraversal.dp_annotations_key
+    visit_clauseelement_tuple = InternalTraversal.dp_clauseelement_tuple
+    visit_memoized_select_entities = (
+        InternalTraversal.dp_memoized_select_entities
+    )
+
+    visit_string = visit_boolean = visit_operator = visit_plain_obj = (
+        CACHE_IN_PLACE
+    )
+    visit_statement_hint_list = CACHE_IN_PLACE
+    visit_type = STATIC_CACHE_KEY
+    visit_anon_name = ANON_NAME
+
+    visit_propagate_attrs = PROPAGATE_ATTRS
+
+    def visit_with_context_options(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        return tuple((fn.__code__, c_key) for fn, c_key in obj)
+
+    def visit_inspectable(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        return (attrname, inspect(obj)._gen_cache_key(anon_map, bindparams))
+
+    def visit_string_list(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        return tuple(obj)
+
+    def visit_multi(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        return (
+            attrname,
+            (
+                obj._gen_cache_key(anon_map, bindparams)
+                if isinstance(obj, HasCacheKey)
+                else obj
+            ),
+        )
+
+    def visit_multi_list(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        return (
+            attrname,
+            tuple(
+                (
+                    elem._gen_cache_key(anon_map, bindparams)
+                    if isinstance(elem, HasCacheKey)
+                    else elem
+                )
+                for elem in obj
+            ),
+        )
+
+    def visit_has_cache_key_tuples(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        if not obj:
+            return ()
+        return (
+            attrname,
+            tuple(
+                tuple(
+                    elem._gen_cache_key(anon_map, bindparams)
+                    for elem in tup_elem
+                )
+                for tup_elem in obj
+            ),
+        )
+
+    def visit_has_cache_key_list(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        if not obj:
+            return ()
+        return (
+            attrname,
+            tuple(elem._gen_cache_key(anon_map, bindparams) for elem in obj),
+        )
+
+    def visit_executable_options(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        if not obj:
+            return ()
+        return (
+            attrname,
+            tuple(
+                elem._gen_cache_key(anon_map, bindparams)
+                for elem in obj
+                if elem._is_has_cache_key
+            ),
+        )
+
+    def visit_inspectable_list(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        return self.visit_has_cache_key_list(
+            attrname, [inspect(o) for o in obj], parent, anon_map, bindparams
+        )
+
+    def visit_clauseelement_tuples(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        return self.visit_has_cache_key_tuples(
+            attrname, obj, parent, anon_map, bindparams
+        )
+
+    def visit_fromclause_ordered_set(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        if not obj:
+            return ()
+        return (
+            attrname,
+            tuple([elem._gen_cache_key(anon_map, bindparams) for elem in obj]),
+        )
+
+    def visit_clauseelement_unordered_set(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        if not obj:
+            return ()
+        cache_keys = [
+            elem._gen_cache_key(anon_map, bindparams) for elem in obj
+        ]
+        return (
+            attrname,
+            tuple(
+                sorted(cache_keys)
+            ),  # cache keys all start with (id_, class)
+        )
+
+    def visit_named_ddl_element(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        return (attrname, obj.name)
+
+    def visit_prefix_sequence(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        if not obj:
+            return ()
+
+        return (
+            attrname,
+            tuple(
+                [
+                    (clause._gen_cache_key(anon_map, bindparams), strval)
+                    for clause, strval in obj
+                ]
+            ),
+        )
+
+    def visit_setup_join_tuple(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        return tuple(
+            (
+                target._gen_cache_key(anon_map, bindparams),
+                (
+                    onclause._gen_cache_key(anon_map, bindparams)
+                    if onclause is not None
+                    else None
+                ),
+                (
+                    from_._gen_cache_key(anon_map, bindparams)
+                    if from_ is not None
+                    else None
+                ),
+                tuple([(key, flags[key]) for key in sorted(flags)]),
+            )
+            for (target, onclause, from_, flags) in obj
+        )
+
+    def visit_table_hint_list(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        if not obj:
+            return ()
+
+        return (
+            attrname,
+            tuple(
+                [
+                    (
+                        clause._gen_cache_key(anon_map, bindparams),
+                        dialect_name,
+                        text,
+                    )
+                    for (clause, dialect_name), text in obj.items()
+                ]
+            ),
+        )
+
+    def visit_plain_dict(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        return (attrname, tuple([(key, obj[key]) for key in sorted(obj)]))
+
+    def visit_dialect_options(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        return (
+            attrname,
+            tuple(
+                (
+                    dialect_name,
+                    tuple(
+                        [
+                            (key, obj[dialect_name][key])
+                            for key in sorted(obj[dialect_name])
+                        ]
+                    ),
+                )
+                for dialect_name in sorted(obj)
+            ),
+        )
+
+    def visit_string_clauseelement_dict(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        return (
+            attrname,
+            tuple(
+                (key, obj[key]._gen_cache_key(anon_map, bindparams))
+                for key in sorted(obj)
+            ),
+        )
+
+    def visit_string_multi_dict(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        return (
+            attrname,
+            tuple(
+                (
+                    key,
+                    (
+                        value._gen_cache_key(anon_map, bindparams)
+                        if isinstance(value, HasCacheKey)
+                        else value
+                    ),
+                )
+                for key, value in [(key, obj[key]) for key in sorted(obj)]
+            ),
+        )
+
+    def visit_fromclause_canonical_column_collection(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        # inlining into the internals of ColumnCollection
+        return (
+            attrname,
+            tuple(
+                col._gen_cache_key(anon_map, bindparams)
+                for k, col, _ in obj._collection
+            ),
+        )
+
+    def visit_unknown_structure(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        anon_map[NO_CACHE] = True
+        return ()
+
+    def visit_dml_ordered_values(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        return (
+            attrname,
+            tuple(
+                (
+                    (
+                        key._gen_cache_key(anon_map, bindparams)
+                        if hasattr(key, "__clause_element__")
+                        else key
+                    ),
+                    value._gen_cache_key(anon_map, bindparams),
+                )
+                for key, value in obj
+            ),
+        )
+
+    def visit_dml_values(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        # as of Python 3.7, dictionaries preserve insertion order, so two
+        # dictionaries created with the same insert ordering will iterate
+        # in that same order
+        return (
+            attrname,
+            tuple(
+                (
+                    (
+                        k._gen_cache_key(anon_map, bindparams)
+                        if hasattr(k, "__clause_element__")
+                        else k
+                    ),
+                    obj[k]._gen_cache_key(anon_map, bindparams),
+                )
+                for k in obj
+            ),
+        )
+
+    def visit_dml_multi_values(
+        self,
+        attrname: str,
+        obj: Any,
+        parent: Any,
+        anon_map: anon_map,
+        bindparams: List[BindParameter[Any]],
+    ) -> Tuple[Any, ...]:
+        # multivalues are simply not cacheable right now
+        anon_map[NO_CACHE] = True
+        return ()
+
+
+_cache_key_traversal_visitor = _CacheKeyTraversal()
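+
+
+# Illustrative sketch (not part of SQLAlchemy; the helper name is
+# hypothetical): shows the NO_CACHE mechanism used by
+# visit_unknown_structure and visit_dml_multi_values above -- a
+# multi-VALUES INSERT is uncacheable, so its cache key is None.
+def _example_no_cache() -> None:
+    from sqlalchemy import column, insert, table
+
+    t = table("t", column("x"), column("y"))
+    stmt = insert(t).values([{"x": 1, "y": 2}, {"x": 3, "y": 4}])
+    assert stmt._generate_cache_key() is None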
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/coercions.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/coercions.py
new file mode 100644
index 00000000..802ce757
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/coercions.py
@@ -0,0 +1,1403 @@
+# sql/coercions.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+from __future__ import annotations
+
+import collections.abc as collections_abc
+import numbers
+import re
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import NoReturn
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import roles
+from . import visitors
+from ._typing import is_from_clause
+from .base import ExecutableOption
+from .base import Options
+from .cache_key import HasCacheKey
+from .visitors import Visitable
+from .. import exc
+from .. import inspection
+from .. import util
+from ..util.typing import Literal
+
+if typing.TYPE_CHECKING:
+    # elements, lambdas, schema, and selectable are set at runtime by __init__
+    from . import elements
+    from . import lambdas
+    from . import schema
+    from . import selectable
+    from ._typing import _ColumnExpressionArgument
+    from ._typing import _ColumnsClauseArgument
+    from ._typing import _DDLColumnArgument
+    from ._typing import _DMLTableArgument
+    from ._typing import _FromClauseArgument
+    from .dml import _DMLTableElement
+    from .elements import BindParameter
+    from .elements import ClauseElement
+    from .elements import ColumnClause
+    from .elements import ColumnElement
+    from .elements import NamedColumn
+    from .elements import SQLCoreOperations
+    from .elements import TextClause
+    from .schema import Column
+    from .selectable import _ColumnsClauseElement
+    from .selectable import _JoinTargetProtocol
+    from .selectable import FromClause
+    from .selectable import HasCTE
+    from .selectable import SelectBase
+    from .selectable import Subquery
+    from .visitors import _TraverseCallableType
+
+_SR = TypeVar("_SR", bound=roles.SQLRole)
+_F = TypeVar("_F", bound=Callable[..., Any])
+_StringOnlyR = TypeVar("_StringOnlyR", bound=roles.StringRole)
+_T = TypeVar("_T", bound=Any)
+
+
+def _is_literal(element):
+    """Return whether or not the element is a "literal" in the context
+    of a SQL expression construct.
+
+    """
+
+    return not isinstance(
+        element,
+        (Visitable, schema.SchemaEventTarget),
+    ) and not hasattr(element, "__clause_element__")
+
+
+def _deep_is_literal(element):
+    """Return whether or not the element is a "literal" in the context
+    of a SQL expression construct.
+
+    Does a deeper, more esoteric check than _is_literal.  It is used
+    for lambda elements that have to distinguish values that would
+    be bound vs. not, without any context.
+
+    """
+
+    if isinstance(element, collections_abc.Sequence) and not isinstance(
+        element, str
+    ):
+        for elem in element:
+            if not _deep_is_literal(elem):
+                return False
+        else:
+            return True
+
+    return (
+        not isinstance(
+            element,
+            (
+                Visitable,
+                schema.SchemaEventTarget,
+                HasCacheKey,
+                Options,
+                util.langhelpers.symbol,
+            ),
+        )
+        and not hasattr(element, "__clause_element__")
+        and (
+            not isinstance(element, type)
+            or not issubclass(element, HasCacheKey)
+        )
+    )
+
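+
+# Illustrative sketch (not part of SQLAlchemy; the helper name is
+# hypothetical): exercises the two predicates above.  Plain Python values
+# count as literals; anything Visitable or bearing __clause_element__
+# does not, and _deep_is_literal additionally descends into sequences.
+def _example_is_literal() -> None:
+    from sqlalchemy import column
+
+    assert _is_literal(5)
+    assert _is_literal("plain string")
+    assert not _is_literal(column("x"))
+
+    assert _deep_is_literal([1, 2, 3])
+    assert not _deep_is_literal([1, column("x")])
+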
+
+def _document_text_coercion(
+    paramname: str, meth_rst: str, param_rst: str
+) -> Callable[[_F], _F]:
+    return util.add_parameter_text(
+        paramname,
+        (
+            ".. warning:: "
+            "The %s argument to %s can be passed as a Python string argument, "
+            "which will be treated "
+            "as **trusted SQL text** and rendered as given.  **DO NOT PASS "
+            "UNTRUSTED INPUT TO THIS PARAMETER**."
+        )
+        % (param_rst, meth_rst),
+    )
+
+
+def _expression_collection_was_a_list(
+    attrname: str,
+    fnname: str,
+    args: Union[Sequence[_T], Sequence[Sequence[_T]]],
+) -> Sequence[_T]:
+    if args and isinstance(args[0], (list, set, dict)) and len(args) == 1:
+        if isinstance(args[0], list):
+            raise exc.ArgumentError(
+                f'The "{attrname}" argument to {fnname}(), when '
+                "referring to a sequence "
+                "of items, is now passed as a series of positional "
+                "elements, rather than as a list. "
+            )
+        return cast("Sequence[_T]", args[0])
+
+    return cast("Sequence[_T]", args)
+
+
+@overload
+def expect(
+    role: Type[roles.TruncatedLabelRole],
+    element: Any,
+    **kw: Any,
+) -> str: ...
+
+
+@overload
+def expect(
+    role: Type[roles.DMLColumnRole],
+    element: Any,
+    *,
+    as_key: Literal[True] = ...,
+    **kw: Any,
+) -> str: ...
+
+
+@overload
+def expect(
+    role: Type[roles.LiteralValueRole],
+    element: Any,
+    **kw: Any,
+) -> BindParameter[Any]: ...
+
+
+@overload
+def expect(
+    role: Type[roles.DDLReferredColumnRole],
+    element: Any,
+    **kw: Any,
+) -> Union[Column[Any], str]: ...
+
+
+@overload
+def expect(
+    role: Type[roles.DDLConstraintColumnRole],
+    element: Any,
+    **kw: Any,
+) -> Union[Column[Any], str]: ...
+
+
+@overload
+def expect(
+    role: Type[roles.StatementOptionRole],
+    element: Any,
+    **kw: Any,
+) -> Union[ColumnElement[Any], TextClause]: ...
+
+
+@overload
+def expect(
+    role: Type[roles.LabeledColumnExprRole[Any]],
+    element: _ColumnExpressionArgument[_T],
+    **kw: Any,
+) -> NamedColumn[_T]: ...
+
+
+@overload
+def expect(
+    role: Union[
+        Type[roles.ExpressionElementRole[Any]],
+        Type[roles.LimitOffsetRole],
+        Type[roles.WhereHavingRole],
+    ],
+    element: _ColumnExpressionArgument[_T],
+    **kw: Any,
+) -> ColumnElement[_T]: ...
+
+
+@overload
+def expect(
+    role: Union[
+        Type[roles.ExpressionElementRole[Any]],
+        Type[roles.LimitOffsetRole],
+        Type[roles.WhereHavingRole],
+        Type[roles.OnClauseRole],
+        Type[roles.ColumnArgumentRole],
+    ],
+    element: Any,
+    **kw: Any,
+) -> ColumnElement[Any]: ...
+
+
+@overload
+def expect(
+    role: Type[roles.DMLTableRole],
+    element: _DMLTableArgument,
+    **kw: Any,
+) -> _DMLTableElement: ...
+
+
+@overload
+def expect(
+    role: Type[roles.HasCTERole],
+    element: HasCTE,
+    **kw: Any,
+) -> HasCTE: ...
+
+
+@overload
+def expect(
+    role: Type[roles.SelectStatementRole],
+    element: SelectBase,
+    **kw: Any,
+) -> SelectBase: ...
+
+
+@overload
+def expect(
+    role: Type[roles.FromClauseRole],
+    element: _FromClauseArgument,
+    **kw: Any,
+) -> FromClause: ...
+
+
+@overload
+def expect(
+    role: Type[roles.FromClauseRole],
+    element: SelectBase,
+    *,
+    explicit_subquery: Literal[True] = ...,
+    **kw: Any,
+) -> Subquery: ...
+
+
+@overload
+def expect(
+    role: Type[roles.ColumnsClauseRole],
+    element: _ColumnsClauseArgument[Any],
+    **kw: Any,
+) -> _ColumnsClauseElement: ...
+
+
+@overload
+def expect(
+    role: Type[roles.JoinTargetRole],
+    element: _JoinTargetProtocol,
+    **kw: Any,
+) -> _JoinTargetProtocol: ...
+
+
+# catchall for not-yet-implemented overloads
+@overload
+def expect(
+    role: Type[_SR],
+    element: Any,
+    **kw: Any,
+) -> Any: ...
+
+
+def expect(
+    role: Type[_SR],
+    element: Any,
+    *,
+    apply_propagate_attrs: Optional[ClauseElement] = None,
+    argname: Optional[str] = None,
+    post_inspect: bool = False,
+    disable_inspection: bool = False,
+    **kw: Any,
+) -> Any:
+    if (
+        role.allows_lambda
+        # note callable() will not invoke a __getattr__() method, whereas
+        # hasattr(obj, "__call__") will.  by keeping the callable() check
+        # here we prevent most needless calls to hasattr() and therefore
+        # __getattr__(), which is present on ColumnElement.
+        and callable(element)
+        and hasattr(element, "__code__")
+    ):
+        return lambdas.LambdaElement(
+            element,
+            role,
+            lambdas.LambdaOptions(**kw),
+            apply_propagate_attrs=apply_propagate_attrs,
+        )
+
+    # major case is that we are given a ClauseElement already, skip more
+    # elaborate logic up front if possible
+    impl = _impl_lookup[role]
+
+    original_element = element
+
+    if not isinstance(
+        element,
+        (
+            elements.CompilerElement,
+            schema.SchemaItem,
+            schema.FetchedValue,
+            lambdas.PyWrapper,
+        ),
+    ):
+        resolved = None
+
+        if impl._resolve_literal_only:
+            resolved = impl._literal_coercion(element, **kw)
+        else:
+            original_element = element
+
+            is_clause_element = False
+
+            # this is a special performance optimization for ORM joins
+            # used by JoinTargetImpl, so that we don't go through the work
+            # of calling __clause_element__() when we only need the original
+            # QueryableAttribute; the former would perform clause adaptation
+            # and other work that is just thrown away here.
+            if (
+                impl._skip_clauseelement_for_target_match
+                and isinstance(element, role)
+                and hasattr(element, "__clause_element__")
+            ):
+                is_clause_element = True
+            else:
+                while hasattr(element, "__clause_element__"):
+                    is_clause_element = True
+
+                    if not getattr(element, "is_clause_element", False):
+                        element = element.__clause_element__()
+                    else:
+                        break
+
+            if not is_clause_element:
+                if impl._use_inspection and not disable_inspection:
+                    insp = inspection.inspect(element, raiseerr=False)
+                    if insp is not None:
+                        if post_inspect:
+                            insp._post_inspect
+                        try:
+                            resolved = insp.__clause_element__()
+                        except AttributeError:
+                            impl._raise_for_expected(original_element, argname)
+
+                if resolved is None:
+                    resolved = impl._literal_coercion(
+                        element, argname=argname, **kw
+                    )
+            else:
+                resolved = element
+    elif isinstance(element, lambdas.PyWrapper):
+        resolved = element._sa__py_wrapper_literal(**kw)
+    else:
+        resolved = element
+
+    if apply_propagate_attrs is not None:
+        if typing.TYPE_CHECKING:
+            assert isinstance(resolved, (SQLCoreOperations, ClauseElement))
+
+        if not apply_propagate_attrs._propagate_attrs and getattr(
+            resolved, "_propagate_attrs", None
+        ):
+            apply_propagate_attrs._propagate_attrs = resolved._propagate_attrs
+
+    if impl._role_class in resolved.__class__.__mro__:
+        if impl._post_coercion:
+            resolved = impl._post_coercion(
+                resolved,
+                argname=argname,
+                original_element=original_element,
+                **kw,
+            )
+        return resolved
+    else:
+        return impl._implicit_coercions(
+            original_element, resolved, argname=argname, **kw
+        )
+
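+
+# Illustrative sketch (not part of SQLAlchemy; the helper name is
+# hypothetical): demonstrates expect().  A plain value handed to an
+# expression role is coerced to a BindParameter, while an element that
+# already satisfies the role passes through unchanged.
+def _example_expect() -> None:
+    from sqlalchemy import column
+
+    bound = expect(roles.ExpressionElementRole, 5)
+    assert isinstance(bound, elements.BindParameter)
+
+    col = column("x")
+    assert expect(roles.ExpressionElementRole, col) is col
+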
+
+def expect_as_key(
+    role: Type[roles.DMLColumnRole], element: Any, **kw: Any
+) -> str:
+    kw.pop("as_key", None)
+    return expect(role, element, as_key=True, **kw)
+
+
+def expect_col_expression_collection(
+    role: Type[roles.DDLConstraintColumnRole],
+    expressions: Iterable[_DDLColumnArgument],
+) -> Iterator[
+    Tuple[
+        Union[str, Column[Any]],
+        Optional[ColumnClause[Any]],
+        Optional[str],
+        Optional[Union[Column[Any], str]],
+    ]
+]:
+    for expr in expressions:
+        strname = None
+        column = None
+
+        resolved: Union[Column[Any], str] = expect(role, expr)
+        if isinstance(resolved, str):
+            assert isinstance(expr, str)
+            strname = resolved = expr
+        else:
+            cols: List[Column[Any]] = []
+            col_append: _TraverseCallableType[Column[Any]] = cols.append
+            visitors.traverse(resolved, {}, {"column": col_append})
+            if cols:
+                column = cols[0]
+        add_element = column if column is not None else strname
+
+        yield resolved, column, strname, add_element
+
+
+class RoleImpl:
+    __slots__ = ("_role_class", "name", "_use_inspection")
+
+    def _literal_coercion(self, element, **kw):
+        raise NotImplementedError()
+
+    _post_coercion: Any = None
+    _resolve_literal_only = False
+    _skip_clauseelement_for_target_match = False
+
+    def __init__(self, role_class):
+        self._role_class = role_class
+        self.name = role_class._role_name
+        self._use_inspection = issubclass(role_class, roles.UsesInspection)
+
+    def _implicit_coercions(
+        self,
+        element: Any,
+        resolved: Any,
+        argname: Optional[str] = None,
+        **kw: Any,
+    ) -> Any:
+        self._raise_for_expected(element, argname, resolved)
+
+    def _raise_for_expected(
+        self,
+        element: Any,
+        argname: Optional[str] = None,
+        resolved: Optional[Any] = None,
+        *,
+        advice: Optional[str] = None,
+        code: Optional[str] = None,
+        err: Optional[Exception] = None,
+        **kw: Any,
+    ) -> NoReturn:
+        if resolved is not None and resolved is not element:
+            got = "%r object resolved from %r object" % (resolved, element)
+        else:
+            got = repr(element)
+
+        if argname:
+            msg = "%s expected for argument %r; got %s." % (
+                self.name,
+                argname,
+                got,
+            )
+        else:
+            msg = "%s expected, got %s." % (self.name, got)
+
+        if advice:
+            msg += " " + advice
+
+        raise exc.ArgumentError(msg, code=code) from err
+
+
+class _Deannotate:
+    __slots__ = ()
+
+    def _post_coercion(self, resolved, **kw):
+        from .util import _deep_deannotate
+
+        return _deep_deannotate(resolved)
+
+
+class _StringOnly:
+    __slots__ = ()
+
+    _resolve_literal_only = True
+
+
+class _ReturnsStringKey(RoleImpl):
+    __slots__ = ()
+
+    def _implicit_coercions(self, element, resolved, argname=None, **kw):
+        if isinstance(element, str):
+            return element
+        else:
+            self._raise_for_expected(element, argname, resolved)
+
+    def _literal_coercion(self, element, **kw):
+        return element
+
+
+class _ColumnCoercions(RoleImpl):
+    __slots__ = ()
+
+    def _warn_for_scalar_subquery_coercion(self):
+        util.warn(
+            "implicitly coercing SELECT object to scalar subquery; "
+            "please use the .scalar_subquery() method to produce a scalar "
+            "subquery.",
+        )
+
+    def _implicit_coercions(self, element, resolved, argname=None, **kw):
+        original_element = element
+        if not getattr(resolved, "is_clause_element", False):
+            self._raise_for_expected(original_element, argname, resolved)
+        elif resolved._is_select_base:
+            self._warn_for_scalar_subquery_coercion()
+            return resolved.scalar_subquery()
+        elif resolved._is_from_clause and isinstance(
+            resolved, selectable.Subquery
+        ):
+            self._warn_for_scalar_subquery_coercion()
+            return resolved.element.scalar_subquery()
+        elif self._role_class.allows_lambda and resolved._is_lambda_element:
+            return resolved
+        else:
+            self._raise_for_expected(original_element, argname, resolved)
+
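+
+# Illustrative sketch (not part of SQLAlchemy; the helper name is
+# hypothetical): comparing a column to a full SELECT goes through
+# _ColumnCoercions above, which warns and coerces to a scalar subquery;
+# calling .scalar_subquery() explicitly avoids the warning.
+def _example_scalar_subquery_coercion() -> None:
+    from sqlalchemy import column, select, table
+
+    t = table("t", column("x"))
+    inner = select(t.c.x)
+
+    _ = t.c.x == inner                     # warns, coerced for you
+    _ = t.c.x == inner.scalar_subquery()   # explicit, no warning
+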
+
+def _no_text_coercion(
+    element: Any,
+    argname: Optional[str] = None,
+    exc_cls: Type[exc.SQLAlchemyError] = exc.ArgumentError,
+    extra: Optional[str] = None,
+    err: Optional[Exception] = None,
+) -> NoReturn:
+    raise exc_cls(
+        "%(extra)sTextual SQL expression %(expr)r %(argname)sshould be "
+        "explicitly declared as text(%(expr)r)"
+        % {
+            "expr": util.ellipses_string(element),
+            "argname": "for argument %s" % (argname,) if argname else "",
+            "extra": "%s " % extra if extra else "",
+        }
+    ) from err
+
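+
+# Illustrative sketch (not part of SQLAlchemy; the helper name is
+# hypothetical): shows the error raised via _no_text_coercion() when a
+# raw string appears where textual SQL must be declared with text().
+def _example_no_text_coercion() -> None:
+    from sqlalchemy import column, select, text
+
+    try:
+        select(column("x")).where("x > 5")  # type: ignore[arg-type]
+    except exc.ArgumentError:
+        pass  # "... should be explicitly declared as text('x > 5')"
+
+    select(column("x")).where(text("x > 5"))  # the supported spelling
+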
+
+class _NoTextCoercion(RoleImpl):
+    __slots__ = ()
+
+    def _literal_coercion(self, element, *, argname=None, **kw):
+        if isinstance(element, str) and issubclass(
+            elements.TextClause, self._role_class
+        ):
+            _no_text_coercion(element, argname)
+        else:
+            self._raise_for_expected(element, argname)
+
+
+class _CoerceLiterals(RoleImpl):
+    __slots__ = ()
+    _coerce_consts = False
+    _coerce_star = False
+    _coerce_numerics = False
+
+    def _text_coercion(self, element, argname=None):
+        return _no_text_coercion(element, argname)
+
+    def _literal_coercion(self, element, *, argname=None, **kw):
+        if isinstance(element, str):
+            if self._coerce_star and element == "*":
+                return elements.ColumnClause("*", is_literal=True)
+            else:
+                return self._text_coercion(element, argname, **kw)
+
+        if self._coerce_consts:
+            if element is None:
+                return elements.Null()
+            elif element is False:
+                return elements.False_()
+            elif element is True:
+                return elements.True_()
+
+        if self._coerce_numerics and isinstance(element, (numbers.Number)):
+            return elements.ColumnClause(str(element), is_literal=True)
+
+        self._raise_for_expected(element, argname)
+
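+
+# Illustrative sketch (not part of SQLAlchemy; the helper name is
+# hypothetical): shows the constant and star coercions enabled by
+# _CoerceLiterals subclasses that set the corresponding flags.
+def _example_coerce_literals() -> None:
+    from sqlalchemy import column, select, table
+
+    t = table("t", column("x"))
+
+    select(t).where(True)       # _coerce_consts: True -> elements.True_()
+    select("*").select_from(t)  # _coerce_star: "*" -> literal ColumnClause
+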
+
+class LiteralValueImpl(RoleImpl):
+    _resolve_literal_only = True
+
+    def _implicit_coercions(
+        self,
+        element,
+        resolved,
+        argname=None,
+        *,
+        type_=None,
+        literal_execute=False,
+        **kw,
+    ):
+        if not _is_literal(resolved):
+            self._raise_for_expected(
+                element, resolved=resolved, argname=argname, **kw
+            )
+
+        return elements.BindParameter(
+            None,
+            element,
+            type_=type_,
+            unique=True,
+            literal_execute=literal_execute,
+        )
+
+    def _literal_coercion(self, element, **kw):
+        return element
+
+
+class _SelectIsNotFrom(RoleImpl):
+    __slots__ = ()
+
+    def _raise_for_expected(
+        self,
+        element: Any,
+        argname: Optional[str] = None,
+        resolved: Optional[Any] = None,
+        *,
+        advice: Optional[str] = None,
+        code: Optional[str] = None,
+        err: Optional[Exception] = None,
+        **kw: Any,
+    ) -> NoReturn:
+        if not advice and (
+            isinstance(element, roles.SelectStatementRole)
+            or isinstance(resolved, roles.SelectStatementRole)
+        ):
+            advice = (
+                "To create a "
+                "FROM clause from a %s object, use the .subquery() method."
+                % (resolved.__class__ if resolved is not None else element,)
+            )
+            code = "89ve"
+        else:
+            code = None
+
+        super()._raise_for_expected(
+            element,
+            argname=argname,
+            resolved=resolved,
+            advice=advice,
+            code=code,
+            err=err,
+            **kw,
+        )
+        # never reached
+        assert False
+
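+
+# Illustrative sketch (not part of SQLAlchemy; the helper name is
+# hypothetical): triggers the advice added by _SelectIsNotFrom above when
+# a SELECT is passed where a column expression or FROM clause is required.
+def _example_select_is_not_from() -> None:
+    from sqlalchemy import column, select, table
+
+    t = table("t", column("x"))
+    try:
+        select(select(t))  # type: ignore[call-overload]
+    except exc.ArgumentError as err:
+        assert "use the .subquery() method" in str(err)
+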
+
+class HasCacheKeyImpl(RoleImpl):
+    __slots__ = ()
+
+    def _implicit_coercions(
+        self,
+        element: Any,
+        resolved: Any,
+        argname: Optional[str] = None,
+        **kw: Any,
+    ) -> Any:
+        if isinstance(element, HasCacheKey):
+            return element
+        else:
+            self._raise_for_expected(element, argname, resolved)
+
+    def _literal_coercion(self, element, **kw):
+        return element
+
+
+class ExecutableOptionImpl(RoleImpl):
+    __slots__ = ()
+
+    def _implicit_coercions(
+        self,
+        element: Any,
+        resolved: Any,
+        argname: Optional[str] = None,
+        **kw: Any,
+    ) -> Any:
+        if isinstance(element, ExecutableOption):
+            return element
+        else:
+            self._raise_for_expected(element, argname, resolved)
+
+    def _literal_coercion(self, element, **kw):
+        return element
+
+
+class ExpressionElementImpl(_ColumnCoercions, RoleImpl):
+    __slots__ = ()
+
+    def _literal_coercion(
+        self, element, *, name=None, type_=None, is_crud=False, **kw
+    ):
+        if (
+            element is None
+            and not is_crud
+            and (type_ is None or not type_.should_evaluate_none)
+        ):
+            # TODO: there's no test coverage now for the
+            # "should_evaluate_none" part of this, as outside of "crud" this
+            # codepath is not normally used except in some special cases
+            return elements.Null()
+        else:
+            try:
+                return elements.BindParameter(
+                    name, element, type_, unique=True, _is_crud=is_crud
+                )
+            except exc.ArgumentError as err:
+                self._raise_for_expected(element, err=err)
+
+    def _raise_for_expected(self, element, argname=None, resolved=None, **kw):
+        # select uses implicit coercion with warning instead of raising
+        if isinstance(element, selectable.Values):
+            advice = (
+                "To create a column expression from a VALUES clause, "
+                "use the .scalar_values() method."
+            )
+        elif isinstance(element, roles.AnonymizedFromClauseRole):
+            advice = (
+                "To create a column expression from a FROM clause row "
+                "as a whole, use the .table_valued() method."
+            )
+        else:
+            advice = None
+
+        return super()._raise_for_expected(
+            element, argname=argname, resolved=resolved, advice=advice, **kw
+        )
+
+
+class BinaryElementImpl(ExpressionElementImpl, RoleImpl):
+    __slots__ = ()
+
+    def _literal_coercion(  # type: ignore[override]
+        self,
+        element,
+        *,
+        expr,
+        operator,
+        bindparam_type=None,
+        argname=None,
+        **kw,
+    ):
+        try:
+            return expr._bind_param(operator, element, type_=bindparam_type)
+        except exc.ArgumentError as err:
+            self._raise_for_expected(element, err=err)
+
+    def _post_coercion(self, resolved, *, expr, bindparam_type=None, **kw):
+        if resolved.type._isnull and not expr.type._isnull:
+            resolved = resolved._with_binary_element_type(
+                bindparam_type if bindparam_type is not None else expr.type
+            )
+        return resolved
+
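+
+# Illustrative sketch (not part of SQLAlchemy; the helper name is
+# hypothetical): shows BinaryElementImpl._post_coercion above giving a
+# typeless bound parameter the type of the other side of the comparison.
+def _example_binary_type_propagation() -> None:
+    from sqlalchemy import Integer, bindparam, column
+
+    expr = column("x", Integer) == bindparam("y")  # "y" has no type
+    assert isinstance(expr.right.type, Integer)    # inherited from "x"
+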
+
+class InElementImpl(RoleImpl):
+    __slots__ = ()
+
+    def _implicit_coercions(
+        self,
+        element: Any,
+        resolved: Any,
+        argname: Optional[str] = None,
+        **kw: Any,
+    ) -> Any:
+        if resolved._is_from_clause:
+            if (
+                isinstance(resolved, selectable.Alias)
+                and resolved.element._is_select_base
+            ):
+                self._warn_for_implicit_coercion(resolved)
+                return self._post_coercion(resolved.element, **kw)
+            else:
+                self._warn_for_implicit_coercion(resolved)
+                return self._post_coercion(resolved.select(), **kw)
+        else:
+            self._raise_for_expected(element, argname, resolved)
+
+    def _warn_for_implicit_coercion(self, elem):
+        util.warn(
+            "Coercing %s object into a select() for use in IN(); "
+            "please pass a select() construct explicitly"
+            % (elem.__class__.__name__)
+        )
+
+    @util.preload_module("sqlalchemy.sql.elements")
+    def _literal_coercion(self, element, *, expr, operator, **kw):
+        if util.is_non_string_iterable(element):
+            non_literal_expressions: Dict[
+                Optional[_ColumnExpressionArgument[Any]],
+                _ColumnExpressionArgument[Any],
+            ] = {}
+            element = list(element)
+            for o in element:
+                if not _is_literal(o):
+                    if not isinstance(
+                        o, util.preloaded.sql_elements.ColumnElement
+                    ) and not hasattr(o, "__clause_element__"):
+                        self._raise_for_expected(element, **kw)
+
+                    else:
+                        non_literal_expressions[o] = o
+
+            if non_literal_expressions:
+                return elements.ClauseList(
+                    *[
+                        (
+                            non_literal_expressions[o]
+                            if o in non_literal_expressions
+                            else expr._bind_param(operator, o)
+                        )
+                        for o in element
+                    ]
+                )
+            else:
+                return expr._bind_param(operator, element, expanding=True)
+
+        else:
+            self._raise_for_expected(element, **kw)
+
+    def _post_coercion(self, element, *, expr, operator, **kw):
+        if element._is_select_base:
+            # for IN, we are doing scalar_subquery() coercion without
+            # a warning
+            return element.scalar_subquery()
+        elif isinstance(element, elements.ClauseList):
+            assert len(element.clauses) != 0
+            return element.self_group(against=operator)
+
+        elif isinstance(element, elements.BindParameter):
+            element = element._clone(maintain_key=True)
+            element.expanding = True
+            element.expand_op = operator
+
+            return element
+        elif isinstance(element, selectable.Values):
+            return element.scalar_values()
+        else:
+            return element
+
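+
+# Illustrative sketch (not part of SQLAlchemy; the helper name is
+# hypothetical): shows the IN() coercions handled by InElementImpl above.
+def _example_in_coercion() -> None:
+    from sqlalchemy import column, select, table
+
+    t = table("t", column("x"))
+
+    crit = t.c.x.in_([1, 2, 3])
+    assert crit.right.expanding  # one "expanding" bind, expanded at execute
+
+    t.c.x.in_(select(t.c.x))     # SELECT -> scalar_subquery(), no warning
+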
+
+class OnClauseImpl(_ColumnCoercions, RoleImpl):
+    __slots__ = ()
+
+    _coerce_consts = True
+
+    def _literal_coercion(self, element, **kw):
+        self._raise_for_expected(element)
+
+    def _post_coercion(self, resolved, *, original_element=None, **kw):
+        # this is a hack right now as we want to use coercion on an
+        # ORM InstrumentedAttribute, but we want to return the object
+        # itself if it is one, not its clause element.
+        # ORM context _join and _legacy_join() would need to be improved
+        # to look for annotations in a clause element form.
+        if isinstance(original_element, roles.JoinTargetRole):
+            return original_element
+        return resolved
+
+
+class WhereHavingImpl(_CoerceLiterals, _ColumnCoercions, RoleImpl):
+    __slots__ = ()
+
+    _coerce_consts = True
+
+    def _text_coercion(self, element, argname=None):
+        return _no_text_coercion(element, argname)
+
+
+class StatementOptionImpl(_CoerceLiterals, RoleImpl):
+    __slots__ = ()
+
+    _coerce_consts = True
+
+    def _text_coercion(self, element, argname=None):
+        return elements.TextClause(element)
+
+
+class ColumnArgumentImpl(_NoTextCoercion, RoleImpl):
+    __slots__ = ()
+
+
+class ColumnArgumentOrKeyImpl(_ReturnsStringKey, RoleImpl):
+    __slots__ = ()
+
+
+class StrAsPlainColumnImpl(_CoerceLiterals, RoleImpl):
+    __slots__ = ()
+
+    def _text_coercion(self, element, argname=None):
+        return elements.ColumnClause(element)
+
+
+class ByOfImpl(_CoerceLiterals, _ColumnCoercions, RoleImpl, roles.ByOfRole):
+    __slots__ = ()
+
+    _coerce_consts = True
+
+    def _text_coercion(self, element, argname=None):
+        return elements._textual_label_reference(element)
+
+
+class OrderByImpl(ByOfImpl, RoleImpl):
+    __slots__ = ()
+
+    def _post_coercion(self, resolved, **kw):
+        if (
+            isinstance(resolved, self._role_class)
+            and resolved._order_by_label_element is not None
+        ):
+            return elements._label_reference(resolved)
+        else:
+            return resolved
+
+
+class GroupByImpl(ByOfImpl, RoleImpl):
+    __slots__ = ()
+
+    def _implicit_coercions(
+        self,
+        element: Any,
+        resolved: Any,
+        argname: Optional[str] = None,
+        **kw: Any,
+    ) -> Any:
+        if is_from_clause(resolved):
+            return elements.ClauseList(*resolved.c)
+        else:
+            return resolved
+
+
+class DMLColumnImpl(_ReturnsStringKey, RoleImpl):
+    __slots__ = ()
+
+    def _post_coercion(self, element, *, as_key=False, **kw):
+        if as_key:
+            return element.key
+        else:
+            return element
+
+
+class ConstExprImpl(RoleImpl):
+    __slots__ = ()
+
+    def _literal_coercion(self, element, *, argname=None, **kw):
+        if element is None:
+            return elements.Null()
+        elif element is False:
+            return elements.False_()
+        elif element is True:
+            return elements.True_()
+        else:
+            self._raise_for_expected(element, argname)
+
+
+class TruncatedLabelImpl(_StringOnly, RoleImpl):
+    __slots__ = ()
+
+    def _implicit_coercions(
+        self,
+        element: Any,
+        resolved: Any,
+        argname: Optional[str] = None,
+        **kw: Any,
+    ) -> Any:
+        if isinstance(element, str):
+            return resolved
+        else:
+            self._raise_for_expected(element, argname, resolved)
+
+    def _literal_coercion(self, element, **kw):
+        """coerce the given value to :class:`._truncated_label`.
+
+        Existing :class:`._truncated_label` and
+        :class:`._anonymous_label` objects are passed
+        unchanged.
+        """
+
+        if isinstance(element, elements._truncated_label):
+            return element
+        else:
+            return elements._truncated_label(element)
+
+
+class DDLExpressionImpl(_Deannotate, _CoerceLiterals, RoleImpl):
+    __slots__ = ()
+
+    _coerce_consts = True
+
+    def _text_coercion(self, element, argname=None):
+        # see #5754 for why we can't easily deprecate this coercion.
+        # essentially expressions like postgresql_where would have to be
+        # text() as they come back from reflection and we don't want to
+        # have text() elements wired into the inspection dictionaries.
+        return elements.TextClause(element)
+
+
+class DDLConstraintColumnImpl(_Deannotate, _ReturnsStringKey, RoleImpl):
+    __slots__ = ()
+
+
+class DDLReferredColumnImpl(DDLConstraintColumnImpl):
+    __slots__ = ()
+
+
+class LimitOffsetImpl(RoleImpl):
+    __slots__ = ()
+
+    def _implicit_coercions(
+        self,
+        element: Any,
+        resolved: Any,
+        argname: Optional[str] = None,
+        **kw: Any,
+    ) -> Any:
+        if resolved is None:
+            return None
+        else:
+            self._raise_for_expected(element, argname, resolved)
+
+    def _literal_coercion(  # type: ignore[override]
+        self, element, *, name, type_, **kw
+    ):
+        if element is None:
+            return None
+        else:
+            value = util.asint(element)
+            return selectable._OffsetLimitParam(
+                name, value, type_=type_, unique=True
+            )
+
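+
+# Illustrative sketch (not part of SQLAlchemy; the helper name is
+# hypothetical): LimitOffsetImpl above turns integer limits into bound
+# parameters (_OffsetLimitParam), so statements differing only in the
+# limit value share a cache key.
+def _example_limit_offset() -> None:
+    from sqlalchemy import column, select, table
+
+    t = table("t", column("x"))
+    k1 = select(t).limit(5)._generate_cache_key()
+    k2 = select(t).limit(500)._generate_cache_key()
+    assert k1 is not None and k1 == k2
+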
+
+class LabeledColumnExprImpl(ExpressionElementImpl):
+    __slots__ = ()
+
+    def _implicit_coercions(
+        self,
+        element: Any,
+        resolved: Any,
+        argname: Optional[str] = None,
+        **kw: Any,
+    ) -> Any:
+        if isinstance(resolved, roles.ExpressionElementRole):
+            return resolved.label(None)
+        else:
+            new = super()._implicit_coercions(
+                element, resolved, argname=argname, **kw
+            )
+            if isinstance(new, roles.ExpressionElementRole):
+                return new.label(None)
+            else:
+                self._raise_for_expected(element, argname, resolved)
+
+
+class ColumnsClauseImpl(_SelectIsNotFrom, _CoerceLiterals, RoleImpl):
+    __slots__ = ()
+
+    _coerce_consts = True
+    _coerce_numerics = True
+    _coerce_star = True
+
+    _guess_straight_column = re.compile(r"^\w\S*$", re.I)
+
+    def _raise_for_expected(
+        self, element, argname=None, resolved=None, *, advice=None, **kw
+    ):
+        if not advice and isinstance(element, list):
+            advice = (
+                f"Did you mean to say select("
+                f"{', '.join(repr(e) for e in element)})?"
+            )
+
+        return super()._raise_for_expected(
+            element, argname=argname, resolved=resolved, advice=advice, **kw
+        )
+
+    def _text_coercion(self, element, argname=None):
+        element = str(element)
+
+        guess_is_literal = not self._guess_straight_column.match(element)
+        raise exc.ArgumentError(
+            "Textual column expression %(column)r %(argname)sshould be "
+            "explicitly declared with text(%(column)r), "
+            "or use %(literal_column)s(%(column)r) "
+            "for more specificity"
+            % {
+                "column": util.ellipses_string(element),
+                "argname": "for argument %s" % (argname,) if argname else "",
+                "literal_column": (
+                    "literal_column" if guess_is_literal else "column"
+                ),
+            }
+        )
+
+
+class ReturnsRowsImpl(RoleImpl):
+    __slots__ = ()
+
+
+class StatementImpl(_CoerceLiterals, RoleImpl):
+    __slots__ = ()
+
+    def _post_coercion(
+        self, resolved, *, original_element, argname=None, **kw
+    ):
+        if resolved is not original_element and not isinstance(
+            original_element, str
+        ):
+            # use same method as Connection uses; this will later raise
+            # ObjectNotExecutableError
+            try:
+                original_element._execute_on_connection
+            except AttributeError:
+                util.warn_deprecated(
+                    "Object %r should not be used directly in a SQL statement "
+                    "context, such as passing to methods such as "
+                    "session.execute().  This usage will be disallowed in a "
+                    "future release.  "
+                    "Please use Core select() / update() / delete() etc. "
+                    "with Session.execute() and other statement execution "
+                    "methods." % original_element,
+                    "1.4",
+                )
+
+        return resolved
+
+    def _implicit_coercions(
+        self,
+        element: Any,
+        resolved: Any,
+        argname: Optional[str] = None,
+        **kw: Any,
+    ) -> Any:
+        if resolved._is_lambda_element:
+            return resolved
+        else:
+            return super()._implicit_coercions(
+                element, resolved, argname=argname, **kw
+            )
+
+
+class SelectStatementImpl(_NoTextCoercion, RoleImpl):
+    __slots__ = ()
+
+    def _implicit_coercions(
+        self,
+        element: Any,
+        resolved: Any,
+        argname: Optional[str] = None,
+        **kw: Any,
+    ) -> Any:
+        if resolved._is_text_clause:
+            return resolved.columns()
+        else:
+            self._raise_for_expected(element, argname, resolved)
+
+
+class HasCTEImpl(ReturnsRowsImpl):
+    __slots__ = ()
+
+
+class IsCTEImpl(RoleImpl):
+    __slots__ = ()
+
+
+class JoinTargetImpl(RoleImpl):
+    __slots__ = ()
+
+    _skip_clauseelement_for_target_match = True
+
+    def _literal_coercion(self, element, *, argname=None, **kw):
+        self._raise_for_expected(element, argname)
+
+    def _implicit_coercions(
+        self,
+        element: Any,
+        resolved: Any,
+        argname: Optional[str] = None,
+        *,
+        legacy: bool = False,
+        **kw: Any,
+    ) -> Any:
+        if isinstance(element, roles.JoinTargetRole):
+            # note that this codepath no longer occurs as of
+            # #6550, unless JoinTargetImpl._skip_clauseelement_for_target_match
+            # were set to False.
+            return element
+        elif legacy and resolved._is_select_base:
+            util.warn_deprecated(
+                "Implicit coercion of SELECT and textual SELECT "
+                "constructs into FROM clauses is deprecated; please call "
+                ".subquery() on any Core select or ORM Query object in "
+                "order to produce a subquery object.",
+                version="1.4",
+            )
+            # TODO: doing _implicit_subquery here causes tests to fail,
+            # how was this working before?  probably that ORM
+            # join logic treated it as a select and subquery would happen
+            # in _ORMJoin->Join
+            return resolved
+        else:
+            self._raise_for_expected(element, argname, resolved)
+
+
+class FromClauseImpl(_SelectIsNotFrom, _NoTextCoercion, RoleImpl):
+    __slots__ = ()
+
+    def _implicit_coercions(
+        self,
+        element: Any,
+        resolved: Any,
+        argname: Optional[str] = None,
+        *,
+        explicit_subquery: bool = False,
+        allow_select: bool = True,
+        **kw: Any,
+    ) -> Any:
+        if resolved._is_select_base:
+            if explicit_subquery:
+                return resolved.subquery()
+            elif allow_select:
+                util.warn_deprecated(
+                    "Implicit coercion of SELECT and textual SELECT "
+                    "constructs into FROM clauses is deprecated; please call "
+                    ".subquery() on any Core select or ORM Query object in "
+                    "order to produce a subquery object.",
+                    version="1.4",
+                )
+                return resolved._implicit_subquery
+        elif resolved._is_text_clause:
+            return resolved
+        else:
+            self._raise_for_expected(element, argname, resolved)
+
+    def _post_coercion(self, element, *, deannotate=False, **kw):
+        if deannotate:
+            return element._deannotate()
+        else:
+            return element
+
+
+class StrictFromClauseImpl(FromClauseImpl):
+    __slots__ = ()
+
+    def _implicit_coercions(
+        self,
+        element: Any,
+        resolved: Any,
+        argname: Optional[str] = None,
+        *,
+        allow_select: bool = False,
+        **kw: Any,
+    ) -> Any:
+        if resolved._is_select_base and allow_select:
+            util.warn_deprecated(
+                "Implicit coercion of SELECT and textual SELECT constructs "
+                "into FROM clauses is deprecated; please call .subquery() "
+                "on any Core select or ORM Query object in order to produce a "
+                "subquery object.",
+                version="1.4",
+            )
+            return resolved._implicit_subquery
+        else:
+            self._raise_for_expected(element, argname, resolved)
+
+
+class AnonymizedFromClauseImpl(StrictFromClauseImpl):
+    __slots__ = ()
+
+    def _post_coercion(self, element, *, flat=False, name=None, **kw):
+        assert name is None
+
+        return element._anonymous_fromclause(flat=flat)
+
+
+class DMLTableImpl(_SelectIsNotFrom, _NoTextCoercion, RoleImpl):
+    __slots__ = ()
+
+    def _post_coercion(self, element, **kw):
+        if "dml_table" in element._annotations:
+            return element._annotations["dml_table"]
+        else:
+            return element
+
+
+class DMLSelectImpl(_NoTextCoercion, RoleImpl):
+    __slots__ = ()
+
+    def _implicit_coercions(
+        self,
+        element: Any,
+        resolved: Any,
+        argname: Optional[str] = None,
+        **kw: Any,
+    ) -> Any:
+        if resolved._is_from_clause:
+            if (
+                isinstance(resolved, selectable.Alias)
+                and resolved.element._is_select_base
+            ):
+                return resolved.element
+            else:
+                return resolved.select()
+        else:
+            self._raise_for_expected(element, argname, resolved)
+
+
+class CompoundElementImpl(_NoTextCoercion, RoleImpl):
+    __slots__ = ()
+
+    def _raise_for_expected(self, element, argname=None, resolved=None, **kw):
+        if isinstance(element, roles.FromClauseRole):
+            if element._is_subquery:
+                advice = (
+                    "Use the plain select() object without "
+                    "calling .subquery() or .alias()."
+                )
+            else:
+                advice = (
+                    "To SELECT from any FROM clause, use the .select() method."
+                )
+        else:
+            advice = None
+        return super()._raise_for_expected(
+            element, argname=argname, resolved=resolved, advice=advice, **kw
+        )
+
+
+_impl_lookup = {}
+
+
+for name in dir(roles):
+    cls = getattr(roles, name)
+    if name.endswith("Role"):
+        name = name.replace("Role", "Impl")
+        if name in globals():
+            impl = globals()[name](cls)
+            _impl_lookup[cls] = impl
+
+if not TYPE_CHECKING:
+    ee_impl = _impl_lookup[roles.ExpressionElementRole]
+
+    for py_type in (int, bool, str, float):
+        _impl_lookup[roles.ExpressionElementRole[py_type]] = ee_impl
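+
+# e.g. _impl_lookup[roles.ExpressionElementRole] yields the
+# ExpressionElementImpl instance constructed above; coercions.expect()
+# consults this mapping to coerce arbitrary user input into a given role.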
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/compiler.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/compiler.py
new file mode 100644
index 00000000..da476849
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/compiler.py
@@ -0,0 +1,7840 @@
+# sql/compiler.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+"""Base SQL and DDL compiler implementations.
+
+Classes provided include:
+
+:class:`.compiler.SQLCompiler` - renders SQL
+strings
+
+:class:`.compiler.DDLCompiler` - renders DDL
+(data definition language) strings
+
+:class:`.compiler.GenericTypeCompiler` - renders
+type specification strings.
+
+To generate user-defined SQL strings, see
+:doc:`/ext/compiler`.
+
+"""
+from __future__ import annotations
+
+import collections
+import collections.abc as collections_abc
+import contextlib
+from enum import IntEnum
+import functools
+import itertools
+import operator
+import re
+from time import perf_counter
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import ClassVar
+from typing import Dict
+from typing import FrozenSet
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import MutableMapping
+from typing import NamedTuple
+from typing import NoReturn
+from typing import Optional
+from typing import Pattern
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import Union
+
+from . import base
+from . import coercions
+from . import crud
+from . import elements
+from . import functions
+from . import operators
+from . import roles
+from . import schema
+from . import selectable
+from . import sqltypes
+from . import util as sql_util
+from ._typing import is_column_element
+from ._typing import is_dml
+from .base import _de_clone
+from .base import _from_objects
+from .base import _NONE_NAME
+from .base import _SentinelDefaultCharacterization
+from .base import Executable
+from .base import NO_ARG
+from .elements import ClauseElement
+from .elements import quoted_name
+from .schema import Column
+from .sqltypes import TupleType
+from .type_api import TypeEngine
+from .visitors import prefix_anon_map
+from .visitors import Visitable
+from .. import exc
+from .. import util
+from ..util import FastIntFlag
+from ..util.typing import Literal
+from ..util.typing import Protocol
+from ..util.typing import TypedDict
+
+if typing.TYPE_CHECKING:
+    from .annotation import _AnnotationDict
+    from .base import _AmbiguousTableNameMap
+    from .base import CompileState
+    from .cache_key import CacheKey
+    from .ddl import ExecutableDDLElement
+    from .dml import Insert
+    from .dml import UpdateBase
+    from .dml import ValuesBase
+    from .elements import _truncated_label
+    from .elements import BindParameter
+    from .elements import ColumnClause
+    from .elements import ColumnElement
+    from .elements import Label
+    from .functions import Function
+    from .schema import Table
+    from .selectable import AliasedReturnsRows
+    from .selectable import CompoundSelectState
+    from .selectable import CTE
+    from .selectable import FromClause
+    from .selectable import NamedFromClause
+    from .selectable import ReturnsRows
+    from .selectable import Select
+    from .selectable import SelectState
+    from .type_api import _BindProcessorType
+    from ..engine.cursor import CursorResultMetaData
+    from ..engine.interfaces import _CoreSingleExecuteParams
+    from ..engine.interfaces import _DBAPIAnyExecuteParams
+    from ..engine.interfaces import _DBAPIMultiExecuteParams
+    from ..engine.interfaces import _DBAPISingleExecuteParams
+    from ..engine.interfaces import _ExecuteOptions
+    from ..engine.interfaces import _GenericSetInputSizesType
+    from ..engine.interfaces import _MutableCoreSingleExecuteParams
+    from ..engine.interfaces import Dialect
+    from ..engine.interfaces import SchemaTranslateMapType
+
+_FromHintsType = Dict["FromClause", str]
+
+RESERVED_WORDS = {
+    "all",
+    "analyse",
+    "analyze",
+    "and",
+    "any",
+    "array",
+    "as",
+    "asc",
+    "asymmetric",
+    "authorization",
+    "between",
+    "binary",
+    "both",
+    "case",
+    "cast",
+    "check",
+    "collate",
+    "column",
+    "constraint",
+    "create",
+    "cross",
+    "current_date",
+    "current_role",
+    "current_time",
+    "current_timestamp",
+    "current_user",
+    "default",
+    "deferrable",
+    "desc",
+    "distinct",
+    "do",
+    "else",
+    "end",
+    "except",
+    "false",
+    "for",
+    "foreign",
+    "freeze",
+    "from",
+    "full",
+    "grant",
+    "group",
+    "having",
+    "ilike",
+    "in",
+    "initially",
+    "inner",
+    "intersect",
+    "into",
+    "is",
+    "isnull",
+    "join",
+    "leading",
+    "left",
+    "like",
+    "limit",
+    "localtime",
+    "localtimestamp",
+    "natural",
+    "new",
+    "not",
+    "notnull",
+    "null",
+    "off",
+    "offset",
+    "old",
+    "on",
+    "only",
+    "or",
+    "order",
+    "outer",
+    "overlaps",
+    "placing",
+    "primary",
+    "references",
+    "right",
+    "select",
+    "session_user",
+    "set",
+    "similar",
+    "some",
+    "symmetric",
+    "table",
+    "then",
+    "to",
+    "trailing",
+    "true",
+    "union",
+    "unique",
+    "user",
+    "using",
+    "verbose",
+    "when",
+    "where",
+}
+
+LEGAL_CHARACTERS = re.compile(r"^[A-Z0-9_$]+$", re.I)
+LEGAL_CHARACTERS_PLUS_SPACE = re.compile(r"^[A-Z0-9_ $]+$", re.I)
+ILLEGAL_INITIAL_CHARACTERS = {str(x) for x in range(0, 10)}.union(["$"])
+
+FK_ON_DELETE = re.compile(
+    r"^(?:RESTRICT|CASCADE|SET NULL|NO ACTION|SET DEFAULT)$", re.I
+)
+FK_ON_UPDATE = re.compile(
+    r"^(?:RESTRICT|CASCADE|SET NULL|NO ACTION|SET DEFAULT)$", re.I
+)
+FK_INITIALLY = re.compile(r"^(?:DEFERRED|IMMEDIATE)$", re.I)
+BIND_PARAMS = re.compile(r"(?<![:\w\$\x5c]):([\w\$]+)(?![:\w\$])", re.UNICODE)
+BIND_PARAMS_ESC = re.compile(r"\x5c(:[\w\$]*)(?![:\w\$])", re.UNICODE)
+
+_pyformat_template = "%%(%(name)s)s"
+BIND_TEMPLATES = {
+    "pyformat": _pyformat_template,
+    "qmark": "?",
+    "format": "%%s",
+    "numeric": ":[_POSITION]",
+    "numeric_dollar": "$[_POSITION]",
+    "named": ":%(name)s",
+}
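+
+# e.g. a parameter named "val" renders as "%(val)s" under "pyformat",
+# "?" under "qmark", "%s" under "format", ":val" under "named", and as
+# ":1" / "$1" under "numeric" / "numeric_dollar" once [_POSITION] is
+# replaced with the parameter's ordinal position.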
+
+
+OPERATORS = {
+    # binary
+    operators.and_: " AND ",
+    operators.or_: " OR ",
+    operators.add: " + ",
+    operators.mul: " * ",
+    operators.sub: " - ",
+    operators.mod: " % ",
+    operators.neg: "-",
+    operators.lt: " < ",
+    operators.le: " <= ",
+    operators.ne: " != ",
+    operators.gt: " > ",
+    operators.ge: " >= ",
+    operators.eq: " = ",
+    operators.is_distinct_from: " IS DISTINCT FROM ",
+    operators.is_not_distinct_from: " IS NOT DISTINCT FROM ",
+    operators.concat_op: " || ",
+    operators.match_op: " MATCH ",
+    operators.not_match_op: " NOT MATCH ",
+    operators.in_op: " IN ",
+    operators.not_in_op: " NOT IN ",
+    operators.comma_op: ", ",
+    operators.from_: " FROM ",
+    operators.as_: " AS ",
+    operators.is_: " IS ",
+    operators.is_not: " IS NOT ",
+    operators.collate: " COLLATE ",
+    # unary
+    operators.exists: "EXISTS ",
+    operators.distinct_op: "DISTINCT ",
+    operators.inv: "NOT ",
+    operators.any_op: "ANY ",
+    operators.all_op: "ALL ",
+    # modifiers
+    operators.desc_op: " DESC",
+    operators.asc_op: " ASC",
+    operators.nulls_first_op: " NULLS FIRST",
+    operators.nulls_last_op: " NULLS LAST",
+    # bitwise
+    operators.bitwise_xor_op: " ^ ",
+    operators.bitwise_or_op: " | ",
+    operators.bitwise_and_op: " & ",
+    operators.bitwise_not_op: "~",
+    operators.bitwise_lshift_op: " << ",
+    operators.bitwise_rshift_op: " >> ",
+}
+
+FUNCTIONS: Dict[Type[Function[Any]], str] = {
+    functions.coalesce: "coalesce",
+    functions.current_date: "CURRENT_DATE",
+    functions.current_time: "CURRENT_TIME",
+    functions.current_timestamp: "CURRENT_TIMESTAMP",
+    functions.current_user: "CURRENT_USER",
+    functions.localtime: "LOCALTIME",
+    functions.localtimestamp: "LOCALTIMESTAMP",
+    functions.random: "random",
+    functions.sysdate: "sysdate",
+    functions.session_user: "SESSION_USER",
+    functions.user: "USER",
+    functions.cube: "CUBE",
+    functions.rollup: "ROLLUP",
+    functions.grouping_sets: "GROUPING SETS",
+}
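+
+# e.g. func.coalesce(x, y) renders as "coalesce(x, y)", while niladic
+# ANSI functions such as func.current_timestamp() render without
+# parenthesis as the keyword "CURRENT_TIMESTAMP".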
+
+
+EXTRACT_MAP = {
+    "month": "month",
+    "day": "day",
+    "year": "year",
+    "second": "second",
+    "hour": "hour",
+    "doy": "doy",
+    "minute": "minute",
+    "quarter": "quarter",
+    "dow": "dow",
+    "week": "week",
+    "epoch": "epoch",
+    "milliseconds": "milliseconds",
+    "microseconds": "microseconds",
+    "timezone_hour": "timezone_hour",
+    "timezone_minute": "timezone_minute",
+}
+
+COMPOUND_KEYWORDS = {
+    selectable._CompoundSelectKeyword.UNION: "UNION",
+    selectable._CompoundSelectKeyword.UNION_ALL: "UNION ALL",
+    selectable._CompoundSelectKeyword.EXCEPT: "EXCEPT",
+    selectable._CompoundSelectKeyword.EXCEPT_ALL: "EXCEPT ALL",
+    selectable._CompoundSelectKeyword.INTERSECT: "INTERSECT",
+    selectable._CompoundSelectKeyword.INTERSECT_ALL: "INTERSECT ALL",
+}
+
+
+class ResultColumnsEntry(NamedTuple):
+    """Tracks a column expression that is expected to be represented
+    in the result rows for this statement.
+
+    This normally refers to the columns clause of a SELECT statement
+    but may also refer to a RETURNING clause, as well as to dialect-specific
+    emulations.
+
+    """
+
+    keyname: str
+    """string name that's expected in cursor.description"""
+
+    name: str
+    """column name, may be labeled"""
+
+    objects: Tuple[Any, ...]
+    """sequence of objects that should be able to locate this column
+    in a RowMapping.  This is typically string names and aliases
+    as well as Column objects.
+
+    """
+
+    type: TypeEngine[Any]
+    """Datatype to be associated with this column.   This is where
+    the "result processing" logic directly links the compiled statement
+    to the rows that come back from the cursor.
+
+    """
+
+
+class _ResultMapAppender(Protocol):
+    def __call__(
+        self,
+        keyname: str,
+        name: str,
+        objects: Sequence[Any],
+        type_: TypeEngine[Any],
+    ) -> None: ...
+
+
+# integer indexes into ResultColumnsEntry used by cursor.py.
+# some profiling showed integer access faster than named tuple
+RM_RENDERED_NAME: Literal[0] = 0
+RM_NAME: Literal[1] = 1
+RM_OBJECTS: Literal[2] = 2
+RM_TYPE: Literal[3] = 3
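+
+# e.g. for entry: ResultColumnsEntry, entry[RM_NAME] is equivalent to
+# entry.name; cursor.py uses the integer form in performance-critical paths.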
+
+
+class _BaseCompilerStackEntry(TypedDict):
+    asfrom_froms: Set[FromClause]
+    correlate_froms: Set[FromClause]
+    selectable: ReturnsRows
+
+
+class _CompilerStackEntry(_BaseCompilerStackEntry, total=False):
+    compile_state: CompileState
+    need_result_map_for_nested: bool
+    need_result_map_for_compound: bool
+    select_0: ReturnsRows
+    insert_from_select: Select[Any]
+
+
+class ExpandedState(NamedTuple):
+    """represents state to use when producing "expanded" and
+    "post compile" bound parameters for a statement.
+
+    "expanded" parameters are parameters that are generated at
+    statement execution time to suit a number of parameters passed, the most
+    prominent example being the individual elements inside of an IN expression.
+
+    "post compile" parameters are parameters where the SQL literal value
+    will be rendered into the SQL statement at execution time, rather than
+    being passed as separate parameters to the driver.
+
+    To create an :class:`.ExpandedState` instance, use the
+    :meth:`.SQLCompiler.construct_expanded_state` method on any
+    :class:`.SQLCompiler` instance.
+
+    """
+
+    statement: str
+    """String SQL statement with parameters fully expanded"""
+
+    parameters: _CoreSingleExecuteParams
+    """Parameter dictionary with parameters fully expanded.
+
+    For a statement that uses named parameters, this dictionary will map
+    exactly to the names in the statement.  For a statement that uses
+    positional parameters, the :attr:`.ExpandedState.positional_parameters`
+    will yield a tuple with the positional parameter set.
+
+    """
+
+    processors: Mapping[str, _BindProcessorType[Any]]
+    """mapping of bound value processors"""
+
+    positiontup: Optional[Sequence[str]]
+    """Sequence of string names indicating the order of positional
+    parameters"""
+
+    parameter_expansion: Mapping[str, List[str]]
+    """Mapping representing the intermediary link from original parameter
+    name to list of "expanded" parameter names, for those parameters that
+    were expanded."""
+
+    @property
+    def positional_parameters(self) -> Tuple[Any, ...]:
+        """Tuple of positional parameters, for statements that were compiled
+        using a positional paramstyle.
+
+        """
+        if self.positiontup is None:
+            raise exc.InvalidRequestError(
+                "statement does not use a positional paramstyle"
+            )
+        return tuple(self.parameters[key] for key in self.positiontup)
+
+    @property
+    def additional_parameters(self) -> _CoreSingleExecuteParams:
+        """synonym for :attr:`.ExpandedState.parameters`."""
+        return self.parameters
+
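+# A minimal sketch of obtaining an ExpandedState, assuming a statement
+# "stmt" containing an expanding bindparam named "ids" (names are
+# illustrative only):
+#
+#     compiled = stmt.compile()
+#     state = compiled.construct_expanded_state({"ids": [1, 2, 3]})
+#     state.statement   # SQL string with the IN clause fully expanded
+#     state.parameters  # e.g. {"ids_1": 1, "ids_2": 2, "ids_3": 3}
+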
+
+class _InsertManyValues(NamedTuple):
+    """represents state to use for executing an "insertmanyvalues" statement.
+
+    The primary consumers of this object are the
+    :meth:`.SQLCompiler._deliver_insertmanyvalues_batches` and
+    :meth:`.DefaultDialect._deliver_insertmanyvalues_batches` methods.
+
+    .. versionadded:: 2.0
+
+    """
+
+    is_default_expr: bool
+    """if True, the statement is of the form
+    ``INSERT INTO TABLE DEFAULT VALUES``, and can't be rewritten as a "batch"
+
+    """
+
+    single_values_expr: str
+    """The rendered "values" clause of the INSERT statement.
+
+    This is typically the parenthesized section e.g. "(?, ?, ?)" or similar.
+    The insertmanyvalues logic uses this string as a search and replace
+    target.
+
+    """
+
+    insert_crud_params: List[crud._CrudParamElementStr]
+    """List of Column / bind names etc. used while rewriting the statement"""
+
+    num_positional_params_counted: int
+    """the number of bound parameters in a single-row statement.
+
+    This count may be larger or smaller than the actual number of columns
+    targeted in the INSERT, as it accommodates for SQL expressions
+    in the values list that may have zero or more parameters embedded
+    within them.
+
+    This count is part of what's used to organize rewritten parameter lists
+    when batching.
+
+    """
+
+    sort_by_parameter_order: bool = False
+    """if the deterministic_returnined_order parameter were used on the
+    insert.
+
+    All of the attributes following this will only be used if this is True.
+
+    """
+
+    includes_upsert_behaviors: bool = False
+    """if True, we have to accommodate for upsert behaviors.
+
+    This will in some cases downgrade "insertmanyvalues" that requests
+    deterministic ordering.
+
+    """
+
+    sentinel_columns: Optional[Sequence[Column[Any]]] = None
+    """List of sentinel columns that were located.
+
+    This list is only here if the INSERT asked for
+    sort_by_parameter_order=True,
+    and dialect-appropriate sentinel columns were located.
+
+    .. versionadded:: 2.0.10
+
+    """
+
+    num_sentinel_columns: int = 0
+    """how many sentinel columns are in the above list, if any.
+
+    This is the same as
+    ``len(sentinel_columns) if sentinel_columns is not None else 0``
+
+    """
+
+    sentinel_param_keys: Optional[Sequence[str]] = None
+    """parameter str keys in each param dictionary / tuple
+    that would link to the client side "sentinel" values for that row, which
+    we can use to match up parameter sets to result rows.
+
+    This is only present if sentinel_columns is present and the INSERT
+    statement actually refers to client side values for these sentinel
+    columns.
+
+    .. versionadded:: 2.0.10
+
+    .. versionchanged:: 2.0.29 - the sequence is now string dictionary keys
+       only, used against the "compiled parameters" collection before
+       the parameters were converted by bound parameter processors
+
+    """
+
+    implicit_sentinel: bool = False
+    """if True, we have exactly one sentinel column and it uses a server side
+    value, which currently has to generate an incrementing integer value.
+
+    The dialect in question would have asserted that it supports receiving
+    these values back and sorting on that value as a means of guaranteeing
+    correlation with the incoming parameter list.
+
+    .. versionadded:: 2.0.10
+
+    """
+
+    embed_values_counter: bool = False
+    """Whether to embed an incrementing integer counter in each parameter
+    set within the VALUES clause as parameters are batched.
+
+    This is only used for a specific INSERT..SELECT..VALUES..RETURNING syntax
+    where a subquery is used to produce value tuples.  Current support
+    includes PostgreSQL, Microsoft SQL Server.
+
+    .. versionadded:: 2.0.10
+
+    """
+
+
+class _InsertManyValuesBatch(NamedTuple):
+    """represents an individual batch SQL statement for insertmanyvalues.
+
+    This is passed through the
+    :meth:`.SQLCompiler._deliver_insertmanyvalues_batches` and
+    :meth:`.DefaultDialect._deliver_insertmanyvalues_batches` methods out
+    to the :class:`.Connection` within the
+    :meth:`.Connection._exec_insertmany_context` method.
+
+    .. versionadded:: 2.0.10
+
+    """
+
+    replaced_statement: str
+    replaced_parameters: _DBAPIAnyExecuteParams
+    processed_setinputsizes: Optional[_GenericSetInputSizesType]
+    batch: Sequence[_DBAPISingleExecuteParams]
+    sentinel_values: Sequence[Tuple[Any, ...]]
+    current_batch_size: int
+    batchnum: int
+    total_batches: int
+    rows_sorted: bool
+    is_downgraded: bool
+
+
+class InsertmanyvaluesSentinelOpts(FastIntFlag):
+    """bitflag enum indicating styles of PK defaults
+    which can work as implicit sentinel columns
+
+    """
+
+    NOT_SUPPORTED = 1
+    AUTOINCREMENT = 2
+    IDENTITY = 4
+    SEQUENCE = 8
+
+    ANY_AUTOINCREMENT = AUTOINCREMENT | IDENTITY | SEQUENCE
+    _SUPPORTED_OR_NOT = NOT_SUPPORTED | ANY_AUTOINCREMENT
+
+    USE_INSERT_FROM_SELECT = 16
+    RENDER_SELECT_COL_CASTS = 64
+
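+# A dialect advertises support by combining these flags; a sketch assuming
+# the SQLAlchemy 2.0 dialect attribute name:
+#
+#     insertmanyvalues_implicit_sentinel = (
+#         InsertmanyvaluesSentinelOpts.IDENTITY
+#         | InsertmanyvaluesSentinelOpts.USE_INSERT_FROM_SELECT
+#     )
+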
+
+class CompilerState(IntEnum):
+    COMPILING = 0
+    """statement is present, compilation phase in progress"""
+
+    STRING_APPLIED = 1
+    """statement is present, string form of the statement has been applied.
+
+    Additional processors by subclasses may still be pending.
+
+    """
+
+    NO_STATEMENT = 2
+    """compiler does not have a statement to compile, is used
+    for method access"""
+
+
+class Linting(IntEnum):
+    """represent preferences for the 'SQL linting' feature.
+
+    this feature currently includes support for flagging cartesian products
+    in SQL statements.
+
+    """
+
+    NO_LINTING = 0
+    "Disable all linting."
+
+    COLLECT_CARTESIAN_PRODUCTS = 1
+    """Collect data on FROMs and cartesian products and gather into
+    'self.from_linter'"""
+
+    WARN_LINTING = 2
+    "Emit warnings for linters that find problems"
+
+    FROM_LINTING = COLLECT_CARTESIAN_PRODUCTS | WARN_LINTING
+    """Warn for cartesian products; combines COLLECT_CARTESIAN_PRODUCTS
+    and WARN_LINTING"""
+
+
+NO_LINTING, COLLECT_CARTESIAN_PRODUCTS, WARN_LINTING, FROM_LINTING = tuple(
+    Linting
+)
+
+
+class FromLinter(collections.namedtuple("FromLinter", ["froms", "edges"])):
+    """represents current state for the "cartesian product" detection
+    feature."""
+
+    def lint(self, start=None):
+        froms = self.froms
+        if not froms:
+            return None, None
+
+        edges = set(self.edges)
+        the_rest = set(froms)
+
+        if start is not None:
+            start_with = start
+            the_rest.remove(start_with)
+        else:
+            start_with = the_rest.pop()
+
+        stack = collections.deque([start_with])
+
+        while stack and the_rest:
+            node = stack.popleft()
+            the_rest.discard(node)
+
+            # comparison of nodes in edges here is based on hash equality, as
+            # there are "annotated" elements that match the non-annotated ones.
+            # to remove the need for in-python hash() calls, use native
+            # containment routines (e.g. "node in edge", "edge.index(node)")
+            to_remove = {edge for edge in edges if node in edge}
+
+            # appendleft the node in each edge that is not
+            # the one that matched.
+            stack.extendleft(edge[not edge.index(node)] for edge in to_remove)
+            edges.difference_update(to_remove)
+
+        # FROMS left over?  boom
+        if the_rest:
+            return the_rest, start_with
+        else:
+            return None, None
+
+    def warn(self, stmt_type="SELECT"):
+        the_rest, start_with = self.lint()
+
+        # FROMS left over?  boom
+        if the_rest:
+            froms = the_rest
+            if froms:
+                template = (
+                    "{stmt_type} statement has a cartesian product between "
+                    "FROM element(s) {froms} and "
+                    'FROM element "{start}".  Apply join condition(s) '
+                    "between each element to resolve."
+                )
+                froms_str = ", ".join(
+                    f'"{self.froms[from_]}"' for from_ in froms
+                )
+                message = template.format(
+                    stmt_type=stmt_type,
+                    froms=froms_str,
+                    start=self.froms[start_with],
+                )
+
+                util.warn(message)
+
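+# Sketch of the traversal above: with froms mapping three FROM elements
+# {a: "a", b: "b", c: "c"} and edges {(a, b)}, a search starting at one
+# element (say a) reaches b but never c, so lint() returns ({c}, a) and
+# warn() emits the cartesian product warning naming "c".
+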
+
+class Compiled:
+    """Represent a compiled SQL or DDL expression.
+
+    The ``__str__`` method of the ``Compiled`` object should produce
+    the actual text of the statement.  ``Compiled`` objects are
+    specific to their underlying database dialect, and also may
+    or may not be specific to the columns referenced within a
+    particular set of bind parameters.  In no case should the
+    ``Compiled`` object be dependent on the actual values of those
+    bind parameters, even though it may reference those values as
+    defaults.
+    """
+
+    statement: Optional[ClauseElement] = None
+    "The statement to compile."
+    string: str = ""
+    "The string representation of the ``statement``"
+
+    state: CompilerState
+    """description of the compiler's state"""
+
+    is_sql = False
+    is_ddl = False
+
+    _cached_metadata: Optional[CursorResultMetaData] = None
+
+    _result_columns: Optional[List[ResultColumnsEntry]] = None
+
+    schema_translate_map: Optional[SchemaTranslateMapType] = None
+
+    execution_options: _ExecuteOptions = util.EMPTY_DICT
+    """
+    Execution options propagated from the statement.   In some cases,
+    sub-elements of the statement can modify these.
+    """
+
+    preparer: IdentifierPreparer
+
+    _annotations: _AnnotationDict = util.EMPTY_DICT
+
+    compile_state: Optional[CompileState] = None
+    """Optional :class:`.CompileState` object that maintains additional
+    state used by the compiler.
+
+    Major executable objects such as :class:`_expression.Insert`,
+    :class:`_expression.Update`, :class:`_expression.Delete`,
+    :class:`_expression.Select` will generate this
+    state when compiled in order to calculate additional information about the
+    object.   For the top level object that is to be executed, the state can be
+    stored here where it can also have applicability towards result set
+    processing.
+
+    .. versionadded:: 1.4
+
+    """
+
+    dml_compile_state: Optional[CompileState] = None
+    """Optional :class:`.CompileState` assigned at the same point that
+    .isinsert, .isupdate, or .isdelete is assigned.
+
+    This will normally be the same object as .compile_state, with the
+    exception of cases like the :class:`.ORMFromStatementCompileState`
+    object.
+
+    .. versionadded:: 1.4.40
+
+    """
+
+    cache_key: Optional[CacheKey] = None
+    """The :class:`.CacheKey` that was generated ahead of creating this
+    :class:`.Compiled` object.
+
+    This is used for routines that need access to the original
+    :class:`.CacheKey` instance generated when the :class:`.Compiled`
+    instance was first cached, typically in order to reconcile
+    the original list of :class:`.BindParameter` objects with a
+    per-statement list that's generated on each call.
+
+    """
+
+    _gen_time: float
+    """Generation time of this :class:`.Compiled`, used for reporting
+    cache stats."""
+
+    def __init__(
+        self,
+        dialect: Dialect,
+        statement: Optional[ClauseElement],
+        schema_translate_map: Optional[SchemaTranslateMapType] = None,
+        render_schema_translate: bool = False,
+        compile_kwargs: Mapping[str, Any] = util.immutabledict(),
+    ):
+        """Construct a new :class:`.Compiled` object.
+
+        :param dialect: :class:`.Dialect` to compile against.
+
+        :param statement: :class:`_expression.ClauseElement` to be compiled.
+
+        :param schema_translate_map: dictionary of schema names to be
+         translated when forming the resultant SQL
+
+         .. seealso::
+
+            :ref:`schema_translating`
+
+        :param compile_kwargs: additional kwargs that will be
+         passed to the initial call to :meth:`.Compiled.process`.
+
+
+        """
+        self.dialect = dialect
+        self.preparer = self.dialect.identifier_preparer
+        if schema_translate_map:
+            self.schema_translate_map = schema_translate_map
+            self.preparer = self.preparer._with_schema_translate(
+                schema_translate_map
+            )
+
+        if statement is not None:
+            self.state = CompilerState.COMPILING
+            self.statement = statement
+            self.can_execute = statement.supports_execution
+            self._annotations = statement._annotations
+            if self.can_execute:
+                if TYPE_CHECKING:
+                    assert isinstance(statement, Executable)
+                self.execution_options = statement._execution_options
+            self.string = self.process(self.statement, **compile_kwargs)
+
+            if render_schema_translate:
+                self.string = self.preparer._render_schema_translates(
+                    self.string, schema_translate_map
+                )
+
+            self.state = CompilerState.STRING_APPLIED
+        else:
+            self.state = CompilerState.NO_STATEMENT
+
+        self._gen_time = perf_counter()
+
+    def __init_subclass__(cls) -> None:
+        cls._init_compiler_cls()
+        return super().__init_subclass__()
+
+    @classmethod
+    def _init_compiler_cls(cls):
+        pass
+
+    def _execute_on_connection(
+        self, connection, distilled_params, execution_options
+    ):
+        if self.can_execute:
+            return connection._execute_compiled(
+                self, distilled_params, execution_options
+            )
+        else:
+            raise exc.ObjectNotExecutableError(self.statement)
+
+    def visit_unsupported_compilation(self, element, err, **kw):
+        raise exc.UnsupportedCompilationError(self, type(element)) from err
+
+    @property
+    def sql_compiler(self):
+        """Return a Compiled that is capable of processing SQL expressions.
+
+        If this compiler is one such object, it will typically return 'self'.
+
+        """
+
+        raise NotImplementedError()
+
+    def process(self, obj: Visitable, **kwargs: Any) -> str:
+        return obj._compiler_dispatch(self, **kwargs)
+
+    def __str__(self) -> str:
+        """Return the string text of the generated SQL or DDL."""
+
+        if self.state is CompilerState.STRING_APPLIED:
+            return self.string
+        else:
+            return ""
+
+    def construct_params(
+        self,
+        params: Optional[_CoreSingleExecuteParams] = None,
+        extracted_parameters: Optional[Sequence[BindParameter[Any]]] = None,
+        escape_names: bool = True,
+    ) -> Optional[_MutableCoreSingleExecuteParams]:
+        """Return the bind params for this compiled object.
+
+        :param params: a dict of string/object pairs whose values will
+                       override bind values compiled in to the
+                       statement.
+        """
+
+        raise NotImplementedError()
+
+    @property
+    def params(self):
+        """Return the bind params for this compiled object."""
+        return self.construct_params()
+
+
+class TypeCompiler(util.EnsureKWArg):
+    """Produces DDL specification for TypeEngine objects."""
+
+    ensure_kwarg = r"visit_\w+"
+
+    def __init__(self, dialect: Dialect):
+        self.dialect = dialect
+
+    def process(self, type_: TypeEngine[Any], **kw: Any) -> str:
+        if (
+            type_._variant_mapping
+            and self.dialect.name in type_._variant_mapping
+        ):
+            type_ = type_._variant_mapping[self.dialect.name]
+        return type_._compiler_dispatch(self, **kw)
+
+    def visit_unsupported_compilation(
+        self, element: Any, err: Exception, **kw: Any
+    ) -> NoReturn:
+        raise exc.UnsupportedCompilationError(self, element) from err
+
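+# e.g., as a sketch assuming the 2.0 dialect attribute name:
+#     dialect.type_compiler_instance.process(sqltypes.String(50))
+# typically renders "VARCHAR(50)" under the generic type compiler.
+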
+
+# this was a Visitable, but to allow accurate detection of
+# column elements this is actually a column element
+class _CompileLabel(
+    roles.BinaryElementRole[Any], elements.CompilerColumnElement
+):
+    """lightweight label object which acts as an expression.Label."""
+
+    __visit_name__ = "label"
+    __slots__ = "element", "name", "_alt_names"
+
+    def __init__(self, col, name, alt_names=()):
+        self.element = col
+        self.name = name
+        self._alt_names = (col,) + alt_names
+
+    @property
+    def proxy_set(self):
+        return self.element.proxy_set
+
+    @property
+    def type(self):
+        return self.element.type
+
+    def self_group(self, **kw):
+        return self
+
+
+class ilike_case_insensitive(
+    roles.BinaryElementRole[Any], elements.CompilerColumnElement
+):
+    """produce a wrapping element for a case-insensitive portion of
+    an ILIKE construct.
+
+    The construct usually renders the ``lower()`` function, but on
+    PostgreSQL will pass silently with the assumption that "ILIKE"
+    is being used.
+
+    .. versionadded:: 2.0
+
+    """
+
+    __visit_name__ = "ilike_case_insensitive_operand"
+    __slots__ = "element", "comparator"
+
+    def __init__(self, element):
+        self.element = element
+        self.comparator = element.comparator
+
+    @property
+    def proxy_set(self):
+        return self.element.proxy_set
+
+    @property
+    def type(self):
+        return self.element.type
+
+    def self_group(self, **kw):
+        return self
+
+    def _with_binary_element_type(self, type_):
+        return ilike_case_insensitive(
+            self.element._with_binary_element_type(type_)
+        )
+
+
+class SQLCompiler(Compiled):
+    """Default implementation of :class:`.Compiled`.
+
+    Compiles :class:`_expression.ClauseElement` objects into SQL strings.
+
+    """
+
+    extract_map = EXTRACT_MAP
+
+    bindname_escape_characters: ClassVar[Mapping[str, str]] = (
+        util.immutabledict(
+            {
+                "%": "P",
+                "(": "A",
+                ")": "Z",
+                ":": "C",
+                ".": "_",
+                "[": "_",
+                "]": "_",
+                " ": "_",
+            }
+        )
+    )
+    """A mapping (e.g. dict or similar) containing a lookup of
+    characters keyed to replacement characters which will be applied to all
+    'bind names' used in SQL statements as a form of 'escaping'; the given
+    characters are replaced entirely with the 'replacement' character when
+    rendered in the SQL statement, and a similar translation is performed
+    on the incoming names used in parameter dictionaries passed to methods
+    like :meth:`_engine.Connection.execute`.
+
+    This allows bound parameter names used in :func:`_sql.bindparam` and
+    other constructs to have any arbitrary characters present without any
+    concern for characters that aren't allowed at all on the target database.
+
+    Third party dialects can establish their own dictionary here to replace the
+    default mapping, which will ensure that the particular characters in the
+    mapping will never appear in a bound parameter name.
+
+    The dictionary is evaluated at **class creation time**, so cannot be
+    modified at runtime; it must be present on the class when the class
+    is first declared.
+
+    Note that for dialects that have additional bound parameter rules such
+    as additional restrictions on leading characters, the
+    :meth:`_sql.SQLCompiler.bindparam_string` method may need to be augmented.
+    See the cx_Oracle compiler for an example of this.
+
+    .. versionadded:: 2.0.0rc1
+
+    """
+
+    _bind_translate_re: ClassVar[Pattern[str]]
+    _bind_translate_chars: ClassVar[Mapping[str, str]]
+
+    is_sql = True
+
+    compound_keywords = COMPOUND_KEYWORDS
+
+    isdelete: bool = False
+    isinsert: bool = False
+    isupdate: bool = False
+    """class-level defaults which can be set at the instance
+    level to define if this Compiled instance represents
+    INSERT/UPDATE/DELETE
+    """
+
+    postfetch: Optional[List[Column[Any]]]
+    """list of columns that can be post-fetched after INSERT or UPDATE to
+    receive server-updated values"""
+
+    insert_prefetch: Sequence[Column[Any]] = ()
+    """list of columns for which default values should be evaluated before
+    an INSERT takes place"""
+
+    update_prefetch: Sequence[Column[Any]] = ()
+    """list of columns for which onupdate default values should be evaluated
+    before an UPDATE takes place"""
+
+    implicit_returning: Optional[Sequence[ColumnElement[Any]]] = None
+    """list of "implicit" returning columns for a toplevel INSERT or UPDATE
+    statement, used to receive newly generated values of columns.
+
+    .. versionadded:: 2.0  ``implicit_returning`` replaces the previous
+       ``returning`` collection, which was not a generalized RETURNING
+       collection and instead was in fact specific to the "implicit returning"
+       feature.
+
+    """
+
+    isplaintext: bool = False
+
+    binds: Dict[str, BindParameter[Any]]
+    """a dictionary of bind parameter keys to BindParameter instances."""
+
+    bind_names: Dict[BindParameter[Any], str]
+    """a dictionary of BindParameter instances to "compiled" names
+    that are actually present in the generated SQL"""
+
+    stack: List[_CompilerStackEntry]
+    """major statements such as SELECT, INSERT, UPDATE, DELETE are
+    tracked in this stack using an entry format."""
+
+    returning_precedes_values: bool = False
+    """set to True classwide to generate RETURNING
+    clauses before the VALUES or WHERE clause (i.e. MSSQL)
+    """
+
+    render_table_with_column_in_update_from: bool = False
+    """set to True classwide to indicate the SET clause
+    in a multi-table UPDATE statement should qualify
+    columns with the table name (i.e. MySQL only)
+    """
+
+    ansi_bind_rules: bool = False
+    """SQL 92 doesn't allow bind parameters to be used
+    in the columns clause of a SELECT, nor does it allow
+    ambiguous expressions like "? = ?".  A compiler
+    subclass can set this flag to True if the target
+    driver/DB enforces this.
+    """
+
+    bindtemplate: str
+    """template to render bound parameters based on paramstyle."""
+
+    compilation_bindtemplate: str
+    """template used by compiler to render parameters before positional
+    paramstyle application"""
+
+    _numeric_binds_identifier_char: str
+    """Character that's used to as the identifier of a numerical bind param.
+    For example if this char is set to ``$``, numerical binds will be rendered
+    in the form ``$1, $2, $3``.
+    """
+
+    _result_columns: List[ResultColumnsEntry]
+    """relates label names in the final SQL to a tuple of local
+    column/label name, ColumnElement object (if any) and
+    TypeEngine. CursorResult uses this for type processing and
+    column targeting"""
+
+    _textual_ordered_columns: bool = False
+    """tell the result object that the column names as rendered are important,
+    but they are also "ordered" vs. what is in the compiled object here.
+
+    As of 1.4.42 this condition is only present when the statement is a
+    TextualSelect, e.g. text("....").columns(...), where it is required
+    that the columns are considered positionally and not by name.
+
+    """
+
+    _ad_hoc_textual: bool = False
+    """tell the result that we encountered text() or '*' constructs in the
+    middle of the result columns, but we also have compiled columns, so
+    if the number of columns in cursor.description does not match how many
+    expressions we have, that means we can't rely on positional at all and
+    should match on name.
+
+    """
+
+    _ordered_columns: bool = True
+    """
+    if False, means we can't be sure the list of entries
+    in _result_columns is actually the rendered order.  Usually
+    True unless using an unordered TextualSelect.
+    """
+
+    _loose_column_name_matching: bool = False
+    """tell the result object that the SQL statement is textual, wants to match
+    up to Column objects, and may be using the ._tq_label in the SELECT rather
+    than the base name.
+
+    """
+
+    _numeric_binds: bool = False
+    """
+    True if paramstyle is "numeric".  This paramstyle is trickier than
+    all the others.
+
+    """
+
+    _render_postcompile: bool = False
+    """
+    whether to render out POSTCOMPILE params during the compile phase.
+
+    This attribute is used only for end-user invocation of stmt.compile();
+    it's never used for actual statement execution, where instead the
+    dialect internals access and render the internal postcompile structure
+    directly.
+
+    """
+
+    _post_compile_expanded_state: Optional[ExpandedState] = None
+    """When render_postcompile is used, the ``ExpandedState`` used to create
+    the "expanded" SQL is assigned here, and then used by the ``.params``
+    accessor and ``.construct_params()`` methods for their return values.
+
+    .. versionadded:: 2.0.0rc1
+
+    """
+
+    _pre_expanded_string: Optional[str] = None
+    """Stores the original string SQL before 'post_compile' is applied,
+    for cases where 'post_compile' was used.
+
+    """
+
+    _pre_expanded_positiontup: Optional[List[str]] = None
+
+    _insertmanyvalues: Optional[_InsertManyValues] = None
+
+    _insert_crud_params: Optional[crud._CrudParamSequence] = None
+
+    literal_execute_params: FrozenSet[BindParameter[Any]] = frozenset()
+    """bindparameter objects that are rendered as literal values at statement
+    execution time.
+
+    """
+
+    post_compile_params: FrozenSet[BindParameter[Any]] = frozenset()
+    """bindparameter objects that are rendered as bound parameter placeholders
+    at statement execution time.
+
+    """
+
+    escaped_bind_names: util.immutabledict[str, str] = util.EMPTY_DICT
+    """Late escaping of bound parameter names that has to be converted
+    to the original name when looking in the parameter dictionary.
+
+    """
+
+    has_out_parameters = False
+    """if True, there are bindparam() objects that have the isoutparam
+    flag set."""
+
+    postfetch_lastrowid = False
+    """if True, and this in insert, use cursor.lastrowid to populate
+    result.inserted_primary_key. """
+
+    _cache_key_bind_match: Optional[
+        Tuple[
+            Dict[
+                BindParameter[Any],
+                List[BindParameter[Any]],
+            ],
+            Dict[
+                str,
+                BindParameter[Any],
+            ],
+        ]
+    ] = None
+    """a mapping that will relate the BindParameter object we compile
+    to those that are part of the extracted collection of parameters
+    in the cache key, if we were given a cache key.
+
+    """
+
+    positiontup: Optional[List[str]] = None
+    """for a compiled construct that uses a positional paramstyle, will be
+    a sequence of strings, indicating the names of bound parameters in order.
+
+    This is used in order to render bound parameters in their correct order,
+    and is combined with the :attr:`_sql.Compiled.params` dictionary to
+    render parameters.
+
+    This sequence always contains the unescaped names of the parameters.
+
+    .. seealso::
+
+        :ref:`faq_sql_expression_string` - includes a usage example for
+        debugging use cases.
+
+    """
+    _values_bindparam: Optional[List[str]] = None
+
+    _visited_bindparam: Optional[List[str]] = None
+
+    inline: bool = False
+
+    ctes: Optional[MutableMapping[CTE, str]]
+
+    # Detect same CTE references - Dict[(level, name), cte]
+    # Level is required for supporting nesting
+    ctes_by_level_name: Dict[Tuple[int, str], CTE]
+
+    # To retrieve key/level in ctes_by_level_name -
+    # Dict[cte_reference, (level, cte_name, cte_opts)]
+    level_name_by_cte: Dict[CTE, Tuple[int, str, selectable._CTEOpts]]
+
+    ctes_recursive: bool
+
+    _post_compile_pattern = re.compile(r"__\[POSTCOMPILE_(\S+?)(~~.+?~~)?\]")
+    _pyformat_pattern = re.compile(r"%\(([^)]+?)\)s")
+    _positional_pattern = re.compile(
+        f"{_pyformat_pattern.pattern}|{_post_compile_pattern.pattern}"
+    )
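+
+    # e.g. a rendered string such as "x IN (__[POSTCOMPILE_x_1])" has its
+    # placeholder replaced at execution time with the expanded parameters,
+    # producing e.g. "x IN (?, ?, ?)" for a three-element list under the
+    # qmark paramstyle.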
+
+    @classmethod
+    def _init_compiler_cls(cls):
+        cls._init_bind_translate()
+
+    @classmethod
+    def _init_bind_translate(cls):
+        reg = re.escape("".join(cls.bindname_escape_characters))
+        cls._bind_translate_re = re.compile(f"[{reg}]")
+        cls._bind_translate_chars = cls.bindname_escape_characters
+
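+    # e.g. with the default bindname_escape_characters mapping, a bindparam
+    # named "my param%" is rendered as "my_paramP"; escaped_bind_names then
+    # allows execution-time parameter dictionaries to be matched back to
+    # the original name.
+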
+    def __init__(
+        self,
+        dialect: Dialect,
+        statement: Optional[ClauseElement],
+        cache_key: Optional[CacheKey] = None,
+        column_keys: Optional[Sequence[str]] = None,
+        for_executemany: bool = False,
+        linting: Linting = NO_LINTING,
+        _supporting_against: Optional[SQLCompiler] = None,
+        **kwargs: Any,
+    ):
+        """Construct a new :class:`.SQLCompiler` object.
+
+        :param dialect: :class:`.Dialect` to be used
+
+        :param statement: :class:`_expression.ClauseElement` to be compiled
+
+        :param column_keys:  a list of column names to be compiled into an
+         INSERT or UPDATE statement.
+
+        :param for_executemany: whether INSERT / UPDATE statements should
+         expect that they are to be invoked in an "executemany" style,
+         which may impact how the statement will be expected to return the
+         values of defaults and autoincrement / sequences and similar.
+         Depending on the backend and driver in use, support for retrieving
+         these values may be disabled which means SQL expressions may
+         be rendered inline, RETURNING may not be rendered, etc.
+
+        :param kwargs: additional keyword arguments to be consumed by the
+         superclass.
+
+        """
+        self.column_keys = column_keys
+
+        self.cache_key = cache_key
+
+        if cache_key:
+            cksm = {b.key: b for b in cache_key[1]}
+            ckbm = {b: [b] for b in cache_key[1]}
+            self._cache_key_bind_match = (ckbm, cksm)
+
+        # compile INSERT/UPDATE defaults/sequences to expect executemany
+        # style execution, which may mean no pre-execute of defaults,
+        # or no RETURNING
+        self.for_executemany = for_executemany
+
+        self.linting = linting
+
+        # a dictionary of bind parameter keys to BindParameter
+        # instances.
+        self.binds = {}
+
+        # a dictionary of BindParameter instances to "compiled" names
+        # that are actually present in the generated SQL
+        self.bind_names = util.column_dict()
+
+        # stack which keeps track of nested SELECT statements
+        self.stack = []
+
+        self._result_columns = []
+
+        # true if the paramstyle is positional
+        self.positional = dialect.positional
+        if self.positional:
+            self._numeric_binds = nb = dialect.paramstyle.startswith("numeric")
+            if nb:
+                self._numeric_binds_identifier_char = (
+                    "$" if dialect.paramstyle == "numeric_dollar" else ":"
+                )
+
+            self.compilation_bindtemplate = _pyformat_template
+        else:
+            self.compilation_bindtemplate = BIND_TEMPLATES[dialect.paramstyle]
+
+        self.ctes = None
+
+        self.label_length = (
+            dialect.label_length or dialect.max_identifier_length
+        )
+
+        # a map which tracks "anonymous" identifiers that are created on
+        # the fly here
+        self.anon_map = prefix_anon_map()
+
+        # a map which tracks "truncated" names based on
+        # dialect.label_length or dialect.max_identifier_length
+        self.truncated_names: Dict[Tuple[str, str], str] = {}
+        self._truncated_counters: Dict[str, int] = {}
+
+        Compiled.__init__(self, dialect, statement, **kwargs)
+
+        if self.isinsert or self.isupdate or self.isdelete:
+            if TYPE_CHECKING:
+                assert isinstance(statement, UpdateBase)
+
+            if self.isinsert or self.isupdate:
+                if TYPE_CHECKING:
+                    assert isinstance(statement, ValuesBase)
+                if statement._inline:
+                    self.inline = True
+                elif self.for_executemany and (
+                    not self.isinsert
+                    or (
+                        self.dialect.insert_executemany_returning
+                        and statement._return_defaults
+                    )
+                ):
+                    self.inline = True
+
+        self.bindtemplate = BIND_TEMPLATES[dialect.paramstyle]
+
+        if _supporting_against:
+            self.__dict__.update(
+                {
+                    k: v
+                    for k, v in _supporting_against.__dict__.items()
+                    if k
+                    not in {
+                        "state",
+                        "dialect",
+                        "preparer",
+                        "positional",
+                        "_numeric_binds",
+                        "compilation_bindtemplate",
+                        "bindtemplate",
+                    }
+                }
+            )
+
+        if self.state is CompilerState.STRING_APPLIED:
+            if self.positional:
+                if self._numeric_binds:
+                    self._process_numeric()
+                else:
+                    self._process_positional()
+
+            if self._render_postcompile:
+                parameters = self.construct_params(
+                    escape_names=False,
+                    _no_postcompile=True,
+                )
+
+                self._process_parameters_for_postcompile(
+                    parameters, _populate_self=True
+                )
+
+    @property
+    def insert_single_values_expr(self) -> Optional[str]:
+        """When an INSERT is compiled with a single set of parameters inside
+        a VALUES expression, the string is assigned here, where it can be
+        used for insert batching schemes to rewrite the VALUES expression.
+
+        .. versionadded:: 1.3.8
+
+        .. versionchanged:: 2.0 This collection is no longer used by
+           SQLAlchemy's built-in dialects, in favor of the currently
+           internal ``_insertmanyvalues`` collection that is used only by
+           :class:`.SQLCompiler`.
+
+        """
+        if self._insertmanyvalues is None:
+            return None
+        else:
+            return self._insertmanyvalues.single_values_expr
+
+    @util.ro_memoized_property
+    def effective_returning(self) -> Optional[Sequence[ColumnElement[Any]]]:
+        """The effective "returning" columns for INSERT, UPDATE or DELETE.
+
+        This is either the so-called "implicit returning" columns which are
+        calculated by the compiler on the fly, or those present based on what's
+        present in ``self.statement._returning`` (expanded into individual
+        columns using the ``._all_selected_columns`` attribute) i.e. those set
+        explicitly using the :meth:`.UpdateBase.returning` method.
+
+        .. versionadded:: 2.0
+
+        """
+        if self.implicit_returning:
+            return self.implicit_returning
+        elif self.statement is not None and is_dml(self.statement):
+            return [
+                c
+                for c in self.statement._all_selected_columns
+                if is_column_element(c)
+            ]
+
+        else:
+            return None
+
+    @property
+    def returning(self):
+        """backwards compatibility; returns the
+        effective_returning collection.
+
+        """
+        return self.effective_returning
+
+    @property
+    def current_executable(self):
+        """Return the current 'executable' that is being compiled.
+
+        This is currently the :class:`_sql.Select`, :class:`_sql.Insert`,
+        :class:`_sql.Update`, :class:`_sql.Delete`,
+        :class:`_sql.CompoundSelect` object that is being compiled.
+        Specifically it's assigned to the ``self.stack`` list of elements.
+
+        When a statement like the above is being compiled, it normally
+        is also assigned to the ``.statement`` attribute of the
+        :class:`_sql.Compiler` object.   However, all SQL constructs are
+        ultimately nestable, and this attribute should never be consulted
+        by a ``visit_`` method, as it is not guaranteed to be assigned
+        nor guaranteed to correspond to the current statement being compiled.
+
+        .. versionadded:: 1.3.21
+
+            For compatibility with previous versions, use the following
+            recipe::
+
+                statement = getattr(self, "current_executable", False)
+                if statement is False:
+                    statement = self.stack[-1]["selectable"]
+
+            For versions 1.4 and above, ensure only .current_executable
+            is used; the format of "self.stack" may change.
+
+
+        """
+        try:
+            return self.stack[-1]["selectable"]
+        except IndexError as ie:
+            raise IndexError("Compiler does not have a stack entry") from ie
+
+    @property
+    def prefetch(self):
+        return list(self.insert_prefetch) + list(self.update_prefetch)
+
+    @util.memoized_property
+    def _global_attributes(self) -> Dict[Any, Any]:
+        return {}
+
+    @util.memoized_instancemethod
+    def _init_cte_state(self) -> MutableMapping[CTE, str]:
+        """Initialize collections related to CTEs only if
+        a CTE is located, to save on the overhead of
+        these collections otherwise.
+
+        """
+        # collect CTEs to tack on top of a SELECT
+        # To store the query to print - Dict[cte, text_query]
+        ctes: MutableMapping[CTE, str] = util.OrderedDict()
+        self.ctes = ctes
+
+        # Detect same CTE references - Dict[(level, name), cte]
+        # Level is required for supporting nesting
+        self.ctes_by_level_name = {}
+
+        # To retrieve key/level in ctes_by_level_name -
+        # Dict[cte_reference, (level, cte_name, cte_opts)]
+        self.level_name_by_cte = {}
+
+        self.ctes_recursive = False
+
+        return ctes
+
+    @contextlib.contextmanager
+    def _nested_result(self):
+        """special API to support the use case of 'nested result sets'"""
+        result_columns, ordered_columns = (
+            self._result_columns,
+            self._ordered_columns,
+        )
+        self._result_columns, self._ordered_columns = [], False
+
+        try:
+            if self.stack:
+                entry = self.stack[-1]
+                entry["need_result_map_for_nested"] = True
+            else:
+                entry = None
+            yield self._result_columns, self._ordered_columns
+        finally:
+            if entry:
+                entry.pop("need_result_map_for_nested")
+            self._result_columns, self._ordered_columns = (
+                result_columns,
+                ordered_columns,
+            )
+
+    def _process_positional(self):
+        assert not self.positiontup
+        assert self.state is CompilerState.STRING_APPLIED
+        assert not self._numeric_binds
+
+        if self.dialect.paramstyle == "format":
+            placeholder = "%s"
+        else:
+            assert self.dialect.paramstyle == "qmark"
+            placeholder = "?"
+
+        positions = []
+
+        def find_position(m: re.Match[str]) -> str:
+            normal_bind = m.group(1)
+            if normal_bind:
+                positions.append(normal_bind)
+                return placeholder
+            else:
+                # this is a post-compile bind
+                positions.append(m.group(2))
+                return m.group(0)
+
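+        # e.g. (illustrative): under the "qmark" paramstyle,
+        # "WHERE x = %(x_1)s" is rewritten below as "WHERE x = ?",
+        # with positions == ["x_1"]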
+        self.string = re.sub(
+            self._positional_pattern, find_position, self.string
+        )
+
+        if self.escaped_bind_names:
+            reverse_escape = {v: k for k, v in self.escaped_bind_names.items()}
+            assert len(self.escaped_bind_names) == len(reverse_escape)
+            self.positiontup = [
+                reverse_escape.get(name, name) for name in positions
+            ]
+        else:
+            self.positiontup = positions
+
+        if self._insertmanyvalues:
+            positions = []
+
+            single_values_expr = re.sub(
+                self._positional_pattern,
+                find_position,
+                self._insertmanyvalues.single_values_expr,
+            )
+            insert_crud_params = [
+                (
+                    v[0],
+                    v[1],
+                    re.sub(self._positional_pattern, find_position, v[2]),
+                    v[3],
+                )
+                for v in self._insertmanyvalues.insert_crud_params
+            ]
+
+            self._insertmanyvalues = self._insertmanyvalues._replace(
+                single_values_expr=single_values_expr,
+                insert_crud_params=insert_crud_params,
+            )
+
+    def _process_numeric(self):
+        assert self._numeric_binds
+        assert self.state is CompilerState.STRING_APPLIED
+
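+        # e.g. (illustrative): with an identifier char of ":", the
+        # string "WHERE x = %(x_1)s AND y = %(y_1)s" is rewritten
+        # below as "WHERE x = :1 AND y = :2", with
+        # positiontup == ["x_1", "y_1"]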
+        num = 1
+        param_pos: Dict[str, str] = {}
+        order: Iterable[str]
+        if self._insertmanyvalues and self._values_bindparam is not None:
+            # bindparams that are not in values are always placed first.
+            # this avoids the need to change them when using executemany
+            # with multiple VALUES () () clauses
+            order = itertools.chain(
+                (
+                    name
+                    for name in self.bind_names.values()
+                    if name not in self._values_bindparam
+                ),
+                self.bind_names.values(),
+            )
+        else:
+            order = self.bind_names.values()
+
+        for bind_name in order:
+            if bind_name in param_pos:
+                continue
+            bind = self.binds[bind_name]
+            if (
+                bind in self.post_compile_params
+                or bind in self.literal_execute_params
+            ):
+                # set to None just to mark it in positiontup; it will
+                # not be replaced below.
+                param_pos[bind_name] = None  # type: ignore
+            else:
+                ph = f"{self._numeric_binds_identifier_char}{num}"
+                num += 1
+                param_pos[bind_name] = ph
+
+        self.next_numeric_pos = num
+
+        self.positiontup = list(param_pos)
+        if self.escaped_bind_names:
+            len_before = len(param_pos)
+            param_pos = {
+                self.escaped_bind_names.get(name, name): pos
+                for name, pos in param_pos.items()
+            }
+            assert len(param_pos) == len_before
+
+        # Can't use format here since % chars are not escaped.
+        self.string = self._pyformat_pattern.sub(
+            lambda m: param_pos[m.group(1)], self.string
+        )
+
+        if self._insertmanyvalues:
+            single_values_expr = (
+                # format is ok here since single_values_expr includes only
+                # place-holders
+                self._insertmanyvalues.single_values_expr
+                % param_pos
+            )
+            insert_crud_params = [
+                (v[0], v[1], "%s", v[3])
+                for v in self._insertmanyvalues.insert_crud_params
+            ]
+
+            self._insertmanyvalues = self._insertmanyvalues._replace(
+                # This has the numbers (:1, :2)
+                single_values_expr=single_values_expr,
+                # The single binds are instead %s so they can be formatted
+                insert_crud_params=insert_crud_params,
+            )
+
+    @util.memoized_property
+    def _bind_processors(
+        self,
+    ) -> MutableMapping[
+        str, Union[_BindProcessorType[Any], Sequence[_BindProcessorType[Any]]]
+    ]:
+        # mypy is not able to see the two value types as the above
+        # Union; it just sees "object".  unclear how to resolve
+        return {
+            key: value  # type: ignore
+            for key, value in (
+                (
+                    self.bind_names[bindparam],
+                    (
+                        bindparam.type._cached_bind_processor(self.dialect)
+                        if not bindparam.type._is_tuple_type
+                        else tuple(
+                            elem_type._cached_bind_processor(self.dialect)
+                            for elem_type in cast(
+                                TupleType, bindparam.type
+                            ).types
+                        )
+                    ),
+                )
+                for bindparam in self.bind_names
+            )
+            if value is not None
+        }
+
+    def is_subquery(self):
+        return len(self.stack) > 1
+
+    @property
+    def sql_compiler(self):
+        return self
+
+    def construct_expanded_state(
+        self,
+        params: Optional[_CoreSingleExecuteParams] = None,
+        escape_names: bool = True,
+    ) -> ExpandedState:
+        """Return a new :class:`.ExpandedState` for a given parameter set.
+
+        For queries that use "expanding" or other late-rendered parameters,
+        this method will provide for both the finalized SQL string as well
+        as the parameters that would be used for a particular parameter set.
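+
+        E.g., an illustrative sketch (``stmt`` is a hypothetical
+        statement using an expanding IN parameter named "q")::
+
+            compiled = stmt.compile()
+            state = compiled.construct_expanded_state({"q": [1, 2]})
+            state.statement   # e.g. "... WHERE x IN (:q_1, :q_2)"
+            state.parameters  # e.g. {"q_1": 1, "q_2": 2}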
+
+        .. versionadded:: 2.0.0rc1
+
+        """
+        parameters = self.construct_params(
+            params,
+            escape_names=escape_names,
+            _no_postcompile=True,
+        )
+        return self._process_parameters_for_postcompile(
+            parameters,
+        )
+
+    def construct_params(
+        self,
+        params: Optional[_CoreSingleExecuteParams] = None,
+        extracted_parameters: Optional[Sequence[BindParameter[Any]]] = None,
+        escape_names: bool = True,
+        _group_number: Optional[int] = None,
+        _check: bool = True,
+        _no_postcompile: bool = False,
+    ) -> _MutableCoreSingleExecuteParams:
+        """return a dictionary of bind parameter keys and values"""
+
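+        # e.g. (illustrative): construct_params({"x_1": 5}) returns a
+        # dictionary that uses 5 for the "x_1" bind and falls back to
+        # each remaining parameter's stored value or callable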
+        if self._render_postcompile and not _no_postcompile:
+            assert self._post_compile_expanded_state is not None
+            if not params:
+                return dict(self._post_compile_expanded_state.parameters)
+            else:
+                raise exc.InvalidRequestError(
+                    "can't construct new parameters when render_postcompile "
+                    "is used; the statement is hard-linked to the original "
+                    "parameters.  Use construct_expanded_state to generate a "
+                    "new statement and parameters."
+                )
+
+        has_escaped_names = escape_names and bool(self.escaped_bind_names)
+
+        if extracted_parameters:
+            # relate the bound parameters collected in the original cache
+            # key to those collected in the incoming cache key.  They will
+            # not have matching names but they will line up positionally
+            # in the same way.  The parameters present in self.bind_names
+            # may be clones of these original cache key params in the case
+            # of DML but the .key will be guaranteed to match.
+            if self.cache_key is None:
+                raise exc.CompileError(
+                    "This compiled object has no original cache key; "
+                    "can't pass extracted_parameters to construct_params"
+                )
+            else:
+                orig_extracted = self.cache_key[1]
+
+            ckbm_tuple = self._cache_key_bind_match
+            assert ckbm_tuple is not None
+            ckbm, _ = ckbm_tuple
+            resolved_extracted = {
+                bind: extracted
+                for b, extracted in zip(orig_extracted, extracted_parameters)
+                for bind in ckbm[b]
+            }
+        else:
+            resolved_extracted = None
+
+        if params:
+            pd = {}
+            for bindparam, name in self.bind_names.items():
+                escaped_name = (
+                    self.escaped_bind_names.get(name, name)
+                    if has_escaped_names
+                    else name
+                )
+
+                if bindparam.key in params:
+                    pd[escaped_name] = params[bindparam.key]
+                elif name in params:
+                    pd[escaped_name] = params[name]
+
+                elif _check and bindparam.required:
+                    if _group_number:
+                        raise exc.InvalidRequestError(
+                            "A value is required for bind parameter %r, "
+                            "in parameter group %d"
+                            % (bindparam.key, _group_number),
+                            code="cd3x",
+                        )
+                    else:
+                        raise exc.InvalidRequestError(
+                            "A value is required for bind parameter %r"
+                            % bindparam.key,
+                            code="cd3x",
+                        )
+                else:
+                    if resolved_extracted:
+                        value_param = resolved_extracted.get(
+                            bindparam, bindparam
+                        )
+                    else:
+                        value_param = bindparam
+
+                    if bindparam.callable:
+                        pd[escaped_name] = value_param.effective_value
+                    else:
+                        pd[escaped_name] = value_param.value
+            return pd
+        else:
+            pd = {}
+            for bindparam, name in self.bind_names.items():
+                escaped_name = (
+                    self.escaped_bind_names.get(name, name)
+                    if has_escaped_names
+                    else name
+                )
+
+                if _check and bindparam.required:
+                    if _group_number:
+                        raise exc.InvalidRequestError(
+                            "A value is required for bind parameter %r, "
+                            "in parameter group %d"
+                            % (bindparam.key, _group_number),
+                            code="cd3x",
+                        )
+                    else:
+                        raise exc.InvalidRequestError(
+                            "A value is required for bind parameter %r"
+                            % bindparam.key,
+                            code="cd3x",
+                        )
+
+                if resolved_extracted:
+                    value_param = resolved_extracted.get(bindparam, bindparam)
+                else:
+                    value_param = bindparam
+
+                if bindparam.callable:
+                    pd[escaped_name] = value_param.effective_value
+                else:
+                    pd[escaped_name] = value_param.value
+
+            return pd
+
+    @util.memoized_instancemethod
+    def _get_set_input_sizes_lookup(self):
+        dialect = self.dialect
+
+        include_types = dialect.include_set_input_sizes
+        exclude_types = dialect.exclude_set_input_sizes
+
+        dbapi = dialect.dbapi
+
+        def lookup_type(typ):
+            dbtype = typ._unwrapped_dialect_impl(dialect).get_dbapi_type(dbapi)
+
+            if (
+                dbtype is not None
+                and (exclude_types is None or dbtype not in exclude_types)
+                and (include_types is None or dbtype in include_types)
+            ):
+                return dbtype
+            else:
+                return None
+
+        inputsizes = {}
+
+        literal_execute_params = self.literal_execute_params
+
+        for bindparam in self.bind_names:
+            if bindparam in literal_execute_params:
+                continue
+
+            if bindparam.type._is_tuple_type:
+                inputsizes[bindparam] = [
+                    lookup_type(typ)
+                    for typ in cast(TupleType, bindparam.type).types
+                ]
+            else:
+                inputsizes[bindparam] = lookup_type(bindparam.type)
+
+        return inputsizes
+
+    @property
+    def params(self):
+        """Return the bind param dictionary embedded into this
+        compiled object, for those values that are present.
+
+        .. seealso::
+
+            :ref:`faq_sql_expression_string` - includes a usage example for
+            debugging use cases.
+
+        """
+        return self.construct_params(_check=False)
+
+    def _process_parameters_for_postcompile(
+        self,
+        parameters: _MutableCoreSingleExecuteParams,
+        _populate_self: bool = False,
+    ) -> ExpandedState:
+        """handle special post compile parameters.
+
+        These include:
+
+        * "expanding" parameters -typically IN tuples that are rendered
+          on a per-parameter basis for an otherwise fixed SQL statement string.
+
+        * literal_binds compiled with the literal_execute flag.  Used for
+          things like SQL Server "TOP N" where the driver does not accommodate
+          N as a bound parameter.
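+
+        E.g. (illustrative): a string compiled as
+        ``WHERE x IN (__[POSTCOMPILE_x_1])`` with parameters
+        ``{"x_1": [1, 2]}`` is expanded to ``WHERE x IN (:x_1_1, :x_1_2)``
+        with parameters ``{"x_1_1": 1, "x_1_2": 2}``.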
+
+        """
+
+        expanded_parameters = {}
+        new_positiontup: Optional[List[str]]
+
+        pre_expanded_string = self._pre_expanded_string
+        if pre_expanded_string is None:
+            pre_expanded_string = self.string
+
+        if self.positional:
+            new_positiontup = []
+
+            pre_expanded_positiontup = self._pre_expanded_positiontup
+            if pre_expanded_positiontup is None:
+                pre_expanded_positiontup = self.positiontup
+
+        else:
+            new_positiontup = pre_expanded_positiontup = None
+
+        processors = self._bind_processors
+        single_processors = cast(
+            "Mapping[str, _BindProcessorType[Any]]", processors
+        )
+        tuple_processors = cast(
+            "Mapping[str, Sequence[_BindProcessorType[Any]]]", processors
+        )
+
+        new_processors: Dict[str, _BindProcessorType[Any]] = {}
+
+        replacement_expressions: Dict[str, Any] = {}
+        to_update_sets: Dict[str, Any] = {}
+
+        # notes:
+        # *unescaped* parameter names in:
+        # self.bind_names, self.binds, self._bind_processors, self.positiontup
+        #
+        # *escaped* parameter names in:
+        # construct_params(), replacement_expressions
+
+        numeric_positiontup: Optional[List[str]] = None
+
+        if self.positional and pre_expanded_positiontup is not None:
+            names: Iterable[str] = pre_expanded_positiontup
+            if self._numeric_binds:
+                numeric_positiontup = []
+        else:
+            names = self.bind_names.values()
+
+        ebn = self.escaped_bind_names
+        for name in names:
+            escaped_name = ebn.get(name, name) if ebn else name
+            parameter = self.binds[name]
+
+            if parameter in self.literal_execute_params:
+                if escaped_name not in replacement_expressions:
+                    replacement_expressions[escaped_name] = (
+                        self.render_literal_bindparam(
+                            parameter,
+                            render_literal_value=parameters.pop(escaped_name),
+                        )
+                    )
+                continue
+
+            if parameter in self.post_compile_params:
+                if escaped_name in replacement_expressions:
+                    to_update = to_update_sets[escaped_name]
+                    values = None
+                else:
+                    # we are removing the parameter from parameters
+                    # because it is a list value, which is not expected by
+                    # TypeEngine objects that would otherwise be asked to
+                    # process it. the single name is being replaced with
+                    # individual numbered parameters for each value in the
+                    # param.
+                    #
+                    # note we are also inserting *escaped* parameter names
+                    # into the given dictionary.   default dialect will
+                    # use these param names directly as they will not be
+                    # in the escaped_bind_names dictionary.
+                    values = parameters.pop(name)
+
+                    leep_res = self._literal_execute_expanding_parameter(
+                        escaped_name, parameter, values
+                    )
+                    (to_update, replacement_expr) = leep_res
+
+                    to_update_sets[escaped_name] = to_update
+                    replacement_expressions[escaped_name] = replacement_expr
+
+                if not parameter.literal_execute:
+                    parameters.update(to_update)
+                    if parameter.type._is_tuple_type:
+                        assert values is not None
+                        new_processors.update(
+                            (
+                                "%s_%s_%s" % (name, i, j),
+                                tuple_processors[name][j - 1],
+                            )
+                            for i, tuple_element in enumerate(values, 1)
+                            for j, _ in enumerate(tuple_element, 1)
+                            if name in tuple_processors
+                            and tuple_processors[name][j - 1] is not None
+                        )
+                    else:
+                        new_processors.update(
+                            (key, single_processors[name])
+                            for key, _ in to_update
+                            if name in single_processors
+                        )
+                    if numeric_positiontup is not None:
+                        numeric_positiontup.extend(
+                            name for name, _ in to_update
+                        )
+                    elif new_positiontup is not None:
+                        # to_update has escaped names, but that's ok since
+                        # these are new names that aren't in the
+                        # escaped_bind_names dict.
+                        new_positiontup.extend(name for name, _ in to_update)
+                    expanded_parameters[name] = [
+                        expand_key for expand_key, _ in to_update
+                    ]
+            elif new_positiontup is not None:
+                new_positiontup.append(name)
+
+        def process_expanding(m):
+            key = m.group(1)
+            expr = replacement_expressions[key]
+
+            # if POSTCOMPILE included a bind_expression, render that
+            # around each element
+            if m.group(2):
+                tok = m.group(2).split("~~")
+                be_left, be_right = tok[1], tok[3]
+                expr = ", ".join(
+                    "%s%s%s" % (be_left, exp, be_right)
+                    for exp in expr.split(", ")
+                )
+            return expr
+
+        statement = re.sub(
+            self._post_compile_pattern, process_expanding, pre_expanded_string
+        )
+
+        if numeric_positiontup is not None:
+            assert new_positiontup is not None
+            param_pos = {
+                key: f"{self._numeric_binds_identifier_char}{num}"
+                for num, key in enumerate(
+                    numeric_positiontup, self.next_numeric_pos
+                )
+            }
+            # Can't use format here since % chars are not escaped.
+            statement = self._pyformat_pattern.sub(
+                lambda m: param_pos[m.group(1)], statement
+            )
+            new_positiontup.extend(numeric_positiontup)
+
+        expanded_state = ExpandedState(
+            statement,
+            parameters,
+            new_processors,
+            new_positiontup,
+            expanded_parameters,
+        )
+
+        if _populate_self:
+            # this is for the "render_postcompile" flag, which is not
+            # otherwise used internally and is for end-user debugging and
+            # special use cases.
+            self._pre_expanded_string = pre_expanded_string
+            self._pre_expanded_positiontup = pre_expanded_positiontup
+            self.string = expanded_state.statement
+            self.positiontup = (
+                list(expanded_state.positiontup or ())
+                if self.positional
+                else None
+            )
+            self._post_compile_expanded_state = expanded_state
+
+        return expanded_state
+
+    @util.preload_module("sqlalchemy.engine.cursor")
+    def _create_result_map(self):
+        """utility method used for unit tests only."""
+        cursor = util.preloaded.engine_cursor
+        return cursor.CursorResultMetaData._create_description_match_map(
+            self._result_columns
+        )
+
+    # assigned by crud.py for insert/update statements
+    _get_bind_name_for_col: _BindNameForColProtocol
+
+    @util.memoized_property
+    def _within_exec_param_key_getter(self) -> Callable[[Any], str]:
+        getter = self._get_bind_name_for_col
+        return getter
+
+    @util.memoized_property
+    @util.preload_module("sqlalchemy.engine.result")
+    def _inserted_primary_key_from_lastrowid_getter(self):
+        result = util.preloaded.engine_result
+
+        param_key_getter = self._within_exec_param_key_getter
+
+        assert self.compile_state is not None
+        statement = self.compile_state.statement
+
+        if TYPE_CHECKING:
+            assert isinstance(statement, Insert)
+
+        table = statement.table
+
+        getters = [
+            (operator.methodcaller("get", param_key_getter(col), None), col)
+            for col in table.primary_key
+        ]
+
+        autoinc_getter = None
+        autoinc_col = table._autoincrement_column
+        if autoinc_col is not None:
+            # apply type post processors to the lastrowid
+            lastrowid_processor = autoinc_col.type._cached_result_processor(
+                self.dialect, None
+            )
+            autoinc_key = param_key_getter(autoinc_col)
+
+            # if a bind value is present for the autoincrement column
+            # in the parameters, we need to do the logic dictated by
+            # #7998; honor a non-None user-passed parameter over lastrowid.
+            # previously in the 1.4 series we weren't fetching lastrowid
+            # at all if the key was present in the parameters
+            if autoinc_key in self.binds:
+
+                def _autoinc_getter(lastrowid, parameters):
+                    param_value = parameters.get(autoinc_key, lastrowid)
+                    if param_value is not None:
+                        # they supplied a non-None parameter, use that.
+                        # SQLite at least is observed to return the wrong
+                        # cursor.lastrowid for INSERT..ON CONFLICT so it
+                        # can't be used in all cases
+                        return param_value
+                    else:
+                        # use lastrowid
+                        return lastrowid
+
+                # work around mypy https://github.com/python/mypy/issues/14027
+                autoinc_getter = _autoinc_getter
+
+        else:
+            lastrowid_processor = None
+
+        row_fn = result.result_tuple([col.key for col in table.primary_key])
+
+        def get(lastrowid, parameters):
+            """given cursor.lastrowid value and the parameters used for INSERT,
+            return a "row" that represents the primary key, either by
+            using the "lastrowid" or by extracting values from the parameters
+            that were sent along with the INSERT.
+
+            """
+            if lastrowid_processor is not None:
+                lastrowid = lastrowid_processor(lastrowid)
+
+            if lastrowid is None:
+                return row_fn(getter(parameters) for getter, col in getters)
+            else:
+                return row_fn(
+                    (
+                        (
+                            autoinc_getter(lastrowid, parameters)
+                            if autoinc_getter is not None
+                            else lastrowid
+                        )
+                        if col is autoinc_col
+                        else getter(parameters)
+                    )
+                    for getter, col in getters
+                )
+
+        return get
+
+    @util.memoized_property
+    @util.preload_module("sqlalchemy.engine.result")
+    def _inserted_primary_key_from_returning_getter(self):
+        if typing.TYPE_CHECKING:
+            from ..engine import result
+        else:
+            result = util.preloaded.engine_result
+
+        assert self.compile_state is not None
+        statement = self.compile_state.statement
+
+        if TYPE_CHECKING:
+            assert isinstance(statement, Insert)
+
+        param_key_getter = self._within_exec_param_key_getter
+        table = statement.table
+
+        returning = self.implicit_returning
+        assert returning is not None
+        ret = {col: idx for idx, col in enumerate(returning)}
+
+        getters = cast(
+            "List[Tuple[Callable[[Any], Any], bool]]",
+            [
+                (
+                    (operator.itemgetter(ret[col]), True)
+                    if col in ret
+                    else (
+                        operator.methodcaller(
+                            "get", param_key_getter(col), None
+                        ),
+                        False,
+                    )
+                )
+                for col in table.primary_key
+            ],
+        )
+
+        row_fn = result.result_tuple([col.key for col in table.primary_key])
+
+        def get(row, parameters):
+            return row_fn(
+                getter(row) if use_row else getter(parameters)
+                for getter, use_row in getters
+            )
+
+        return get
+
+    def default_from(self):
+        """Called when a SELECT statement has no froms, and no FROM clause is
+        to be appended.
+
+        Gives Oracle Database a chance to tack on a ``FROM DUAL`` to the string
+        output.
+
+        """
+        return ""
+
+    def visit_override_binds(self, override_binds, **kw):
+        """SQL compile the nested element of an _OverrideBinds with
+        bindparams swapped out.
+
+        The _OverrideBinds is not normally expected to be compiled; it
+        is meant for the case where an already cached statement is being
+        reused and only the bound params should be swapped in at
+        execution time, the compilation having already been performed.
+
+        However, there are test cases that exercise this object, and
+        additionally the ORM subquery loader is known to feed in
+        expressions which include this construct into new queries
+        (discovered in #11173), so it has to do the right thing at
+        compile time as well.
+
+        """
+
+        # get SQL text first
+        sqltext = override_binds.element._compiler_dispatch(self, **kw)
+
+        # for a test compile that is not for caching, change binds after the
+        # fact.  note that we don't try to
+        # swap the bindparam as we compile, because our element may be
+        # elsewhere in the statement already (e.g. a subquery or perhaps a
+        # CTE) and was already visited / compiled. See
+        # test_relationship_criteria.py ->
+        #    test_selectinload_local_criteria_subquery
+        for k in override_binds.translate:
+            if k not in self.binds:
+                continue
+            bp = self.binds[k]
+
+            # so this would work, just change the value of bp in place.
+            # but we don't want to mutate things outside.
+            # bp.value = override_binds.translate[bp.key]
+            # continue
+
+            # instead, need to replace bp with new_bp or otherwise accommodate
+            # in all internal collections
+            new_bp = bp._with_value(
+                override_binds.translate[bp.key],
+                maintain_key=True,
+                required=False,
+            )
+
+            name = self.bind_names[bp]
+            self.binds[k] = self.binds[name] = new_bp
+            self.bind_names[new_bp] = name
+            self.bind_names.pop(bp, None)
+
+            if bp in self.post_compile_params:
+                self.post_compile_params |= {new_bp}
+            if bp in self.literal_execute_params:
+                self.literal_execute_params |= {new_bp}
+
+            ckbm_tuple = self._cache_key_bind_match
+            if ckbm_tuple:
+                ckbm, cksm = ckbm_tuple
+                for bp in bp._cloned_set:
+                    if bp.key in cksm:
+                        cb = cksm[bp.key]
+                        ckbm[cb].append(new_bp)
+
+        return sqltext
+
+    def visit_grouping(self, grouping, asfrom=False, **kwargs):
+        return "(" + grouping.element._compiler_dispatch(self, **kwargs) + ")"
+
+    def visit_select_statement_grouping(self, grouping, **kwargs):
+        return "(" + grouping.element._compiler_dispatch(self, **kwargs) + ")"
+
+    def visit_label_reference(
+        self, element, within_columns_clause=False, **kwargs
+    ):
+        if self.stack and self.dialect.supports_simple_order_by_label:
+            try:
+                compile_state = cast(
+                    "Union[SelectState, CompoundSelectState]",
+                    self.stack[-1]["compile_state"],
+                )
+            except KeyError as ke:
+                raise exc.CompileError(
+                    "Can't resolve label reference for ORDER BY / "
+                    "GROUP BY / DISTINCT etc."
+                ) from ke
+
+            (
+                with_cols,
+                only_froms,
+                only_cols,
+            ) = compile_state._label_resolve_dict
+            if within_columns_clause:
+                resolve_dict = only_froms
+            else:
+                resolve_dict = only_cols
+
+            # this can be None in the case that a _label_reference()
+            # was subject to a replacement operation, in which case
+            # the replacement of the Label element may have changed
+            # to something else like a ColumnClause expression.
+            order_by_elem = element.element._order_by_label_element
+
+            if (
+                order_by_elem is not None
+                and order_by_elem.name in resolve_dict
+                and order_by_elem.shares_lineage(
+                    resolve_dict[order_by_elem.name]
+                )
+            ):
+                kwargs["render_label_as_label"] = (
+                    element.element._order_by_label_element
+                )
+        return self.process(
+            element.element,
+            within_columns_clause=within_columns_clause,
+            **kwargs,
+        )
+
+    def visit_textual_label_reference(
+        self, element, within_columns_clause=False, **kwargs
+    ):
+        if not self.stack:
+            # compiling the element outside of the context of a SELECT
+            return self.process(element._text_clause)
+
+        try:
+            compile_state = cast(
+                "Union[SelectState, CompoundSelectState]",
+                self.stack[-1]["compile_state"],
+            )
+        except KeyError as ke:
+            coercions._no_text_coercion(
+                element.element,
+                extra=(
+                    "Can't resolve label reference for ORDER BY / "
+                    "GROUP BY / DISTINCT etc."
+                ),
+                exc_cls=exc.CompileError,
+                err=ke,
+            )
+
+        with_cols, only_froms, only_cols = compile_state._label_resolve_dict
+        try:
+            if within_columns_clause:
+                col = only_froms[element.element]
+            else:
+                col = with_cols[element.element]
+        except KeyError as err:
+            coercions._no_text_coercion(
+                element.element,
+                extra=(
+                    "Can't resolve label reference for ORDER BY / "
+                    "GROUP BY / DISTINCT etc."
+                ),
+                exc_cls=exc.CompileError,
+                err=err,
+            )
+        else:
+            kwargs["render_label_as_label"] = col
+            return self.process(
+                col, within_columns_clause=within_columns_clause, **kwargs
+            )
+
+    def visit_label(
+        self,
+        label,
+        add_to_result_map=None,
+        within_label_clause=False,
+        within_columns_clause=False,
+        render_label_as_label=None,
+        result_map_targets=(),
+        **kw,
+    ):
+        # only render labels within the columns clause
+        # or ORDER BY clause of a select.  dialect-specific compilers
+        # can modify this behavior.
+        render_label_with_as = (
+            within_columns_clause and not within_label_clause
+        )
+        render_label_only = render_label_as_label is label
+
+        if render_label_only or render_label_with_as:
+            if isinstance(label.name, elements._truncated_label):
+                labelname = self._truncated_identifier("colident", label.name)
+            else:
+                labelname = label.name
+
+        if render_label_with_as:
+            if add_to_result_map is not None:
+                add_to_result_map(
+                    labelname,
+                    label.name,
+                    (label, labelname) + label._alt_names + result_map_targets,
+                    label.type,
+                )
+            return (
+                label.element._compiler_dispatch(
+                    self,
+                    within_columns_clause=True,
+                    within_label_clause=True,
+                    **kw,
+                )
+                + OPERATORS[operators.as_]
+                + self.preparer.format_label(label, labelname)
+            )
+        elif render_label_only:
+            return self.preparer.format_label(label, labelname)
+        else:
+            return label.element._compiler_dispatch(
+                self, within_columns_clause=False, **kw
+            )
+
+    def _fallback_column_name(self, column):
+        raise exc.CompileError(
+            "Cannot compile Column object until its 'name' is assigned."
+        )
+
+    def visit_lambda_element(self, element, **kw):
+        sql_element = element._resolved
+        return self.process(sql_element, **kw)
+
+    def visit_column(
+        self,
+        column: ColumnClause[Any],
+        add_to_result_map: Optional[_ResultMapAppender] = None,
+        include_table: bool = True,
+        result_map_targets: Tuple[Any, ...] = (),
+        ambiguous_table_name_map: Optional[_AmbiguousTableNameMap] = None,
+        **kwargs: Any,
+    ) -> str:
+        name = orig_name = column.name
+        if name is None:
+            name = self._fallback_column_name(column)
+
+        is_literal = column.is_literal
+        if not is_literal and isinstance(name, elements._truncated_label):
+            name = self._truncated_identifier("colident", name)
+
+        if add_to_result_map is not None:
+            targets = (column, name, column.key) + result_map_targets
+            if column._tq_label:
+                targets += (column._tq_label,)
+
+            add_to_result_map(name, orig_name, targets, column.type)
+
+        if is_literal:
+            # note we are not currently accommodating
+            # literal_column(quoted_name('ident', True)) here
+            name = self.escape_literal_column(name)
+        else:
+            name = self.preparer.quote(name)
+        table = column.table
+        if table is None or not include_table or not table.named_with_column:
+            return name
+        else:
+            effective_schema = self.preparer.schema_for_object(table)
+
+            if effective_schema:
+                schema_prefix = (
+                    self.preparer.quote_schema(effective_schema) + "."
+                )
+            else:
+                schema_prefix = ""
+
+            if TYPE_CHECKING:
+                assert isinstance(table, NamedFromClause)
+            tablename = table.name
+
+            if (
+                not effective_schema
+                and ambiguous_table_name_map
+                and tablename in ambiguous_table_name_map
+            ):
+                tablename = ambiguous_table_name_map[tablename]
+
+            if isinstance(tablename, elements._truncated_label):
+                tablename = self._truncated_identifier("alias", tablename)
+
+            return schema_prefix + self.preparer.quote(tablename) + "." + name
+
+    def visit_collation(self, element, **kw):
+        return self.preparer.format_collation(element.collation)
+
+    def visit_fromclause(self, fromclause, **kwargs):
+        return fromclause.name
+
+    def visit_index(self, index, **kwargs):
+        return index.name
+
+    def visit_typeclause(self, typeclause, **kw):
+        kw["type_expression"] = typeclause
+        kw["identifier_preparer"] = self.preparer
+        return self.dialect.type_compiler_instance.process(
+            typeclause.type, **kw
+        )
+
+    def post_process_text(self, text):
+        if self.preparer._double_percents:
+            text = text.replace("%", "%%")
+        return text
+
+    def escape_literal_column(self, text):
+        if self.preparer._double_percents:
+            text = text.replace("%", "%%")
+        return text
+
+    def visit_textclause(self, textclause, add_to_result_map=None, **kw):
+        def do_bindparam(m):
+            name = m.group(1)
+            if name in textclause._bindparams:
+                return self.process(textclause._bindparams[name], **kw)
+            else:
+                return self.bindparam_string(name, **kw)
+
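+        # e.g. (illustrative): for text("x = :x"), ":x" is rendered as
+        # a bound parameter via do_bindparam; an escaped "\:q" is
+        # restored to a literal ":q" by the substitution below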
+        if not self.stack:
+            self.isplaintext = True
+
+        if add_to_result_map:
+            # text() object is present in the columns clause of a
+            # select().   Add a no-name entry to the result map so that
+            # row[text()] produces a result
+            add_to_result_map(None, None, (textclause,), sqltypes.NULLTYPE)
+
+        # un-escape any \:params
+        return BIND_PARAMS_ESC.sub(
+            lambda m: m.group(1),
+            BIND_PARAMS.sub(
+                do_bindparam, self.post_process_text(textclause.text)
+            ),
+        )
+
+    def visit_textual_select(
+        self, taf, compound_index=None, asfrom=False, **kw
+    ):
+        toplevel = not self.stack
+        entry = self._default_stack_entry if toplevel else self.stack[-1]
+
+        new_entry: _CompilerStackEntry = {
+            "correlate_froms": set(),
+            "asfrom_froms": set(),
+            "selectable": taf,
+        }
+        self.stack.append(new_entry)
+
+        if taf._independent_ctes:
+            self._dispatch_independent_ctes(taf, kw)
+
+        populate_result_map = (
+            toplevel
+            or (
+                compound_index == 0
+                and entry.get("need_result_map_for_compound", False)
+            )
+            or entry.get("need_result_map_for_nested", False)
+        )
+
+        if populate_result_map:
+            self._ordered_columns = self._textual_ordered_columns = (
+                taf.positional
+            )
+
+            # enable looser result column matching when the SQL text links to
+            # Column objects by name only
+            self._loose_column_name_matching = not taf.positional and bool(
+                taf.column_args
+            )
+
+            for c in taf.column_args:
+                self.process(
+                    c,
+                    within_columns_clause=True,
+                    add_to_result_map=self._add_to_result_map,
+                )
+
+        text = self.process(taf.element, **kw)
+        if self.ctes:
+            nesting_level = len(self.stack) if not toplevel else None
+            text = self._render_cte_clause(nesting_level=nesting_level) + text
+
+        self.stack.pop(-1)
+
+        return text
+
+    def visit_null(self, expr, **kw):
+        return "NULL"
+
+    def visit_true(self, expr, **kw):
+        if self.dialect.supports_native_boolean:
+            return "true"
+        else:
+            return "1"
+
+    def visit_false(self, expr, **kw):
+        if self.dialect.supports_native_boolean:
+            return "false"
+        else:
+            return "0"
+
+    def _generate_delimited_list(self, elements, separator, **kw):
+        return separator.join(
+            s
+            for s in (c._compiler_dispatch(self, **kw) for c in elements)
+            if s
+        )
+
+    def _generate_delimited_and_list(self, clauses, **kw):
+        lcc, clauses = elements.BooleanClauseList._process_clauses_for_boolean(
+            operators.and_,
+            elements.True_._singleton,
+            elements.False_._singleton,
+            clauses,
+        )
+        if lcc == 1:
+            return clauses[0]._compiler_dispatch(self, **kw)
+        else:
+            separator = OPERATORS[operators.and_]
+            return separator.join(
+                s
+                for s in (c._compiler_dispatch(self, **kw) for c in clauses)
+                if s
+            )
+
+    def visit_tuple(self, clauselist, **kw):
+        return "(%s)" % self.visit_clauselist(clauselist, **kw)
+
+    def visit_clauselist(self, clauselist, **kw):
+        sep = clauselist.operator
+        if sep is None:
+            sep = " "
+        else:
+            sep = OPERATORS[clauselist.operator]
+
+        return self._generate_delimited_list(clauselist.clauses, sep, **kw)
+
+    def visit_expression_clauselist(self, clauselist, **kw):
+        operator_ = clauselist.operator
+
+        disp = self._get_operator_dispatch(
+            operator_, "expression_clauselist", None
+        )
+        if disp:
+            return disp(clauselist, operator_, **kw)
+
+        try:
+            opstring = OPERATORS[operator_]
+        except KeyError as err:
+            raise exc.UnsupportedCompilationError(self, operator_) from err
+        else:
+            kw["_in_operator_expression"] = True
+            return self._generate_delimited_list(
+                clauselist.clauses, opstring, **kw
+            )
+
+    def visit_case(self, clause, **kwargs):
+        x = "CASE "
+        if clause.value is not None:
+            x += clause.value._compiler_dispatch(self, **kwargs) + " "
+        for cond, result in clause.whens:
+            x += (
+                "WHEN "
+                + cond._compiler_dispatch(self, **kwargs)
+                + " THEN "
+                + result._compiler_dispatch(self, **kwargs)
+                + " "
+            )
+        if clause.else_ is not None:
+            x += (
+                "ELSE " + clause.else_._compiler_dispatch(self, **kwargs) + " "
+            )
+        x += "END"
+        return x
+
+    def visit_type_coerce(self, type_coerce, **kw):
+        return type_coerce.typed_expression._compiler_dispatch(self, **kw)
+
+    def visit_cast(self, cast, **kwargs):
+        type_clause = cast.typeclause._compiler_dispatch(self, **kwargs)
+        match = re.match("(.*)( COLLATE .*)", type_clause)
+        return "CAST(%s AS %s)%s" % (
+            cast.clause._compiler_dispatch(self, **kwargs),
+            match.group(1) if match else type_clause,
+            match.group(2) if match else "",
+        )
+
+    def _format_frame_clause(self, range_, **kw):
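+        # e.g. (illustrative): a range_ of (-5, elements.RANGE_CURRENT)
+        # renders "5 PRECEDING AND CURRENT ROW"; two RANGE_UNBOUNDED
+        # bounds render "UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING"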
+        return "%s AND %s" % (
+            (
+                "UNBOUNDED PRECEDING"
+                if range_[0] is elements.RANGE_UNBOUNDED
+                else (
+                    "CURRENT ROW"
+                    if range_[0] is elements.RANGE_CURRENT
+                    else (
+                        "%s PRECEDING"
+                        % (
+                            self.process(
+                                elements.literal(abs(range_[0])), **kw
+                            ),
+                        )
+                        if range_[0] < 0
+                        else "%s FOLLOWING"
+                        % (self.process(elements.literal(range_[0]), **kw),)
+                    )
+                )
+            ),
+            (
+                "UNBOUNDED FOLLOWING"
+                if range_[1] is elements.RANGE_UNBOUNDED
+                else (
+                    "CURRENT ROW"
+                    if range_[1] is elements.RANGE_CURRENT
+                    else (
+                        "%s PRECEDING"
+                        % (
+                            self.process(
+                                elements.literal(abs(range_[1])), **kw
+                            ),
+                        )
+                        if range_[1] < 0
+                        else "%s FOLLOWING"
+                        % (self.process(elements.literal(range_[1]), **kw),)
+                    )
+                )
+            ),
+        )
+
+    def visit_over(self, over, **kwargs):
+        text = over.element._compiler_dispatch(self, **kwargs)
+        if over.range_:
+            range_ = "RANGE BETWEEN %s" % self._format_frame_clause(
+                over.range_, **kwargs
+            )
+        elif over.rows:
+            range_ = "ROWS BETWEEN %s" % self._format_frame_clause(
+                over.rows, **kwargs
+            )
+        else:
+            range_ = None
+
+        return "%s OVER (%s)" % (
+            text,
+            " ".join(
+                [
+                    "%s BY %s"
+                    % (word, clause._compiler_dispatch(self, **kwargs))
+                    for word, clause in (
+                        ("PARTITION", over.partition_by),
+                        ("ORDER", over.order_by),
+                    )
+                    if clause is not None and len(clause)
+                ]
+                + ([range_] if range_ else [])
+            ),
+        )
+
+    def visit_withingroup(self, withingroup, **kwargs):
+        return "%s WITHIN GROUP (ORDER BY %s)" % (
+            withingroup.element._compiler_dispatch(self, **kwargs),
+            withingroup.order_by._compiler_dispatch(self, **kwargs),
+        )
+
+    def visit_funcfilter(self, funcfilter, **kwargs):
+        return "%s FILTER (WHERE %s)" % (
+            funcfilter.func._compiler_dispatch(self, **kwargs),
+            funcfilter.criterion._compiler_dispatch(self, **kwargs),
+        )
+
+    def visit_extract(self, extract, **kwargs):
+        field = self.extract_map.get(extract.field, extract.field)
+        return "EXTRACT(%s FROM %s)" % (
+            field,
+            extract.expr._compiler_dispatch(self, **kwargs),
+        )
+
+    def visit_scalar_function_column(self, element, **kw):
+        compiled_fn = self.visit_function(element.fn, **kw)
+        compiled_col = self.visit_column(element, **kw)
+        return "(%s).%s" % (compiled_fn, compiled_col)
+
+    def visit_function(
+        self,
+        func: Function[Any],
+        add_to_result_map: Optional[_ResultMapAppender] = None,
+        **kwargs: Any,
+    ) -> str:
+        if add_to_result_map is not None:
+            add_to_result_map(func.name, func.name, (func.name,), func.type)
+
+        disp = getattr(self, "visit_%s_func" % func.name.lower(), None)
+
+        text: str
+
+        if disp:
+            text = disp(func, **kwargs)
+        else:
+            name = FUNCTIONS.get(func._deannotate().__class__, None)
+            if name:
+                if func._has_args:
+                    name += "%(expr)s"
+            else:
+                name = func.name
+                name = (
+                    self.preparer.quote(name)
+                    if self.preparer._requires_quotes_illegal_chars(name)
+                    or isinstance(name, elements.quoted_name)
+                    else name
+                )
+                name = name + "%(expr)s"
+            text = ".".join(
+                [
+                    (
+                        self.preparer.quote(tok)
+                        if self.preparer._requires_quotes_illegal_chars(tok)
+                        or isinstance(tok, elements.quoted_name)
+                        else tok
+                    )
+                    for tok in func.packagenames
+                ]
+                + [name]
+            ) % {"expr": self.function_argspec(func, **kwargs)}
+
+        if func._with_ordinality:
+            text += " WITH ORDINALITY"
+        return text
+
+    def visit_next_value_func(self, next_value, **kw):
+        return self.visit_sequence(next_value.sequence)
+
+    def visit_sequence(self, sequence, **kw):
+        raise NotImplementedError(
+            "Dialect '%s' does not support sequence increments."
+            % self.dialect.name
+        )
+
+    def function_argspec(self, func, **kwargs):
+        return func.clause_expr._compiler_dispatch(self, **kwargs)
+
+    def visit_compound_select(
+        self, cs, asfrom=False, compound_index=None, **kwargs
+    ):
+        toplevel = not self.stack
+
+        compile_state = cs._compile_state_factory(cs, self, **kwargs)
+
+        if toplevel and not self.compile_state:
+            self.compile_state = compile_state
+
+        compound_stmt = compile_state.statement
+
+        entry = self._default_stack_entry if toplevel else self.stack[-1]
+        need_result_map = toplevel or (
+            not compound_index
+            and entry.get("need_result_map_for_compound", False)
+        )
+
+        # indicates there is already a CompoundSelect in play
+        if compound_index == 0:
+            entry["select_0"] = cs
+
+        self.stack.append(
+            {
+                "correlate_froms": entry["correlate_froms"],
+                "asfrom_froms": entry["asfrom_froms"],
+                "selectable": cs,
+                "compile_state": compile_state,
+                "need_result_map_for_compound": need_result_map,
+            }
+        )
+
+        if compound_stmt._independent_ctes:
+            self._dispatch_independent_ctes(compound_stmt, kwargs)
+
+        keyword = self.compound_keywords[cs.keyword]
+
+        text = (" " + keyword + " ").join(
+            (
+                c._compiler_dispatch(
+                    self, asfrom=asfrom, compound_index=i, **kwargs
+                )
+                for i, c in enumerate(cs.selects)
+            )
+        )
+
+        kwargs["include_table"] = False
+        text += self.group_by_clause(cs, **dict(asfrom=asfrom, **kwargs))
+        text += self.order_by_clause(cs, **kwargs)
+        if cs._has_row_limiting_clause:
+            text += self._row_limit_clause(cs, **kwargs)
+
+        if self.ctes:
+            nesting_level = len(self.stack) if not toplevel else None
+            text = (
+                self._render_cte_clause(
+                    nesting_level=nesting_level,
+                    include_following_stack=True,
+                )
+                + text
+            )
+
+        self.stack.pop(-1)
+        return text
+
+    def _row_limit_clause(self, cs, **kwargs):
+        if cs._fetch_clause is not None:
+            return self.fetch_clause(cs, **kwargs)
+        else:
+            return self.limit_clause(cs, **kwargs)
+
+    def _get_operator_dispatch(self, operator_, qualifier1, qualifier2):
+        attrname = "visit_%s_%s%s" % (
+            operator_.__name__,
+            qualifier1,
+            "_" + qualifier2 if qualifier2 else "",
+        )
+        return getattr(self, attrname, None)
+
+    def visit_unary(
+        self, unary, add_to_result_map=None, result_map_targets=(), **kw
+    ):
+        if add_to_result_map is not None:
+            result_map_targets += (unary,)
+            kw["add_to_result_map"] = add_to_result_map
+            kw["result_map_targets"] = result_map_targets
+
+        if unary.operator:
+            if unary.modifier:
+                raise exc.CompileError(
+                    "Unary expression does not support operator "
+                    "and modifier simultaneously"
+                )
+            disp = self._get_operator_dispatch(
+                unary.operator, "unary", "operator"
+            )
+            if disp:
+                return disp(unary, unary.operator, **kw)
+            else:
+                return self._generate_generic_unary_operator(
+                    unary, OPERATORS[unary.operator], **kw
+                )
+        elif unary.modifier:
+            disp = self._get_operator_dispatch(
+                unary.modifier, "unary", "modifier"
+            )
+            if disp:
+                return disp(unary, unary.modifier, **kw)
+            else:
+                return self._generate_generic_unary_modifier(
+                    unary, OPERATORS[unary.modifier], **kw
+                )
+        else:
+            raise exc.CompileError(
+                "Unary expression has no operator or modifier"
+            )
+
+    def visit_truediv_binary(self, binary, operator, **kw):
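+        # e.g. (illustrative): on a dialect where "/" floors integer
+        # operands, integer truediv renders the right side with a cast,
+        # e.g. "a / CAST(b AS NUMERIC)", so that true division is
+        # preserved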
+        if self.dialect.div_is_floordiv:
+            return (
+                self.process(binary.left, **kw)
+                + " / "
+                # TODO: would need a fast cast again here,
+                # unless we want to use an implicit cast like "+ 0.0"
+                + self.process(
+                    elements.Cast(
+                        binary.right,
+                        (
+                            binary.right.type
+                            if binary.right.type._type_affinity
+                            is sqltypes.Numeric
+                            else sqltypes.Numeric()
+                        ),
+                    ),
+                    **kw,
+                )
+            )
+        else:
+            return (
+                self.process(binary.left, **kw)
+                + " / "
+                + self.process(binary.right, **kw)
+            )
+
+    def visit_floordiv_binary(self, binary, operator, **kw):
+        if (
+            self.dialect.div_is_floordiv
+            and binary.right.type._type_affinity is sqltypes.Integer
+        ):
+            return (
+                self.process(binary.left, **kw)
+                + " / "
+                + self.process(binary.right, **kw)
+            )
+        else:
+            return "FLOOR(%s)" % (
+                self.process(binary.left, **kw)
+                + " / "
+                + self.process(binary.right, **kw)
+            )
+
+    def visit_is_true_unary_operator(self, element, operator, **kw):
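+        # IS TRUE is emulated as "<expr> = 1" on backends without a
+        # native boolean type (visit_is_false_unary_operator below
+        # similarly emits "<expr> = 0"); implicitly boolean expressions
+        # render unchanged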
+        if (
+            element._is_implicitly_boolean
+            or self.dialect.supports_native_boolean
+        ):
+            return self.process(element.element, **kw)
+        else:
+            return "%s = 1" % self.process(element.element, **kw)
+
+    def visit_is_false_unary_operator(self, element, operator, **kw):
+        if (
+            element._is_implicitly_boolean
+            or self.dialect.supports_native_boolean
+        ):
+            return "NOT %s" % self.process(element.element, **kw)
+        else:
+            return "%s = 0" % self.process(element.element, **kw)
+
+    def visit_not_match_op_binary(self, binary, operator, **kw):
+        return "NOT %s" % self.visit_binary(
+            binary, override_operator=operators.match_op
+        )
+
+    def visit_not_in_op_binary(self, binary, operator, **kw):
+        # The brackets are required in the NOT IN operation because the empty
+        # case is handled using the form "(col NOT IN (null) OR 1 = 1)".
+        # The presence of the OR makes the brackets required.
+        return "(%s)" % self._generate_generic_binary(
+            binary, OPERATORS[operator], **kw
+        )
+
+    def visit_empty_set_op_expr(self, type_, expand_op, **kw):
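+        # render the fragment spliced into an empty IN / NOT IN list;
+        # combined with the parentheses wrapping the expanding parameter,
+        # e.g. col.in_([]) becomes "col IN (NULL) AND (1 != 1)" (always
+        # false) and the NOT IN form becomes always-true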
+        if expand_op is operators.not_in_op:
+            if len(type_) > 1:
+                return "(%s)) OR (1 = 1" % (
+                    ", ".join("NULL" for element in type_)
+                )
+            else:
+                return "NULL) OR (1 = 1"
+        elif expand_op is operators.in_op:
+            if len(type_) > 1:
+                return "(%s)) AND (1 != 1" % (
+                    ", ".join("NULL" for element in type_)
+                )
+            else:
+                return "NULL) AND (1 != 1"
+        else:
+            return self.visit_empty_set_expr(type_)
+
+    def visit_empty_set_expr(self, element_types, **kw):
+        raise NotImplementedError(
+            "Dialect '%s' does not support empty set expression."
+            % self.dialect.name
+        )
+
+    def _literal_execute_expanding_parameter_literal_binds(
+        self, parameter, values, bind_expression_template=None
+    ):
+        typ_dialect_impl = parameter.type._unwrapped_dialect_impl(self.dialect)
+
+        if not values:
+            # empty IN expression.  note we don't need to use
+            # bind_expression_template here because there are no
+            # expressions to render.
+
+            if typ_dialect_impl._is_tuple_type:
+                replacement_expression = (
+                    "VALUES " if self.dialect.tuple_in_values else ""
+                ) + self.visit_empty_set_op_expr(
+                    parameter.type.types, parameter.expand_op
+                )
+
+            else:
+                replacement_expression = self.visit_empty_set_op_expr(
+                    [parameter.type], parameter.expand_op
+                )
+
+        elif typ_dialect_impl._is_tuple_type or (
+            typ_dialect_impl._isnull
+            and isinstance(values[0], collections_abc.Sequence)
+            and not isinstance(values[0], (str, bytes))
+        ):
+            if typ_dialect_impl._has_bind_expression:
+                raise NotImplementedError(
+                    "bind_expression() on TupleType not supported with "
+                    "literal_binds"
+                )
+
+            replacement_expression = (
+                "VALUES " if self.dialect.tuple_in_values else ""
+            ) + ", ".join(
+                "(%s)"
+                % (
+                    ", ".join(
+                        self.render_literal_value(value, param_type)
+                        for value, param_type in zip(
+                            tuple_element, parameter.type.types
+                        )
+                    )
+                )
+                for tuple_element in values
+            )
+        else:
+            if bind_expression_template:
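+                # the template embeds the bind_expression() SQL around
+                # the expanding token as
+                # "__[POSTCOMPILE_<name>~~<left>~~REPL~~<right>~~]";
+                # recover <left> and <right> and wrap each rendered
+                # literal value with them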
+                post_compile_pattern = self._post_compile_pattern
+                m = post_compile_pattern.search(bind_expression_template)
+                assert m and m.group(
+                    2
+                ), "unexpected format for expanding parameter"
+
+                tok = m.group(2).split("~~")
+                be_left, be_right = tok[1], tok[3]
+                replacement_expression = ", ".join(
+                    "%s%s%s"
+                    % (
+                        be_left,
+                        self.render_literal_value(value, parameter.type),
+                        be_right,
+                    )
+                    for value in values
+                )
+            else:
+                replacement_expression = ", ".join(
+                    self.render_literal_value(value, parameter.type)
+                    for value in values
+                )
+
+        return (), replacement_expression
+
+    def _literal_execute_expanding_parameter(self, name, parameter, values):
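+        # expand a single "expanding" parameter into per-value bound
+        # parameters named <name>_1..<name>_N (or <name>_<i>_<j> for
+        # tuple values), returning the (param name, value) pairs to add
+        # and the SQL text that replaces the POSTCOMPILE token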
+        if parameter.literal_execute:
+            return self._literal_execute_expanding_parameter_literal_binds(
+                parameter, values
+            )
+
+        dialect = self.dialect
+        typ_dialect_impl = parameter.type._unwrapped_dialect_impl(dialect)
+
+        if self._numeric_binds:
+            bind_template = self.compilation_bindtemplate
+        else:
+            bind_template = self.bindtemplate
+
+        if (
+            self.dialect._bind_typing_render_casts
+            and typ_dialect_impl.render_bind_cast
+        ):
+
+            def _render_bindtemplate(name):
+                return self.render_bind_cast(
+                    parameter.type,
+                    typ_dialect_impl,
+                    bind_template % {"name": name},
+                )
+
+        else:
+
+            def _render_bindtemplate(name):
+                return bind_template % {"name": name}
+
+        if not values:
+            to_update = []
+            if typ_dialect_impl._is_tuple_type:
+                replacement_expression = self.visit_empty_set_op_expr(
+                    parameter.type.types, parameter.expand_op
+                )
+            else:
+                replacement_expression = self.visit_empty_set_op_expr(
+                    [parameter.type], parameter.expand_op
+                )
+
+        elif typ_dialect_impl._is_tuple_type or (
+            typ_dialect_impl._isnull
+            and isinstance(values[0], collections_abc.Sequence)
+            and not isinstance(values[0], (str, bytes))
+        ):
+            assert not typ_dialect_impl._is_array
+            to_update = [
+                ("%s_%s_%s" % (name, i, j), value)
+                for i, tuple_element in enumerate(values, 1)
+                for j, value in enumerate(tuple_element, 1)
+            ]
+
+            replacement_expression = (
+                "VALUES " if dialect.tuple_in_values else ""
+            ) + ", ".join(
+                "(%s)"
+                % (
+                    ", ".join(
+                        _render_bindtemplate(
+                            to_update[i * len(tuple_element) + j][0]
+                        )
+                        for j, value in enumerate(tuple_element)
+                    )
+                )
+                for i, tuple_element in enumerate(values)
+            )
+        else:
+            to_update = [
+                ("%s_%s" % (name, i), value)
+                for i, value in enumerate(values, 1)
+            ]
+            replacement_expression = ", ".join(
+                _render_bindtemplate(key) for key, value in to_update
+            )
+
+        return to_update, replacement_expression
+
+    def visit_binary(
+        self,
+        binary,
+        override_operator=None,
+        eager_grouping=False,
+        from_linter=None,
+        lateral_from_linter=None,
+        **kw,
+    ):
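+        # comparison operators feed the from_linter: the FROM objects
+        # on either side of the comparison become graph edges, so that
+        # FROM elements joined to nothing can be flagged as accidental
+        # cartesian products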
+        if from_linter and operators.is_comparison(binary.operator):
+            if lateral_from_linter is not None:
+                enclosing_lateral = kw["enclosing_lateral"]
+                lateral_from_linter.edges.update(
+                    itertools.product(
+                        _de_clone(
+                            binary.left._from_objects + [enclosing_lateral]
+                        ),
+                        _de_clone(
+                            binary.right._from_objects + [enclosing_lateral]
+                        ),
+                    )
+                )
+            else:
+                from_linter.edges.update(
+                    itertools.product(
+                        _de_clone(binary.left._from_objects),
+                        _de_clone(binary.right._from_objects),
+                    )
+                )
+
+        # don't allow "? = ?" to render
+        if (
+            self.ansi_bind_rules
+            and isinstance(binary.left, elements.BindParameter)
+            and isinstance(binary.right, elements.BindParameter)
+        ):
+            kw["literal_execute"] = True
+
+        operator_ = override_operator or binary.operator
+        disp = self._get_operator_dispatch(operator_, "binary", None)
+        if disp:
+            return disp(binary, operator_, **kw)
+        else:
+            try:
+                opstring = OPERATORS[operator_]
+            except KeyError as err:
+                raise exc.UnsupportedCompilationError(self, operator_) from err
+            else:
+                return self._generate_generic_binary(
+                    binary,
+                    opstring,
+                    from_linter=from_linter,
+                    lateral_from_linter=lateral_from_linter,
+                    **kw,
+                )
+
+    def visit_function_as_comparison_op_binary(self, element, operator, **kw):
+        return self.process(element.sql_function, **kw)
+
+    def visit_mod_binary(self, binary, operator, **kw):
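+        # with a %-based DBAPI paramstyle (e.g. "pyformat"), a literal
+        # "%" must be doubled so it survives the driver's interpolation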
+        if self.preparer._double_percents:
+            return (
+                self.process(binary.left, **kw)
+                + " %% "
+                + self.process(binary.right, **kw)
+            )
+        else:
+            return (
+                self.process(binary.left, **kw)
+                + " % "
+                + self.process(binary.right, **kw)
+            )
+
+    def visit_custom_op_binary(self, element, operator, **kw):
+        kw["eager_grouping"] = operator.eager_grouping
+        return self._generate_generic_binary(
+            element,
+            " " + self.escape_literal_column(operator.opstring) + " ",
+            **kw,
+        )
+
+    def visit_custom_op_unary_operator(self, element, operator, **kw):
+        return self._generate_generic_unary_operator(
+            element, self.escape_literal_column(operator.opstring) + " ", **kw
+        )
+
+    def visit_custom_op_unary_modifier(self, element, operator, **kw):
+        return self._generate_generic_unary_modifier(
+            element, " " + self.escape_literal_column(operator.opstring), **kw
+        )
+
+    def _generate_generic_binary(
+        self, binary, opstring, eager_grouping=False, **kw
+    ):
+        _in_operator_expression = kw.get("_in_operator_expression", False)
+
+        kw["_in_operator_expression"] = True
+        kw["_binary_op"] = binary.operator
+        text = (
+            binary.left._compiler_dispatch(
+                self, eager_grouping=eager_grouping, **kw
+            )
+            + opstring
+            + binary.right._compiler_dispatch(
+                self, eager_grouping=eager_grouping, **kw
+            )
+        )
+
+        if _in_operator_expression and eager_grouping:
+            text = "(%s)" % text
+        return text
+
+    def _generate_generic_unary_operator(self, unary, opstring, **kw):
+        return opstring + unary.element._compiler_dispatch(self, **kw)
+
+    def _generate_generic_unary_modifier(self, unary, opstring, **kw):
+        return unary.element._compiler_dispatch(self, **kw) + opstring
+
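+    # a literal "%" wildcard shared by the contains/startswith/endswith
+    # handlers below; e.g. visit_contains_op_binary rewrites
+    # "x.contains(y)" into "x LIKE '%' || y || '%'" (via the dialect's
+    # concatenation operator)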
+    @util.memoized_property
+    def _like_percent_literal(self):
+        return elements.literal_column("'%'", type_=sqltypes.STRINGTYPE)
+
+    def visit_ilike_case_insensitive_operand(self, element, **kw):
+        return f"lower({element.element._compiler_dispatch(self, **kw)})"
+
+    def visit_contains_op_binary(self, binary, operator, **kw):
+        binary = binary._clone()
+        percent = self._like_percent_literal
+        binary.right = percent.concat(binary.right).concat(percent)
+        return self.visit_like_op_binary(binary, operator, **kw)
+
+    def visit_not_contains_op_binary(self, binary, operator, **kw):
+        binary = binary._clone()
+        percent = self._like_percent_literal
+        binary.right = percent.concat(binary.right).concat(percent)
+        return self.visit_not_like_op_binary(binary, operator, **kw)
+
+    def visit_icontains_op_binary(self, binary, operator, **kw):
+        binary = binary._clone()
+        percent = self._like_percent_literal
+        binary.left = ilike_case_insensitive(binary.left)
+        binary.right = percent.concat(
+            ilike_case_insensitive(binary.right)
+        ).concat(percent)
+        return self.visit_ilike_op_binary(binary, operator, **kw)
+
+    def visit_not_icontains_op_binary(self, binary, operator, **kw):
+        binary = binary._clone()
+        percent = self._like_percent_literal
+        binary.left = ilike_case_insensitive(binary.left)
+        binary.right = percent.concat(
+            ilike_case_insensitive(binary.right)
+        ).concat(percent)
+        return self.visit_not_ilike_op_binary(binary, operator, **kw)
+
+    def visit_startswith_op_binary(self, binary, operator, **kw):
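+        # startswith renders "x LIKE y || '%'"; _rconcat puts the
+        # percent on the right so the wildcard follows the pattern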
+        binary = binary._clone()
+        percent = self._like_percent_literal
+        binary.right = percent._rconcat(binary.right)
+        return self.visit_like_op_binary(binary, operator, **kw)
+
+    def visit_not_startswith_op_binary(self, binary, operator, **kw):
+        binary = binary._clone()
+        percent = self._like_percent_literal
+        binary.right = percent._rconcat(binary.right)
+        return self.visit_not_like_op_binary(binary, operator, **kw)
+
+    def visit_istartswith_op_binary(self, binary, operator, **kw):
+        binary = binary._clone()
+        percent = self._like_percent_literal
+        binary.left = ilike_case_insensitive(binary.left)
+        binary.right = percent._rconcat(ilike_case_insensitive(binary.right))
+        return self.visit_ilike_op_binary(binary, operator, **kw)
+
+    def visit_not_istartswith_op_binary(self, binary, operator, **kw):
+        binary = binary._clone()
+        percent = self._like_percent_literal
+        binary.left = ilike_case_insensitive(binary.left)
+        binary.right = percent._rconcat(ilike_case_insensitive(binary.right))
+        return self.visit_not_ilike_op_binary(binary, operator, **kw)
+
+    def visit_endswith_op_binary(self, binary, operator, **kw):
+        binary = binary._clone()
+        percent = self._like_percent_literal
+        binary.right = percent.concat(binary.right)
+        return self.visit_like_op_binary(binary, operator, **kw)
+
+    def visit_not_endswith_op_binary(self, binary, operator, **kw):
+        binary = binary._clone()
+        percent = self._like_percent_literal
+        binary.right = percent.concat(binary.right)
+        return self.visit_not_like_op_binary(binary, operator, **kw)
+
+    def visit_iendswith_op_binary(self, binary, operator, **kw):
+        binary = binary._clone()
+        percent = self._like_percent_literal
+        binary.left = ilike_case_insensitive(binary.left)
+        binary.right = percent.concat(ilike_case_insensitive(binary.right))
+        return self.visit_ilike_op_binary(binary, operator, **kw)
+
+    def visit_not_iendswith_op_binary(self, binary, operator, **kw):
+        binary = binary._clone()
+        percent = self._like_percent_literal
+        binary.left = ilike_case_insensitive(binary.left)
+        binary.right = percent.concat(ilike_case_insensitive(binary.right))
+        return self.visit_not_ilike_op_binary(binary, operator, **kw)
+
+    def visit_like_op_binary(self, binary, operator, **kw):
+        escape = binary.modifiers.get("escape", None)
+
+        return "%s LIKE %s" % (
+            binary.left._compiler_dispatch(self, **kw),
+            binary.right._compiler_dispatch(self, **kw),
+        ) + (
+            " ESCAPE " + self.render_literal_value(escape, sqltypes.STRINGTYPE)
+            if escape is not None
+            else ""
+        )
+
+    def visit_not_like_op_binary(self, binary, operator, **kw):
+        escape = binary.modifiers.get("escape", None)
+        return "%s NOT LIKE %s" % (
+            binary.left._compiler_dispatch(self, **kw),
+            binary.right._compiler_dispatch(self, **kw),
+        ) + (
+            " ESCAPE " + self.render_literal_value(escape, sqltypes.STRINGTYPE)
+            if escape is not None
+            else ""
+        )
+
+    def visit_ilike_op_binary(self, binary, operator, **kw):
+        if operator is operators.ilike_op:
+            binary = binary._clone()
+            binary.left = ilike_case_insensitive(binary.left)
+            binary.right = ilike_case_insensitive(binary.right)
+        # else we assume lower() has already been applied via
+        # ilike_case_insensitive()
+
+        return self.visit_like_op_binary(binary, operator, **kw)
+
+    def visit_not_ilike_op_binary(self, binary, operator, **kw):
+        if operator is operators.not_ilike_op:
+            binary = binary._clone()
+            binary.left = ilike_case_insensitive(binary.left)
+            binary.right = ilike_case_insensitive(binary.right)
+        # else we assume lower() has already been applied via
+        # ilike_case_insensitive()
+
+        return self.visit_not_like_op_binary(binary, operator, **kw)
+
+    def visit_between_op_binary(self, binary, operator, **kw):
+        symmetric = binary.modifiers.get("symmetric", False)
+        return self._generate_generic_binary(
+            binary, " BETWEEN SYMMETRIC " if symmetric else " BETWEEN ", **kw
+        )
+
+    def visit_not_between_op_binary(self, binary, operator, **kw):
+        symmetric = binary.modifiers.get("symmetric", False)
+        return self._generate_generic_binary(
+            binary,
+            " NOT BETWEEN SYMMETRIC " if symmetric else " NOT BETWEEN ",
+            **kw,
+        )
+
+    def visit_regexp_match_op_binary(self, binary, operator, **kw):
+        raise exc.CompileError(
+            "%s dialect does not support regular expressions"
+            % self.dialect.name
+        )
+
+    def visit_not_regexp_match_op_binary(self, binary, operator, **kw):
+        raise exc.CompileError(
+            "%s dialect does not support regular expressions"
+            % self.dialect.name
+        )
+
+    def visit_regexp_replace_op_binary(self, binary, operator, **kw):
+        raise exc.CompileError(
+            "%s dialect does not support regular expression replacements"
+            % self.dialect.name
+        )
+
+    def visit_bindparam(
+        self,
+        bindparam,
+        within_columns_clause=False,
+        literal_binds=False,
+        skip_bind_expression=False,
+        literal_execute=False,
+        render_postcompile=False,
+        **kwargs,
+    ):
+
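+        # rendering paths: with literal_binds, the parameter's value is
+        # inlined as a quoted literal; "post compile" parameters
+        # (expanding IN lists, literal_execute) render a
+        # "__[POSTCOMPILE_<name>]" token that's replaced before execution;
+        # otherwise a plain placeholder such as ":<name>" or "%(<name>)s"
+        # is emitted according to the dialect's paramstyle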
+        if not skip_bind_expression:
+            impl = bindparam.type.dialect_impl(self.dialect)
+            if impl._has_bind_expression:
+                bind_expression = impl.bind_expression(bindparam)
+                wrapped = self.process(
+                    bind_expression,
+                    skip_bind_expression=True,
+                    within_columns_clause=within_columns_clause,
+                    literal_binds=literal_binds and not bindparam.expanding,
+                    literal_execute=literal_execute,
+                    render_postcompile=render_postcompile,
+                    **kwargs,
+                )
+                if bindparam.expanding:
+                    # for postcompile w/ expanding, move the "wrapped" part
+                    # of this into the inside
+
+                    m = re.match(
+                        r"^(.*)\(__\[POSTCOMPILE_(\S+?)\]\)(.*)$", wrapped
+                    )
+                    assert m, "unexpected format for expanding parameter"
+                    wrapped = "(__[POSTCOMPILE_%s~~%s~~REPL~~%s~~])" % (
+                        m.group(2),
+                        m.group(1),
+                        m.group(3),
+                    )
+
+                    if literal_binds:
+                        ret = self.render_literal_bindparam(
+                            bindparam,
+                            within_columns_clause=True,
+                            bind_expression_template=wrapped,
+                            **kwargs,
+                        )
+                        return "(%s)" % ret
+
+                return wrapped
+
+        if not literal_binds:
+            literal_execute = (
+                literal_execute
+                or bindparam.literal_execute
+                or (within_columns_clause and self.ansi_bind_rules)
+            )
+            post_compile = literal_execute or bindparam.expanding
+        else:
+            post_compile = False
+
+        if literal_binds:
+            ret = self.render_literal_bindparam(
+                bindparam, within_columns_clause=True, **kwargs
+            )
+            if bindparam.expanding:
+                ret = "(%s)" % ret
+            return ret
+
+        name = self._truncate_bindparam(bindparam)
+
+        if name in self.binds:
+            existing = self.binds[name]
+            if existing is not bindparam:
+                if (
+                    (existing.unique or bindparam.unique)
+                    and not existing.proxy_set.intersection(
+                        bindparam.proxy_set
+                    )
+                    and not existing._cloned_set.intersection(
+                        bindparam._cloned_set
+                    )
+                ):
+                    raise exc.CompileError(
+                        "Bind parameter '%s' conflicts with "
+                        "unique bind parameter of the same name" % name
+                    )
+                elif existing.expanding != bindparam.expanding:
+                    raise exc.CompileError(
+                        "Can't reuse bound parameter name '%s' in both "
+                        "'expanding' (e.g. within an IN expression) and "
+                        "non-expanding contexts.  If this parameter is to "
+                        "receive a list/array value, set 'expanding=True' on "
+                        "it for expressions that aren't IN, otherwise use "
+                        "a different parameter name." % (name,)
+                    )
+                elif existing._is_crud or bindparam._is_crud:
+                    if existing._is_crud and bindparam._is_crud:
+                        # TODO: this condition is not well understood.
+                        # see tests in test/sql/test_update.py
+                        raise exc.CompileError(
+                            "Encountered unsupported case when compiling an "
+                            "INSERT or UPDATE statement.  If this is a "
+                            "multi-table UPDATE statement, please provide "
+                            "string-named arguments to the values() method "
+                            "with distinct names; support for UPDATE "
+                            "statements that target multiple tables is "
+                            "very limited",
+                        )
+                    else:
+                        raise exc.CompileError(
+                            f"bindparam() name '{bindparam.key}' is reserved "
+                            "for automatic usage in the VALUES or SET "
+                            "clause of this insert/update statement.  "
+                            "Please use a name other than the column name "
+                            "when using bindparam() with insert() or "
+                            f"update() (for example, 'b_{bindparam.key}')."
+                        )
+
+        self.binds[bindparam.key] = self.binds[name] = bindparam
+
+        # if we are given a cache key that we're going to match against,
+        # relate the bindparam here to one that is most likely present
+        # in the "extracted params" portion of the cache key.  this is used
+        # to set up a positional mapping that determines the correct
+        # parameters for a subsequent use of this compiled statement with a
+        # different set of parameter values.  here, we accommodate
+        # parameters that may have been cloned both before and after the
+        # cache key was generated.
+        ckbm_tuple = self._cache_key_bind_match
+
+        if ckbm_tuple:
+            ckbm, cksm = ckbm_tuple
+            for bp in bindparam._cloned_set:
+                if bp.key in cksm:
+                    cb = cksm[bp.key]
+                    ckbm[cb].append(bindparam)
+
+        if bindparam.isoutparam:
+            self.has_out_parameters = True
+
+        if post_compile:
+            if render_postcompile:
+                self._render_postcompile = True
+
+            if literal_execute:
+                self.literal_execute_params |= {bindparam}
+            else:
+                self.post_compile_params |= {bindparam}
+
+        ret = self.bindparam_string(
+            name,
+            post_compile=post_compile,
+            expanding=bindparam.expanding,
+            bindparam_type=bindparam.type,
+            **kwargs,
+        )
+
+        if bindparam.expanding:
+            ret = "(%s)" % ret
+
+        return ret
+
+    def render_bind_cast(self, type_, dbapi_type, sqltext):
+        raise NotImplementedError()
+
+    def render_literal_bindparam(
+        self,
+        bindparam,
+        render_literal_value=NO_ARG,
+        bind_expression_template=None,
+        **kw,
+    ):
+        if render_literal_value is not NO_ARG:
+            value = render_literal_value
+        else:
+            if bindparam.value is None and bindparam.callable is None:
+                op = kw.get("_binary_op", None)
+                if op and op not in (operators.is_, operators.is_not):
+                    util.warn_limited(
+                        "Bound parameter '%s' rendering literal NULL in a SQL "
+                        "expression; comparisons to NULL should not use "
+                        "operators outside of 'is' or 'is not'",
+                        (bindparam.key,),
+                    )
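+                # NULLTYPE's __visit_name__ is "null", the same as
+                # elements.Null, so process() dispatches to visit_null()
+                # and renders "NULL"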
+                return self.process(sqltypes.NULLTYPE, **kw)
+            value = bindparam.effective_value
+
+        if bindparam.expanding:
+            leep = self._literal_execute_expanding_parameter_literal_binds
+            to_update, replacement_expr = leep(
+                bindparam,
+                value,
+                bind_expression_template=bind_expression_template,
+            )
+            return replacement_expr
+        else:
+            return self.render_literal_value(value, bindparam.type)
+
+    def render_literal_value(self, value, type_):
+        """Render the value of a bind parameter as a quoted literal.
+
+        This is used for statement sections that do not accept bind parameters
+        on the target driver/database.
+
+        This should be implemented by subclasses using the quoting services
+        of the DBAPI.
+
+        """
+
+        if value is None and not type_.should_evaluate_none:
+            # issue #10535 - handle NULL in the compiler without placing
+            # this onto each type, except for "evaluate None" types
+            # (e.g. JSON)
+            return self.process(elements.Null._instance())
+
+        processor = type_._cached_literal_processor(self.dialect)
+        if processor:
+            try:
+                return processor(value)
+            except Exception as e:
+                raise exc.CompileError(
+                    f"Could not render literal value "
+                    f'"{sql_util._repr_single_value(value)}" '
+                    f"with datatype "
+                    f"{type_}; see parent stack trace for "
+                    "more detail."
+                ) from e
+
+        else:
+            raise exc.CompileError(
+                f"No literal value renderer is available for literal value "
+                f'"{sql_util._repr_single_value(value)}" '
+                f"with datatype {type_}"
+            )
+
+    def _truncate_bindparam(self, bindparam):
+        if bindparam in self.bind_names:
+            return self.bind_names[bindparam]
+
+        bind_name = bindparam.key
+        if isinstance(bind_name, elements._truncated_label):
+            bind_name = self._truncated_identifier("bindparam", bind_name)
+
+        # add to bind_names for translation
+        self.bind_names[bindparam] = bind_name
+
+        return bind_name
+
+    def _truncated_identifier(
+        self, ident_class: str, name: _truncated_label
+    ) -> str:
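+        # e.g. with label_length=30, an anonymized name longer than 24
+        # characters is cut to its first 24 characters plus a "_<hex>"
+        # counter suffix, memoized per (ident_class, name) so repeated
+        # references resolve to the same truncated name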
+        if (ident_class, name) in self.truncated_names:
+            return self.truncated_names[(ident_class, name)]
+
+        anonname = name.apply_map(self.anon_map)
+
+        if len(anonname) > self.label_length - 6:
+            counter = self._truncated_counters.get(ident_class, 1)
+            truncname = (
+                anonname[0 : max(self.label_length - 6, 0)]
+                + "_"
+                + hex(counter)[2:]
+            )
+            self._truncated_counters[ident_class] = counter + 1
+        else:
+            truncname = anonname
+        self.truncated_names[(ident_class, name)] = truncname
+        return truncname
+
+    def _anonymize(self, name: str) -> str:
+        return name % self.anon_map
+
+    def bindparam_string(
+        self,
+        name: str,
+        post_compile: bool = False,
+        expanding: bool = False,
+        escaped_from: Optional[str] = None,
+        bindparam_type: Optional[TypeEngine[Any]] = None,
+        accumulate_bind_names: Optional[Set[str]] = None,
+        visited_bindparam: Optional[List[str]] = None,
+        **kw: Any,
+    ) -> str:
+        # TODO: accumulate_bind_names is passed by crud.py to gather
+        # names on a per-value basis, visited_bindparam is passed by
+        # visit_insert() to collect all parameters in the statement.
+        # see if this gathering can be simplified somehow
+        if accumulate_bind_names is not None:
+            accumulate_bind_names.add(name)
+        if visited_bindparam is not None:
+            visited_bindparam.append(name)
+
+        if not escaped_from:
+            if self._bind_translate_re.search(name):
+                # not quite the translate use case as we want to
+                # also get a quick boolean if we even found
+                # unusual characters in the name
+                new_name = self._bind_translate_re.sub(
+                    lambda m: self._bind_translate_chars[m.group(0)],
+                    name,
+                )
+                escaped_from = name
+                name = new_name
+
+        if escaped_from:
+            self.escaped_bind_names = self.escaped_bind_names.union(
+                {escaped_from: name}
+            )
+        if post_compile:
+            ret = "__[POSTCOMPILE_%s]" % name
+            if expanding:
+                # for expanding, bound parameters or literal values will be
+                # rendered per item
+                return ret
+
+            # otherwise, for non-expanding "literal execute", apply
+            # bind casts as determined by the datatype
+            if bindparam_type is not None:
+                type_impl = bindparam_type._unwrapped_dialect_impl(
+                    self.dialect
+                )
+                if type_impl.render_literal_cast:
+                    ret = self.render_bind_cast(bindparam_type, type_impl, ret)
+            return ret
+        elif self.state is CompilerState.COMPILING:
+            ret = self.compilation_bindtemplate % {"name": name}
+        else:
+            ret = self.bindtemplate % {"name": name}
+
+        if (
+            bindparam_type is not None
+            and self.dialect._bind_typing_render_casts
+        ):
+            type_impl = bindparam_type._unwrapped_dialect_impl(self.dialect)
+            if type_impl.render_bind_cast:
+                ret = self.render_bind_cast(bindparam_type, type_impl, ret)
+
+        return ret
+
+    def _dispatch_independent_ctes(self, stmt, kw):
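+        # CTEs attached via add_cte() aren't otherwise referenced by the
+        # statement, so dispatch each one explicitly so it registers
+        # itself in self.ctes for the WITH clause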
+        local_kw = kw.copy()
+        local_kw.pop("cte_opts", None)
+        for cte, opt in zip(
+            stmt._independent_ctes, stmt._independent_ctes_opts
+        ):
+            cte._compiler_dispatch(self, cte_opts=opt, **local_kw)
+
+    def visit_cte(
+        self,
+        cte: CTE,
+        asfrom: bool = False,
+        ashint: bool = False,
+        fromhints: Optional[_FromHintsType] = None,
+        visiting_cte: Optional[CTE] = None,
+        from_linter: Optional[FromLinter] = None,
+        cte_opts: selectable._CTEOpts = selectable._CTEOpts(False),
+        **kwargs: Any,
+    ) -> Optional[str]:
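+        # register this CTE's text in self.ctes, tracking it by
+        # (nesting level, name) to deduplicate same-named CTEs; returns
+        # the rendered name/alias when the CTE is referenced as a FROM
+        # element, or None when it is only being collected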
+        self_ctes = self._init_cte_state()
+        assert self_ctes is self.ctes
+
+        kwargs["visiting_cte"] = cte
+
+        cte_name = cte.name
+
+        if isinstance(cte_name, elements._truncated_label):
+            cte_name = self._truncated_identifier("alias", cte_name)
+
+        is_new_cte = True
+        embedded_in_current_named_cte = False
+
+        _reference_cte = cte._get_reference_cte()
+
+        nesting = cte.nesting or cte_opts.nesting
+
+        # check for CTE already encountered
+        if _reference_cte in self.level_name_by_cte:
+            cte_level, existing_cte_name, existing_cte_opts = (
+                self.level_name_by_cte[_reference_cte]
+            )
+            assert existing_cte_name == cte_name
+
+            cte_level_name = (cte_level, cte_name)
+            existing_cte = self.ctes_by_level_name[cte_level_name]
+
+            # check if we are receiving it here with a specific
+            # "nest_here" location; if so, move it to this location
+
+            if cte_opts.nesting:
+                if existing_cte_opts.nesting:
+                    raise exc.CompileError(
+                        "CTE is stated as 'nest_here' in "
+                        "more than one location"
+                    )
+
+                old_level_name = (cte_level, cte_name)
+                cte_level = len(self.stack) if nesting else 1
+                cte_level_name = new_level_name = (cte_level, cte_name)
+
+                del self.ctes_by_level_name[old_level_name]
+                self.ctes_by_level_name[new_level_name] = existing_cte
+                self.level_name_by_cte[_reference_cte] = new_level_name + (
+                    cte_opts,
+                )
+
+        else:
+            cte_level = len(self.stack) if nesting else 1
+            cte_level_name = (cte_level, cte_name)
+
+            if cte_level_name in self.ctes_by_level_name:
+                existing_cte = self.ctes_by_level_name[cte_level_name]
+            else:
+                existing_cte = None
+
+        if existing_cte is not None:
+            embedded_in_current_named_cte = visiting_cte is existing_cte
+
+            # we've generated a same-named CTE that we are enclosed in,
+            # or this is the same CTE.  just return the name.
+            if cte is existing_cte._restates or cte is existing_cte:
+                is_new_cte = False
+            elif existing_cte is cte._restates:
+                # we've generated a same-named CTE that is
+                # enclosed in us - we take precedence, so
+                # discard the text for the "inner".
+                del self_ctes[existing_cte]
+
+                existing_cte_reference_cte = existing_cte._get_reference_cte()
+
+                assert existing_cte_reference_cte is _reference_cte
+                assert existing_cte_reference_cte is existing_cte
+
+                del self.level_name_by_cte[existing_cte_reference_cte]
+            else:
+                if (
+                    # if the two CTEs have the same hash, which we expect
+                    # here means that one/both is an annotated of the other
+                    (hash(cte) == hash(existing_cte))
+                    # or...
+                    or (
+                        (
+                            # if they are clones, i.e. they came from the ORM
+                            # or some other visit method
+                            cte._is_clone_of is not None
+                            or existing_cte._is_clone_of is not None
+                        )
+                        # and are deep-copy identical
+                        and cte.compare(existing_cte)
+                    )
+                ):
+                    # then consider these two CTEs the same
+                    is_new_cte = False
+                else:
+                    # otherwise these are two CTEs that either will render
+                    # differently, or were indicated separately by the user,
+                    # with the same name
+                    raise exc.CompileError(
+                        "Multiple, unrelated CTEs found with "
+                        "the same name: %r" % cte_name
+                    )
+
+        if not asfrom and not is_new_cte:
+            return None
+
+        if cte._cte_alias is not None:
+            pre_alias_cte = cte._cte_alias
+            cte_pre_alias_name = cte._cte_alias.name
+            if isinstance(cte_pre_alias_name, elements._truncated_label):
+                cte_pre_alias_name = self._truncated_identifier(
+                    "alias", cte_pre_alias_name
+                )
+        else:
+            pre_alias_cte = cte
+            cte_pre_alias_name = None
+
+        if is_new_cte:
+            self.ctes_by_level_name[cte_level_name] = cte
+            self.level_name_by_cte[_reference_cte] = cte_level_name + (
+                cte_opts,
+            )
+
+            if pre_alias_cte not in self.ctes:
+                self.visit_cte(pre_alias_cte, **kwargs)
+
+            if not cte_pre_alias_name and cte not in self_ctes:
+                if cte.recursive:
+                    self.ctes_recursive = True
+                text = self.preparer.format_alias(cte, cte_name)
+                if cte.recursive:
+                    col_source = cte.element
+
+                    # TODO: can we get at the .columns_plus_names collection
+                    # that is already (or will be?) generated for the SELECT
+                    # rather than calling twice?
+                    recur_cols = [
+                        # TODO: proxy_name is not technically safe,
+                        # see test_cte->
+                        # test_with_recursive_no_name_currently_buggy.  not
+                        # clear what should be done with such a case
+                        fallback_label_name or proxy_name
+                        for (
+                            _,
+                            proxy_name,
+                            fallback_label_name,
+                            c,
+                            repeated,
+                        ) in (col_source._generate_columns_plus_names(True))
+                        if not repeated
+                    ]
+
+                    text += "(%s)" % (
+                        ", ".join(
+                            self.preparer.format_label_name(
+                                ident, anon_map=self.anon_map
+                            )
+                            for ident in recur_cols
+                        )
+                    )
+
+                assert kwargs.get("subquery", False) is False
+
+                if not self.stack:
+                    # toplevel, this is a stringify of the
+                    # cte directly.  just compile the inner
+                    # the way alias() does.
+                    return cte.element._compiler_dispatch(
+                        self, asfrom=asfrom, **kwargs
+                    )
+                else:
+                    prefixes = self._generate_prefixes(
+                        cte, cte._prefixes, **kwargs
+                    )
+                    inner = cte.element._compiler_dispatch(
+                        self, asfrom=True, **kwargs
+                    )
+
+                    text += " AS %s\n(%s)" % (prefixes, inner)
+
+                if cte._suffixes:
+                    text += " " + self._generate_prefixes(
+                        cte, cte._suffixes, **kwargs
+                    )
+
+                self_ctes[cte] = text
+
+        if asfrom:
+            if from_linter:
+                from_linter.froms[cte._de_clone()] = cte_name
+
+            if not is_new_cte and embedded_in_current_named_cte:
+                return self.preparer.format_alias(cte, cte_name)
+
+            if cte_pre_alias_name:
+                text = self.preparer.format_alias(cte, cte_pre_alias_name)
+                if self.preparer._requires_quotes(cte_name):
+                    cte_name = self.preparer.quote(cte_name)
+                text += self.get_render_as_alias_suffix(cte_name)
+                return text
+            else:
+                return self.preparer.format_alias(cte, cte_name)
+
+        return None
+
+    def visit_table_valued_alias(self, element, **kw):
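+        # a table-valued function marked joins_implicitly=True (e.g. one
+        # deriving rows from a column of an adjacent FROM element) is
+        # exempted from cartesian-product linting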
+        if element.joins_implicitly:
+            kw["from_linter"] = None
+        if element._is_lateral:
+            return self.visit_lateral(element, **kw)
+        else:
+            return self.visit_alias(element, **kw)
+
+    def visit_table_valued_column(self, element, **kw):
+        return self.visit_column(element, **kw)
+
+    def visit_alias(
+        self,
+        alias,
+        asfrom=False,
+        ashint=False,
+        iscrud=False,
+        fromhints=None,
+        subquery=False,
+        lateral=False,
+        enclosing_alias=None,
+        from_linter=None,
+        **kwargs,
+    ):
+        if lateral:
+            if "enclosing_lateral" not in kwargs:
+                # if lateral is set and enclosing_lateral is not
+                # present, we assume we are being called directly
+                # from visit_lateral() and we need to set enclosing_lateral.
+                assert alias._is_lateral
+                kwargs["enclosing_lateral"] = alias
+
+            # for lateral objects, we track a second from_linter that is...
+            # lateral!  to the level above us.
+            if (
+                from_linter
+                and "lateral_from_linter" not in kwargs
+                and "enclosing_lateral" in kwargs
+            ):
+                kwargs["lateral_from_linter"] = from_linter
+
+        if enclosing_alias is not None and enclosing_alias.element is alias:
+            inner = alias.element._compiler_dispatch(
+                self,
+                asfrom=asfrom,
+                ashint=ashint,
+                iscrud=iscrud,
+                fromhints=fromhints,
+                lateral=lateral,
+                enclosing_alias=alias,
+                **kwargs,
+            )
+            if subquery and (asfrom or lateral):
+                inner = "(%s)" % (inner,)
+            return inner
+        else:
+            enclosing_alias = kwargs["enclosing_alias"] = alias
+
+        if asfrom or ashint:
+            if isinstance(alias.name, elements._truncated_label):
+                alias_name = self._truncated_identifier("alias", alias.name)
+            else:
+                alias_name = alias.name
+
+        if ashint:
+            return self.preparer.format_alias(alias, alias_name)
+        elif asfrom:
+            if from_linter:
+                from_linter.froms[alias._de_clone()] = alias_name
+
+            inner = alias.element._compiler_dispatch(
+                self, asfrom=True, lateral=lateral, **kwargs
+            )
+            if subquery:
+                inner = "(%s)" % (inner,)
+
+            ret = inner + self.get_render_as_alias_suffix(
+                self.preparer.format_alias(alias, alias_name)
+            )
+
+            if alias._supports_derived_columns and alias._render_derived:
+                ret += "(%s)" % (
+                    ", ".join(
+                        "%s%s"
+                        % (
+                            self.preparer.quote(col.name),
+                            (
+                                " %s"
+                                % self.dialect.type_compiler_instance.process(
+                                    col.type, **kwargs
+                                )
+                                if alias._render_derived_w_types
+                                else ""
+                            ),
+                        )
+                        for col in alias.c
+                    )
+                )
+
+            if fromhints and alias in fromhints:
+                ret = self.format_from_hint_text(
+                    ret, alias, fromhints[alias], iscrud
+                )
+
+            return ret
+        else:
+            # note we cancel the "subquery" flag here as well
+            return alias.element._compiler_dispatch(
+                self, lateral=lateral, **kwargs
+            )
+
+    def visit_subquery(self, subquery, **kw):
+        kw["subquery"] = True
+        return self.visit_alias(subquery, **kw)
+
+    def visit_lateral(self, lateral_, **kw):
+        kw["lateral"] = True
+        return "LATERAL %s" % self.visit_alias(lateral_, **kw)
+
+    def visit_tablesample(self, tablesample, asfrom=False, **kw):
+        text = "%s TABLESAMPLE %s" % (
+            self.visit_alias(tablesample, asfrom=True, **kw),
+            tablesample._get_method()._compiler_dispatch(self, **kw),
+        )
+
+        if tablesample.seed is not None:
+            text += " REPEATABLE (%s)" % (
+                tablesample.seed._compiler_dispatch(self, **kw)
+            )
+
+        return text
+
+    def _render_values(self, element, **kw):
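+        # render the tuple list of a VALUES construct,
+        # e.g. "VALUES (1, 'one'), (2, 'two')"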
+        kw.setdefault("literal_binds", element.literal_binds)
+        tuples = ", ".join(
+            self.process(
+                elements.Tuple(
+                    types=element._column_types, *elem
+                ).self_group(),
+                **kw,
+            )
+            for chunk in element._data
+            for elem in chunk
+        )
+        return f"VALUES {tuples}"
+
+    def visit_values(self, element, asfrom=False, from_linter=None, **kw):
+        v = self._render_values(element, **kw)
+
+        if element._unnamed:
+            name = None
+        elif isinstance(element.name, elements._truncated_label):
+            name = self._truncated_identifier("values", element.name)
+        else:
+            name = element.name
+
+        if element._is_lateral:
+            lateral = "LATERAL "
+        else:
+            lateral = ""
+
+        if asfrom:
+            if from_linter:
+                from_linter.froms[element._de_clone()] = (
+                    name if name is not None else "(unnamed VALUES element)"
+                )
+
+            if name:
+                kw["include_table"] = False
+                v = "%s(%s)%s (%s)" % (
+                    lateral,
+                    v,
+                    self.get_render_as_alias_suffix(self.preparer.quote(name)),
+                    (
+                        ", ".join(
+                            c._compiler_dispatch(self, **kw)
+                            for c in element.columns
+                        )
+                    ),
+                )
+            else:
+                v = "%s(%s)" % (lateral, v)
+        return v
+
+    def visit_scalar_values(self, element, **kw):
+        return f"({self._render_values(element, **kw)})"
+
+    def get_render_as_alias_suffix(self, alias_name_text):
+        return " AS " + alias_name_text
+
+    def _add_to_result_map(
+        self,
+        keyname: str,
+        name: str,
+        objects: Tuple[Any, ...],
+        type_: TypeEngine[Any],
+    ) -> None:
+
+        # note objects must be non-empty for cursor.py to handle the
+        # collection properly
+        assert objects
+
+        if keyname is None or keyname == "*":
+            self._ordered_columns = False
+            self._ad_hoc_textual = True
+        if type_._is_tuple_type:
+            raise exc.CompileError(
+                "Most backends don't support SELECTing "
+                "from a tuple() object.  If this is an ORM query, "
+                "consider using the Bundle object."
+            )
+        self._result_columns.append(
+            ResultColumnsEntry(keyname, name, objects, type_)
+        )
+
+    def _label_returning_column(
+        self, stmt, column, populate_result_map, column_clause_args=None, **kw
+    ):
+        """Render a column with necessary labels inside of a RETURNING clause.
+
+        This method is provided for individual dialects in place of calling
+        the _label_select_column method directly, so that the two use cases
+        of RETURNING vs. SELECT can be disambiguated going forward.
+
+        .. versionadded:: 1.4.21
+
+        """
+        return self._label_select_column(
+            None,
+            column,
+            populate_result_map,
+            False,
+            {} if column_clause_args is None else column_clause_args,
+            **kw,
+        )
+
+    def _label_select_column(
+        self,
+        select,
+        column,
+        populate_result_map,
+        asfrom,
+        column_clause_args,
+        name=None,
+        proxy_name=None,
+        fallback_label_name=None,
+        within_columns_clause=True,
+        column_is_repeated=False,
+        need_column_expressions=False,
+        include_table=True,
+    ):
+        """produce labeled columns present in a select()."""
+        impl = column.type.dialect_impl(self.dialect)
+
+        if impl._has_column_expression and (
+            need_column_expressions or populate_result_map
+        ):
+            col_expr = impl.column_expression(column)
+        else:
+            col_expr = column
+
+        if populate_result_map:
+            # pass an "add_to_result_map" callable into the compilation
+            # of embedded columns.  this collects information about the
+            # column as it will be fetched in the result and is coordinated
+            # with cursor.description when the query is executed.
+            add_to_result_map = self._add_to_result_map
+
+            # if the SELECT statement told us this column is a repeat,
+            # wrap the callable with one that prevents the addition of the
+            # targets
+            if column_is_repeated:
+                _add_to_result_map = add_to_result_map
+
+                def add_to_result_map(keyname, name, objects, type_):
+                    _add_to_result_map(keyname, name, (keyname,), type_)
+
+            # if we redefined col_expr for type expressions, wrap the
+            # callable with one that adds the original column to the targets
+            elif col_expr is not column:
+                _add_to_result_map = add_to_result_map
+
+                def add_to_result_map(keyname, name, objects, type_):
+                    _add_to_result_map(
+                        keyname, name, (column,) + objects, type_
+                    )
+
+        else:
+            add_to_result_map = None
+
+        # this method is used by some of the dialects for RETURNING,
+        # which has different inputs.  _label_returning_column was added
+        # as the better target for this now; however, for 1.4 we keep
+        # _label_select_column directly compatible with this use case.
+        # these assertions set up the currently expected inputs
+        assert within_columns_clause, (
+            "_label_select_column is only relevant within "
+            "the columns clause of a SELECT or RETURNING"
+        )
+        if isinstance(column, elements.Label):
+            if col_expr is not column:
+                result_expr = _CompileLabel(
+                    col_expr, column.name, alt_names=(column.element,)
+                )
+            else:
+                result_expr = col_expr
+
+        elif name:
+            # here, _columns_plus_names has determined there's an explicit
+            # label name we need to use.  this is the default for
+            # tablenames_plus_columnnames as well as when columns are being
+            # deduplicated on name
+
+            assert (
+                proxy_name is not None
+            ), "proxy_name is required if 'name' is passed"
+
+            result_expr = _CompileLabel(
+                col_expr,
+                name,
+                alt_names=(
+                    proxy_name,
+                    # this is a hack to allow legacy result column lookups
+                    # to work as they did before; this goes away in 2.0.
+                    # TODO: this only seems to be tested indirectly
+                    # via test/orm/test_deprecations.py.   should be a
+                    # resultset test for this
+                    column._tq_label,
+                ),
+            )
+        else:
+            # determine here whether this column should be rendered in
+            # a labelled context or not, as we were given no required label
+            # name from the caller. Here we apply heuristics based on the kind
+            # of SQL expression involved.
+
+            if col_expr is not column:
+                # type-specific expression wrapping the given column,
+                # so we render a label
+                render_with_label = True
+            elif isinstance(column, elements.ColumnClause):
+                # table-bound column, we render its name as a label if we are
+                # inside of a subquery only
+                render_with_label = (
+                    asfrom
+                    and not column.is_literal
+                    and column.table is not None
+                )
+            elif isinstance(column, elements.TextClause):
+                render_with_label = False
+            elif isinstance(column, elements.UnaryExpression):
+                render_with_label = column.wraps_column_expression or asfrom
+            elif (
+                # general class of expressions that don't have a SQL-column
+                # addressable name.  includes scalar selects, bind
+                # parameters, SQL functions, others
+                not isinstance(column, elements.NamedColumn)
+                # deeper check that indicates there's no natural "name" to
+                # this element, which accommodates custom SQL constructs
+                # that might have a ".name" attribute (but aren't SQL
+                # functions) but are not implementing this more recently
+                # added base class.  in theory the "NamedColumn" check
+                # should be enough, however here we seek to maintain legacy
+                # behaviors as well.
+                and column._non_anon_label is None
+            ):
+                render_with_label = True
+            else:
+                render_with_label = False
+
+            if render_with_label:
+                if not fallback_label_name:
+                    # used by the RETURNING case right now.  we generate it
+                    # here as 3rd party dialects may be referring to
+                    # _label_select_column method directly instead of the
+                    # just-added _label_returning_column method
+                    assert not column_is_repeated
+                    fallback_label_name = column._anon_name_label
+
+                fallback_label_name = (
+                    elements._truncated_label(fallback_label_name)
+                    if not isinstance(
+                        fallback_label_name, elements._truncated_label
+                    )
+                    else fallback_label_name
+                )
+
+                result_expr = _CompileLabel(
+                    col_expr, fallback_label_name, alt_names=(proxy_name,)
+                )
+            else:
+                result_expr = col_expr
+
+        column_clause_args.update(
+            within_columns_clause=within_columns_clause,
+            add_to_result_map=add_to_result_map,
+            include_table=include_table,
+        )
+        return result_expr._compiler_dispatch(self, **column_clause_args)
+
+    def format_from_hint_text(self, sqltext, table, hint, iscrud):
+        hinttext = self.get_from_hint_text(table, hint)
+        if hinttext:
+            sqltext += " " + hinttext
+        return sqltext
+
+    def get_select_hint_text(self, byfroms):
+        return None
+
+    def get_from_hint_text(self, table, text):
+        return None
+
+    def get_crud_hint_text(self, table, text):
+        return None
+
+    def get_statement_hint_text(self, hint_texts):
+        return " ".join(hint_texts)
+
+    _default_stack_entry: _CompilerStackEntry
+
+    if not typing.TYPE_CHECKING:
+        _default_stack_entry = util.immutabledict(
+            [("correlate_froms", frozenset()), ("asfrom_froms", frozenset())]
+        )
+
+    def _display_froms_for_select(
+        self, select_stmt, asfrom, lateral=False, **kw
+    ):
+        # utility method to help external dialects
+        # get the correct from list for a select.
+        # specifically the oracle dialect needs this feature
+        # right now.
+        toplevel = not self.stack
+        entry = self._default_stack_entry if toplevel else self.stack[-1]
+
+        compile_state = select_stmt._compile_state_factory(select_stmt, self)
+
+        correlate_froms = entry["correlate_froms"]
+        asfrom_froms = entry["asfrom_froms"]
+
+        if asfrom and not lateral:
+            froms = compile_state._get_display_froms(
+                explicit_correlate_froms=correlate_froms.difference(
+                    asfrom_froms
+                ),
+                implicit_correlate_froms=(),
+            )
+        else:
+            froms = compile_state._get_display_froms(
+                explicit_correlate_froms=correlate_froms,
+                implicit_correlate_froms=asfrom_froms,
+            )
+        return froms
+
+    translate_select_structure: Any = None
+    """if not ``None``, should be a callable which accepts ``(select_stmt,
+    **kw)`` and returns a select object.   this is used for structural changes
+    mostly to accommodate for LIMIT/OFFSET schemes
+
+    """
+
+    def visit_select(
+        self,
+        select_stmt,
+        asfrom=False,
+        insert_into=False,
+        fromhints=None,
+        compound_index=None,
+        select_wraps_for=None,
+        lateral=False,
+        from_linter=None,
+        **kwargs,
+    ):
+        assert select_wraps_for is None, (
+            "SQLAlchemy 1.4 requires use of "
+            "the translate_select_structure hook for structural "
+            "translations of SELECT objects"
+        )
+
+        # initial setup of SELECT.  the compile_state_factory may now
+        # be creating a totally different SELECT from the one that was
+        # passed in.  for ORM use this will convert from an ORM-state
+        # SELECT to a regular "Core" SELECT.  other composed operations
+        # such as computation of joins will be performed.
+
+        kwargs["within_columns_clause"] = False
+
+        compile_state = select_stmt._compile_state_factory(
+            select_stmt, self, **kwargs
+        )
+        kwargs["ambiguous_table_name_map"] = (
+            compile_state._ambiguous_table_name_map
+        )
+
+        select_stmt = compile_state.statement
+
+        toplevel = not self.stack
+
+        if toplevel and not self.compile_state:
+            self.compile_state = compile_state
+
+        is_embedded_select = compound_index is not None or insert_into
+
+        # translate step for Oracle, SQL Server which often need to
+        # restructure the SELECT to allow for LIMIT/OFFSET and possibly
+        # other conditions
+        if self.translate_select_structure:
+            new_select_stmt = self.translate_select_structure(
+                select_stmt, asfrom=asfrom, **kwargs
+            )
+
+            # if SELECT was restructured, maintain a link to the originals
+            # and assemble a new compile state
+            if new_select_stmt is not select_stmt:
+                compile_state_wraps_for = compile_state
+                select_wraps_for = select_stmt
+                select_stmt = new_select_stmt
+
+                compile_state = select_stmt._compile_state_factory(
+                    select_stmt, self, **kwargs
+                )
+                select_stmt = compile_state.statement
+
+        entry = self._default_stack_entry if toplevel else self.stack[-1]
+
+        populate_result_map = need_column_expressions = (
+            toplevel
+            or entry.get("need_result_map_for_compound", False)
+            or entry.get("need_result_map_for_nested", False)
+        )
+
+        # indicates there is a CompoundSelect in play and we are not the
+        # first select
+        if compound_index:
+            populate_result_map = False
+
+        # this was first proposed as part of #3372; however, it is not
+        # reached in current tests and could possibly be an assertion
+        # instead.
+        if not populate_result_map and "add_to_result_map" in kwargs:
+            del kwargs["add_to_result_map"]
+
+        froms = self._setup_select_stack(
+            select_stmt, compile_state, entry, asfrom, lateral, compound_index
+        )
+
+        column_clause_args = kwargs.copy()
+        column_clause_args.update(
+            {"within_label_clause": False, "within_columns_clause": False}
+        )
+
+        text = "SELECT "  # we're off to a good start !
+
+        if select_stmt._hints:
+            hint_text, byfrom = self._setup_select_hints(select_stmt)
+            if hint_text:
+                text += hint_text + " "
+        else:
+            byfrom = None
+
+        if select_stmt._independent_ctes:
+            self._dispatch_independent_ctes(select_stmt, kwargs)
+
+        if select_stmt._prefixes:
+            text += self._generate_prefixes(
+                select_stmt, select_stmt._prefixes, **kwargs
+            )
+
+        text += self.get_select_precolumns(select_stmt, **kwargs)
+        # the actual list of columns to print in the SELECT column list.
+        inner_columns = [
+            c
+            for c in [
+                self._label_select_column(
+                    select_stmt,
+                    column,
+                    populate_result_map,
+                    asfrom,
+                    column_clause_args,
+                    name=name,
+                    proxy_name=proxy_name,
+                    fallback_label_name=fallback_label_name,
+                    column_is_repeated=repeated,
+                    need_column_expressions=need_column_expressions,
+                )
+                for (
+                    name,
+                    proxy_name,
+                    fallback_label_name,
+                    column,
+                    repeated,
+                ) in compile_state.columns_plus_names
+            ]
+            if c is not None
+        ]
+
+        if populate_result_map and select_wraps_for is not None:
+            # if this select was generated from translate_select,
+            # rewrite the targeted columns in the result map
+
+            translate = dict(
+                zip(
+                    [
+                        name
+                        for (
+                            key,
+                            proxy_name,
+                            fallback_label_name,
+                            name,
+                            repeated,
+                        ) in compile_state.columns_plus_names
+                    ],
+                    [
+                        name
+                        for (
+                            key,
+                            proxy_name,
+                            fallback_label_name,
+                            name,
+                            repeated,
+                        ) in compile_state_wraps_for.columns_plus_names
+                    ],
+                )
+            )
+
+            self._result_columns = [
+                ResultColumnsEntry(
+                    key, name, tuple(translate.get(o, o) for o in obj), type_
+                )
+                for key, name, obj, type_ in self._result_columns
+            ]
+
+        text = self._compose_select_body(
+            text,
+            select_stmt,
+            compile_state,
+            inner_columns,
+            froms,
+            byfrom,
+            toplevel,
+            kwargs,
+        )
+
+        if select_stmt._statement_hints:
+            per_dialect = [
+                ht
+                for (dialect_name, ht) in select_stmt._statement_hints
+                if dialect_name in ("*", self.dialect.name)
+            ]
+            if per_dialect:
+                text += " " + self.get_statement_hint_text(per_dialect)
+
+        # In a compound query, CTEs are shared at the compound level
+        if self.ctes and (not is_embedded_select or toplevel):
+            nesting_level = len(self.stack) if not toplevel else None
+            text = self._render_cte_clause(nesting_level=nesting_level) + text
+
+        if select_stmt._suffixes:
+            text += " " + self._generate_prefixes(
+                select_stmt, select_stmt._suffixes, **kwargs
+            )
+
+        self.stack.pop(-1)
+
+        return text
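+
+    # roughly, the assembly above produces (bracketed parts optional):
+    #     [WITH ...] SELECT [dialect hints] [prefixes] [DISTINCT] <columns>
+    #     FROM ... [WHERE ...] [GROUP BY ...] [HAVING ...] [ORDER BY ...]
+    #     [LIMIT/OFFSET | FETCH] [FOR UPDATE] [statement hints] [suffixes]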
+
+    def _setup_select_hints(
+        self, select: Select[Any]
+    ) -> Tuple[str, _FromHintsType]:
+        byfrom = {
+            from_: hinttext
+            % {"name": from_._compiler_dispatch(self, ashint=True)}
+            for (from_, dialect), hinttext in select._hints.items()
+            if dialect in ("*", self.dialect.name)
+        }
+        hint_text = self.get_select_hint_text(byfrom)
+        return hint_text, byfrom
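+
+    # illustrative only: given a statement like
+    #     select(mytable).with_hint(mytable, "WITH (NOLOCK)", "mssql")
+    # byfrom maps mytable to its substituted hint text, with any
+    # "%(name)s" token replaced by the rendered table name, and
+    # get_select_hint_text() decides what, if anything, is emitted
+    # directly after the SELECT keyword.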
+
+    def _setup_select_stack(
+        self, select, compile_state, entry, asfrom, lateral, compound_index
+    ):
+        correlate_froms = entry["correlate_froms"]
+        asfrom_froms = entry["asfrom_froms"]
+
+        if compound_index == 0:
+            entry["select_0"] = select
+        elif compound_index:
+            select_0 = entry["select_0"]
+            numcols = len(select_0._all_selected_columns)
+
+            if len(compile_state.columns_plus_names) != numcols:
+                raise exc.CompileError(
+                    "All selectables passed to "
+                    "CompoundSelect must have identical numbers of "
+                    "columns; select #%d has %d columns, select "
+                    "#%d has %d"
+                    % (
+                        1,
+                        numcols,
+                        compound_index + 1,
+                        len(select._all_selected_columns),
+                    )
+                )
+
+        if asfrom and not lateral:
+            froms = compile_state._get_display_froms(
+                explicit_correlate_froms=correlate_froms.difference(
+                    asfrom_froms
+                ),
+                implicit_correlate_froms=(),
+            )
+        else:
+            froms = compile_state._get_display_froms(
+                explicit_correlate_froms=correlate_froms,
+                implicit_correlate_froms=asfrom_froms,
+            )
+
+        new_correlate_froms = set(_from_objects(*froms))
+        all_correlate_froms = new_correlate_froms.union(correlate_froms)
+
+        new_entry: _CompilerStackEntry = {
+            "asfrom_froms": new_correlate_froms,
+            "correlate_froms": all_correlate_froms,
+            "selectable": select,
+            "compile_state": compile_state,
+        }
+        self.stack.append(new_entry)
+
+        return froms
+
+    def _compose_select_body(
+        self,
+        text,
+        select,
+        compile_state,
+        inner_columns,
+        froms,
+        byfrom,
+        toplevel,
+        kwargs,
+    ):
+        text += ", ".join(inner_columns)
+
+        if self.linting & COLLECT_CARTESIAN_PRODUCTS:
+            from_linter = FromLinter({}, set())
+            warn_linting = self.linting & WARN_LINTING
+            if toplevel:
+                self.from_linter = from_linter
+        else:
+            from_linter = None
+            warn_linting = False
+
+        # adjust the whitespace for no inner columns, part of #9440,
+        # so that a no-col SELECT comes out as "SELECT WHERE..." or
+        # "SELECT FROM ...".
+        # while it would be better to have built the SELECT starting string
+        # without trailing whitespace first, then add whitespace only if inner
+        # cols were present, this breaks compatibility with various custom
+        # compilation schemes that are currently being tested.
+        if not inner_columns:
+            text = text.rstrip()
+
+        if froms:
+            text += " \nFROM "
+
+            if select._hints:
+                text += ", ".join(
+                    [
+                        f._compiler_dispatch(
+                            self,
+                            asfrom=True,
+                            fromhints=byfrom,
+                            from_linter=from_linter,
+                            **kwargs,
+                        )
+                        for f in froms
+                    ]
+                )
+            else:
+                text += ", ".join(
+                    [
+                        f._compiler_dispatch(
+                            self,
+                            asfrom=True,
+                            from_linter=from_linter,
+                            **kwargs,
+                        )
+                        for f in froms
+                    ]
+                )
+        else:
+            text += self.default_from()
+
+        if select._where_criteria:
+            t = self._generate_delimited_and_list(
+                select._where_criteria, from_linter=from_linter, **kwargs
+            )
+            if t:
+                text += " \nWHERE " + t
+
+        if warn_linting:
+            assert from_linter is not None
+            from_linter.warn()
+
+        if select._group_by_clauses:
+            text += self.group_by_clause(select, **kwargs)
+
+        if select._having_criteria:
+            t = self._generate_delimited_and_list(
+                select._having_criteria, **kwargs
+            )
+            if t:
+                text += " \nHAVING " + t
+
+        if select._order_by_clauses:
+            text += self.order_by_clause(select, **kwargs)
+
+        if select._has_row_limiting_clause:
+            text += self._row_limit_clause(select, **kwargs)
+
+        if select._for_update_arg is not None:
+            text += self.for_update_clause(select, **kwargs)
+
+        return text
+
+    def _generate_prefixes(self, stmt, prefixes, **kw):
+        clause = " ".join(
+            prefix._compiler_dispatch(self, **kw)
+            for prefix, dialect_name in prefixes
+            if dialect_name in (None, "*") or dialect_name == self.dialect.name
+        )
+        if clause:
+            clause += " "
+        return clause
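+
+    # illustrative only: prefixes normally arrive via the public
+    # prefix_with() method, e.g.
+    #     select(t).prefix_with("SQL_NO_CACHE", dialect="mysql")
+    # renders "SELECT SQL_NO_CACHE ..." on MySQL and contributes
+    # nothing on other dialects; suffix_with() text is rendered
+    # through this same method.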
+
+    def _render_cte_clause(
+        self,
+        nesting_level=None,
+        include_following_stack=False,
+    ):
+        """
+        include_following_stack
+            Also render the nesting CTEs on the next stack. Useful for
+            SQL structures like UNION or INSERT that can wrap SELECT
+            statements containing nesting CTEs.
+        """
+        if not self.ctes:
+            return ""
+
+        ctes: MutableMapping[CTE, str]
+
+        if nesting_level and nesting_level > 1:
+            ctes = util.OrderedDict()
+            for cte in list(self.ctes.keys()):
+                cte_level, cte_name, cte_opts = self.level_name_by_cte[
+                    cte._get_reference_cte()
+                ]
+                nesting = cte.nesting or cte_opts.nesting
+                is_rendered_level = cte_level == nesting_level or (
+                    include_following_stack and cte_level == nesting_level + 1
+                )
+                if not (nesting and is_rendered_level):
+                    continue
+
+                ctes[cte] = self.ctes[cte]
+
+        else:
+            ctes = self.ctes
+
+        if not ctes:
+            return ""
+        ctes_recursive = any(cte.recursive for cte in ctes)
+
+        cte_text = self.get_cte_preamble(ctes_recursive) + " "
+        cte_text += ", \n".join(ctes.values())
+        cte_text += "\n "
+
+        if nesting_level and nesting_level > 1:
+            for cte in list(ctes.keys()):
+                cte_level, cte_name, cte_opts = self.level_name_by_cte[
+                    cte._get_reference_cte()
+                ]
+                del self.ctes[cte]
+                del self.ctes_by_level_name[(cte_level, cte_name)]
+                del self.level_name_by_cte[cte._get_reference_cte()]
+
+        return cte_text
+
+    def get_cte_preamble(self, recursive):
+        if recursive:
+            return "WITH RECURSIVE"
+        else:
+            return "WITH"
+
+    def get_select_precolumns(self, select, **kw):
+        """Called when building a ``SELECT`` statement, position is just
+        before column list.
+
+        """
+        if select._distinct_on:
+            util.warn_deprecated(
+                "DISTINCT ON is currently supported only by the PostgreSQL "
+                "dialect.  Use of DISTINCT ON for other backends is currently "
+                "silently ignored, however this usage is deprecated, and will "
+                "raise CompileError in a future release for all backends "
+                "that do not support this syntax.",
+                version="1.4",
+            )
+        return "DISTINCT " if select._distinct else ""
+
+    def group_by_clause(self, select, **kw):
+        """allow dialects to customize how GROUP BY is rendered."""
+
+        group_by = self._generate_delimited_list(
+            select._group_by_clauses, OPERATORS[operators.comma_op], **kw
+        )
+        if group_by:
+            return " GROUP BY " + group_by
+        else:
+            return ""
+
+    def order_by_clause(self, select, **kw):
+        """allow dialects to customize how ORDER BY is rendered."""
+
+        order_by = self._generate_delimited_list(
+            select._order_by_clauses, OPERATORS[operators.comma_op], **kw
+        )
+
+        if order_by:
+            return " ORDER BY " + order_by
+        else:
+            return ""
+
+    def for_update_clause(self, select, **kw):
+        return " FOR UPDATE"
+
+    def returning_clause(
+        self,
+        stmt: UpdateBase,
+        returning_cols: Sequence[ColumnElement[Any]],
+        *,
+        populate_result_map: bool,
+        **kw: Any,
+    ) -> str:
+        columns = [
+            self._label_returning_column(
+                stmt,
+                column,
+                populate_result_map,
+                fallback_label_name=fallback_label_name,
+                column_is_repeated=repeated,
+                name=name,
+                proxy_name=proxy_name,
+                **kw,
+            )
+            for (
+                name,
+                proxy_name,
+                fallback_label_name,
+                column,
+                repeated,
+            ) in stmt._generate_columns_plus_names(
+                True, cols=base._select_iterables(returning_cols)
+            )
+        ]
+
+        return "RETURNING " + ", ".join(columns)
+
+    def limit_clause(self, select, **kw):
+        text = ""
+        if select._limit_clause is not None:
+            text += "\n LIMIT " + self.process(select._limit_clause, **kw)
+        if select._offset_clause is not None:
+            if select._limit_clause is None:
+                text += "\n LIMIT -1"
+            text += " OFFSET " + self.process(select._offset_clause, **kw)
+        return text
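+
+    # e.g. select(t).limit(5).offset(10) appends roughly
+    # "\n LIMIT :param_1 OFFSET :param_2"; an OFFSET with no LIMIT
+    # receives the "LIMIT -1" placeholder rendered above.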
+
+    def fetch_clause(
+        self,
+        select,
+        fetch_clause=None,
+        require_offset=False,
+        use_literal_execute_for_simple_int=False,
+        **kw,
+    ):
+        if fetch_clause is None:
+            fetch_clause = select._fetch_clause
+            fetch_clause_options = select._fetch_clause_options
+        else:
+            fetch_clause_options = {"percent": False, "with_ties": False}
+
+        text = ""
+
+        if select._offset_clause is not None:
+            offset_clause = select._offset_clause
+            if (
+                use_literal_execute_for_simple_int
+                and select._simple_int_clause(offset_clause)
+            ):
+                offset_clause = offset_clause.render_literal_execute()
+            offset_str = self.process(offset_clause, **kw)
+            text += "\n OFFSET %s ROWS" % offset_str
+        elif require_offset:
+            text += "\n OFFSET 0 ROWS"
+
+        if fetch_clause is not None:
+            if (
+                use_literal_execute_for_simple_int
+                and select._simple_int_clause(fetch_clause)
+            ):
+                fetch_clause = fetch_clause.render_literal_execute()
+            text += "\n FETCH FIRST %s%s ROWS %s" % (
+                self.process(fetch_clause, **kw),
+                " PERCENT" if fetch_clause_options["percent"] else "",
+                "WITH TIES" if fetch_clause_options["with_ties"] else "ONLY",
+            )
+        return text
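+
+    # e.g. select(t).fetch(5, percent=True) renders roughly
+    # "\n FETCH FIRST :param_1 PERCENT ROWS ONLY", with "WITH TIES"
+    # in place of "ONLY" when with_ties=True, preceded by an
+    # "OFFSET ... ROWS" clause when an offset is present.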
+
+    def visit_table(
+        self,
+        table,
+        asfrom=False,
+        iscrud=False,
+        ashint=False,
+        fromhints=None,
+        use_schema=True,
+        from_linter=None,
+        ambiguous_table_name_map=None,
+        **kwargs,
+    ):
+        if from_linter:
+            from_linter.froms[table] = table.fullname
+
+        if asfrom or ashint:
+            effective_schema = self.preparer.schema_for_object(table)
+
+            if use_schema and effective_schema:
+                ret = (
+                    self.preparer.quote_schema(effective_schema)
+                    + "."
+                    + self.preparer.quote(table.name)
+                )
+            else:
+                ret = self.preparer.quote(table.name)
+
+                if (
+                    not effective_schema
+                    and ambiguous_table_name_map
+                    and table.name in ambiguous_table_name_map
+                ):
+                    anon_name = self._truncated_identifier(
+                        "alias", ambiguous_table_name_map[table.name]
+                    )
+
+                    ret = ret + self.get_render_as_alias_suffix(
+                        self.preparer.format_alias(None, anon_name)
+                    )
+
+            if fromhints and table in fromhints:
+                ret = self.format_from_hint_text(
+                    ret, table, fromhints[table], iscrud
+                )
+            return ret
+        else:
+            return ""
+
+    def visit_join(self, join, asfrom=False, from_linter=None, **kwargs):
+        if from_linter:
+            from_linter.edges.update(
+                itertools.product(
+                    _de_clone(join.left._from_objects),
+                    _de_clone(join.right._from_objects),
+                )
+            )
+
+        if join.full:
+            join_type = " FULL OUTER JOIN "
+        elif join.isouter:
+            join_type = " LEFT OUTER JOIN "
+        else:
+            join_type = " JOIN "
+        return (
+            join.left._compiler_dispatch(
+                self, asfrom=True, from_linter=from_linter, **kwargs
+            )
+            + join_type
+            + join.right._compiler_dispatch(
+                self, asfrom=True, from_linter=from_linter, **kwargs
+            )
+            + " ON "
+            # TODO: likely need asfrom=True here?
+            + join.onclause._compiler_dispatch(
+                self, from_linter=from_linter, **kwargs
+            )
+        )
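+
+    # e.g. t1.join(t2, t1.c.id == t2.c.t1_id) renders
+    # "t1 JOIN t2 ON t1.id = t2.t1_id"; outerjoin() selects
+    # " LEFT OUTER JOIN " and full=True selects " FULL OUTER JOIN ".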
+
+    def _setup_crud_hints(self, stmt, table_text):
+        dialect_hints = {
+            table: hint_text
+            for (table, dialect), hint_text in stmt._hints.items()
+            if dialect in ("*", self.dialect.name)
+        }
+        if stmt.table in dialect_hints:
+            table_text = self.format_from_hint_text(
+                table_text, stmt.table, dialect_hints[stmt.table], True
+            )
+        return dialect_hints, table_text
+
+    # within the realm of "insertmanyvalues sentinel columns",
+    # these lookups match different kinds of Column() configurations
+    # to specific backend capabilities.  they are broken into two
+    # lookups, one for autoincrement columns and the other for non
+    # autoincrement columns
+    _sentinel_col_non_autoinc_lookup = util.immutabledict(
+        {
+            _SentinelDefaultCharacterization.CLIENTSIDE: (
+                InsertmanyvaluesSentinelOpts._SUPPORTED_OR_NOT
+            ),
+            _SentinelDefaultCharacterization.SENTINEL_DEFAULT: (
+                InsertmanyvaluesSentinelOpts._SUPPORTED_OR_NOT
+            ),
+            _SentinelDefaultCharacterization.NONE: (
+                InsertmanyvaluesSentinelOpts._SUPPORTED_OR_NOT
+            ),
+            _SentinelDefaultCharacterization.IDENTITY: (
+                InsertmanyvaluesSentinelOpts.IDENTITY
+            ),
+            _SentinelDefaultCharacterization.SEQUENCE: (
+                InsertmanyvaluesSentinelOpts.SEQUENCE
+            ),
+        }
+    )
+    _sentinel_col_autoinc_lookup = _sentinel_col_non_autoinc_lookup.union(
+        {
+            _SentinelDefaultCharacterization.NONE: (
+                InsertmanyvaluesSentinelOpts.AUTOINCREMENT
+            ),
+        }
+    )
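+
+    # e.g. a plain autoincrement integer primary key characterizes its
+    # default as NONE and maps above to AUTOINCREMENT, so it is usable
+    # as an implicit sentinel only on dialects whose
+    # insertmanyvalues_implicit_sentinel bitmask includes AUTOINCREMENT.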
+
+    def _get_sentinel_column_for_table(
+        self, table: Table
+    ) -> Optional[Sequence[Column[Any]]]:
+        """given a :class:`.Table`, return a usable sentinel column or
+        columns for this dialect if any.
+
+        Return None if no sentinel columns could be identified, or raise an
+        error if a column was marked as a sentinel explicitly but isn't
+        compatible with this dialect.
+
+        """
+
+        sentinel_opts = self.dialect.insertmanyvalues_implicit_sentinel
+        sentinel_characteristics = table._sentinel_column_characteristics
+
+        sent_cols = sentinel_characteristics.columns
+
+        if sent_cols is None:
+            return None
+
+        if sentinel_characteristics.is_autoinc:
+            bitmask = self._sentinel_col_autoinc_lookup.get(
+                sentinel_characteristics.default_characterization, 0
+            )
+        else:
+            bitmask = self._sentinel_col_non_autoinc_lookup.get(
+                sentinel_characteristics.default_characterization, 0
+            )
+
+        if sentinel_opts & bitmask:
+            return sent_cols
+
+        if sentinel_characteristics.is_explicit:
+            # a column was explicitly marked as insert_sentinel=True,
+            # however it is not compatible with this dialect.   they should
+            # not indicate this column as a sentinel if they need to include
+            # this dialect.
+
+            # TODO: do we want non-primary key explicit sentinel cols
+            # that can gracefully degrade for some backends?
+            # insert_sentinel="degrade" perhaps.  not for the initial release.
+            # I am hoping people are generally not dealing with this sentinel
+            # business at all.
+
+            # if is_explicit is True, there will be only one sentinel column.
+
+            raise exc.InvalidRequestError(
+                f"Column {sent_cols[0]} can't be explicitly "
+                "marked as a sentinel column when using the "
+                f"{self.dialect.name} dialect, as the "
+                "particular type of default generation on this column is "
+                "not currently compatible with this dialect's specific "
+                f"INSERT..RETURNING syntax which can receive the "
+                "server-generated value in "
+                "a deterministic way.  To remove this error, remove "
+                "insert_sentinel=True from primary key autoincrement "
+                "columns; these columns are automatically used as "
+                "sentinels for supported dialects in any case."
+            )
+
+        return None
+
+    def _deliver_insertmanyvalues_batches(
+        self,
+        statement: str,
+        parameters: _DBAPIMultiExecuteParams,
+        compiled_parameters: List[_MutableCoreSingleExecuteParams],
+        generic_setinputsizes: Optional[_GenericSetInputSizesType],
+        batch_size: int,
+        sort_by_parameter_order: bool,
+        schema_translate_map: Optional[SchemaTranslateMapType],
+    ) -> Iterator[_InsertManyValuesBatch]:
+        imv = self._insertmanyvalues
+        assert imv is not None
+
+        if not imv.sentinel_param_keys:
+            _sentinel_from_params = None
+        else:
+            _sentinel_from_params = operator.itemgetter(
+                *imv.sentinel_param_keys
+            )
+
+        lenparams = len(parameters)
+        if imv.is_default_expr and not self.dialect.supports_default_metavalue:
+            # backend doesn't support
+            # INSERT INTO table (pk_col) VALUES (DEFAULT), (DEFAULT), ...
+            # at the moment this is basically SQL Server due to
+            # not being able to use DEFAULT for an identity column;
+            # just yield out that many single statements!  still
+            # faster than a whole connection.execute() call ;)
+            #
+            # note we still are taking advantage of the fact that we know
+            # we are using RETURNING.   The generalized approach of fetching
+            # cursor.lastrowid etc. still goes through the more heavyweight
+            # "ExecutionContext per statement" system as it isn't usable
+            # as a generic "RETURNING" approach
+            use_row_at_a_time = True
+            downgraded = False
+        elif not self.dialect.supports_multivalues_insert or (
+            sort_by_parameter_order
+            and self._result_columns
+            and (imv.sentinel_columns is None or imv.includes_upsert_behaviors)
+        ):
+            # deterministic order was requested and the compiler could
+            # not organize sentinel columns for this dialect/statement.
+            # use row at a time
+            use_row_at_a_time = True
+            downgraded = True
+        else:
+            use_row_at_a_time = False
+            downgraded = False
+
+        if use_row_at_a_time:
+            for batchnum, (param, compiled_param) in enumerate(
+                cast(
+                    "Sequence[Tuple[_DBAPISingleExecuteParams, _MutableCoreSingleExecuteParams]]",  # noqa: E501
+                    zip(parameters, compiled_parameters),
+                ),
+                1,
+            ):
+                yield _InsertManyValuesBatch(
+                    statement,
+                    param,
+                    generic_setinputsizes,
+                    [param],
+                    (
+                        [_sentinel_from_params(compiled_param)]
+                        if _sentinel_from_params
+                        else []
+                    ),
+                    1,
+                    batchnum,
+                    lenparams,
+                    sort_by_parameter_order,
+                    downgraded,
+                )
+            return
+
+        if schema_translate_map:
+            rst = functools.partial(
+                self.preparer._render_schema_translates,
+                schema_translate_map=schema_translate_map,
+            )
+        else:
+            rst = None
+
+        imv_single_values_expr = imv.single_values_expr
+        if rst:
+            imv_single_values_expr = rst(imv_single_values_expr)
+
+        executemany_values = f"({imv_single_values_expr})"
+        statement = statement.replace(executemany_values, "__EXECMANY_TOKEN__")
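+
+        # illustrative only: at this point a statement such as
+        #     INSERT INTO t (a, b) VALUES (?, ?)
+        # has become
+        #     INSERT INTO t (a, b) VALUES __EXECMANY_TOKEN__
+        # and the token is expanded below, per batch, into
+        #     (?, ?), (?, ?), ... (one group per parameter set)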
+
+        # Use optional insertmanyvalues_max_parameters
+        # to further shrink the batch size so that there are no more than
+        # insertmanyvalues_max_parameters params.
+        # Currently used by SQL Server, which limits statements to 2100 bound
+        # parameters (actually 2099).
+        max_params = self.dialect.insertmanyvalues_max_parameters
+        if max_params:
+            total_num_of_params = len(self.bind_names)
+            num_params_per_batch = len(imv.insert_crud_params)
+            num_params_outside_of_batch = (
+                total_num_of_params - num_params_per_batch
+            )
+            batch_size = min(
+                batch_size,
+                (
+                    (max_params - num_params_outside_of_batch)
+                    // num_params_per_batch
+                ),
+            )
+
+        batches = cast("List[Sequence[Any]]", list(parameters))
+        compiled_batches = cast(
+            "List[Sequence[Any]]", list(compiled_parameters)
+        )
+
+        processed_setinputsizes: Optional[_GenericSetInputSizesType] = None
+        batchnum = 1
+        total_batches = lenparams // batch_size + (
+            1 if lenparams % batch_size else 0
+        )
+
+        insert_crud_params = imv.insert_crud_params
+        assert insert_crud_params is not None
+
+        if rst:
+            insert_crud_params = [
+                (col, key, rst(expr), st)
+                for col, key, expr, st in insert_crud_params
+            ]
+
+        escaped_bind_names: Mapping[str, str]
+        expand_pos_lower_index = expand_pos_upper_index = 0
+
+        if not self.positional:
+            if self.escaped_bind_names:
+                escaped_bind_names = self.escaped_bind_names
+            else:
+                escaped_bind_names = {}
+
+            all_keys = set(parameters[0])
+
+            def apply_placeholders(keys, formatted):
+                for key in keys:
+                    key = escaped_bind_names.get(key, key)
+                    formatted = formatted.replace(
+                        self.bindtemplate % {"name": key},
+                        self.bindtemplate
+                        % {"name": f"{key}__EXECMANY_INDEX__"},
+                    )
+                return formatted
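+
+            # illustrative only: for a named-parameter format, a values
+            # clause like "(:a, :b)" becomes
+            # "(:a__EXECMANY_INDEX__, :b__EXECMANY_INDEX__)" here and is
+            # later expanded per parameter set into
+            # "(:a__0, :b__0), (:a__1, :b__1), ..."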
+
+            if imv.embed_values_counter:
+                imv_values_counter = ", _IMV_VALUES_COUNTER"
+            else:
+                imv_values_counter = ""
+            formatted_values_clause = f"""({', '.join(
+                apply_placeholders(bind_keys, formatted)
+                for _, _, formatted, bind_keys in insert_crud_params
+            )}{imv_values_counter})"""
+
+            keys_to_replace = all_keys.intersection(
+                escaped_bind_names.get(key, key)
+                for _, _, _, bind_keys in insert_crud_params
+                for key in bind_keys
+            )
+            base_parameters = {
+                key: parameters[0][key]
+                for key in all_keys.difference(keys_to_replace)
+            }
+            executemany_values_w_comma = ""
+        else:
+            formatted_values_clause = ""
+            keys_to_replace = set()
+            base_parameters = {}
+
+            if imv.embed_values_counter:
+                executemany_values_w_comma = (
+                    f"({imv_single_values_expr}, _IMV_VALUES_COUNTER), "
+                )
+            else:
+                executemany_values_w_comma = f"({imv_single_values_expr}), "
+
+            all_names_we_will_expand: Set[str] = set()
+            for elem in imv.insert_crud_params:
+                all_names_we_will_expand.update(elem[3])
+
+            # get the start and end position in a particular list
+            # of parameters where we will be doing the "expanding".
+            # statements can have params on either side or both sides,
+            # given RETURNING and CTEs
+            if all_names_we_will_expand:
+                positiontup = self.positiontup
+                assert positiontup is not None
+
+                all_expand_positions = {
+                    idx
+                    for idx, name in enumerate(positiontup)
+                    if name in all_names_we_will_expand
+                }
+                expand_pos_lower_index = min(all_expand_positions)
+                expand_pos_upper_index = max(all_expand_positions) + 1
+                assert (
+                    len(all_expand_positions)
+                    == expand_pos_upper_index - expand_pos_lower_index
+                )
+
+            if self._numeric_binds:
+                escaped = re.escape(self._numeric_binds_identifier_char)
+                executemany_values_w_comma = re.sub(
+                    rf"{escaped}\d+", "%s", executemany_values_w_comma
+                )
+
+        while batches:
+            batch = batches[0:batch_size]
+            compiled_batch = compiled_batches[0:batch_size]
+
+            batches[0:batch_size] = []
+            compiled_batches[0:batch_size] = []
+
+            if batches:
+                current_batch_size = batch_size
+            else:
+                current_batch_size = len(batch)
+
+            if generic_setinputsizes:
+                # if setinputsizes is present, expand this collection to
+                # suit the batch length as well
+                # currently this will be mssql+pyodbc for internal dialects
+                processed_setinputsizes = [
+                    (new_key, len_, typ)
+                    for new_key, len_, typ in (
+                        (f"{key}_{index}", len_, typ)
+                        for index in range(current_batch_size)
+                        for key, len_, typ in generic_setinputsizes
+                    )
+                ]
+
+            replaced_parameters: Any
+            if self.positional:
+                num_ins_params = imv.num_positional_params_counted
+
+                batch_iterator: Iterable[Sequence[Any]]
+                extra_params_left: Sequence[Any]
+                extra_params_right: Sequence[Any]
+
+                if num_ins_params == len(batch[0]):
+                    extra_params_left = extra_params_right = ()
+                    batch_iterator = batch
+                else:
+                    extra_params_left = batch[0][:expand_pos_lower_index]
+                    extra_params_right = batch[0][expand_pos_upper_index:]
+                    batch_iterator = (
+                        b[expand_pos_lower_index:expand_pos_upper_index]
+                        for b in batch
+                    )
+
+                if imv.embed_values_counter:
+                    expanded_values_string = (
+                        "".join(
+                            executemany_values_w_comma.replace(
+                                "_IMV_VALUES_COUNTER", str(i)
+                            )
+                            for i, _ in enumerate(batch)
+                        )
+                    )[:-2]
+                else:
+                    expanded_values_string = (
+                        (executemany_values_w_comma * current_batch_size)
+                    )[:-2]
+
+                if self._numeric_binds and num_ins_params > 0:
+                    # numeric will always number the parameters inside of
+                    # VALUES (and thus order self.positiontup) to be higher
+                    # than non-VALUES parameters, no matter where in the
+                    # statement those non-VALUES parameters appear (this is
+                    # ensured in _process_numeric by numbering first all
+                    # params that are not in _values_bindparam)
+                    # therefore all extra params are always
+                    # on the left side and numbered lower than the VALUES
+                    # parameters
+                    assert not extra_params_right
+
+                    start = expand_pos_lower_index + 1
+                    end = num_ins_params * (current_batch_size) + start
+
+                    # need to format here, since statement may contain
+                    # unescaped %, while values_string contains just (%s, %s)
+                    positions = tuple(
+                        f"{self._numeric_binds_identifier_char}{i}"
+                        for i in range(start, end)
+                    )
+                    expanded_values_string = expanded_values_string % positions
+
+                replaced_statement = statement.replace(
+                    "__EXECMANY_TOKEN__", expanded_values_string
+                )
+
+                replaced_parameters = tuple(
+                    itertools.chain.from_iterable(batch_iterator)
+                )
+
+                replaced_parameters = (
+                    extra_params_left
+                    + replaced_parameters
+                    + extra_params_right
+                )
+
+            else:
+                replaced_values_clauses = []
+                replaced_parameters = base_parameters.copy()
+
+                for i, param in enumerate(batch):
+                    fmv = formatted_values_clause.replace(
+                        "EXECMANY_INDEX__", str(i)
+                    )
+                    if imv.embed_values_counter:
+                        fmv = fmv.replace("_IMV_VALUES_COUNTER", str(i))
+
+                    replaced_values_clauses.append(fmv)
+                    replaced_parameters.update(
+                        {f"{key}__{i}": param[key] for key in keys_to_replace}
+                    )
+
+                replaced_statement = statement.replace(
+                    "__EXECMANY_TOKEN__",
+                    ", ".join(replaced_values_clauses),
+                )
+
+            yield _InsertManyValuesBatch(
+                replaced_statement,
+                replaced_parameters,
+                processed_setinputsizes,
+                batch,
+                (
+                    [_sentinel_from_params(cb) for cb in compiled_batch]
+                    if _sentinel_from_params
+                    else []
+                ),
+                current_batch_size,
+                batchnum,
+                total_batches,
+                sort_by_parameter_order,
+                False,
+            )
+            batchnum += 1
+
+    def visit_insert(
+        self, insert_stmt, visited_bindparam=None, visiting_cte=None, **kw
+    ):
+        compile_state = insert_stmt._compile_state_factory(
+            insert_stmt, self, **kw
+        )
+        insert_stmt = compile_state.statement
+
+        if visiting_cte is not None:
+            kw["visiting_cte"] = visiting_cte
+            toplevel = False
+        else:
+            toplevel = not self.stack
+
+        if toplevel:
+            self.isinsert = True
+            if not self.dml_compile_state:
+                self.dml_compile_state = compile_state
+            if not self.compile_state:
+                self.compile_state = compile_state
+
+        self.stack.append(
+            {
+                "correlate_froms": set(),
+                "asfrom_froms": set(),
+                "selectable": insert_stmt,
+            }
+        )
+
+        counted_bindparam = 0
+
+        # reset any incoming "visited_bindparam" collection
+        visited_bindparam = None
+
+        # for positional, insertmanyvalues needs to know how many
+        # bound parameters are in the VALUES sequence; there's no simple
+        # rule because default expressions etc. can have zero or more
+        # params inside them.   After multiple attempts to figure this out,
+        # this very simplistic "count after" approach works and likely
+        # incurs the fewest call counts, though it looks clumsy
+        if self.positional and visiting_cte is None:
+            # if we are inside a CTE, don't count parameters
+            # here since they won't be for insertmanyvalues. keep
+            # visited_bindparam at None so no counting happens.
+            # see #9173
+            visited_bindparam = []
+
+        crud_params_struct = crud._get_crud_params(
+            self,
+            insert_stmt,
+            compile_state,
+            toplevel,
+            visited_bindparam=visited_bindparam,
+            **kw,
+        )
+
+        if self.positional and visited_bindparam is not None:
+            counted_bindparam = len(visited_bindparam)
+            if self._numeric_binds:
+                if self._values_bindparam is not None:
+                    self._values_bindparam += visited_bindparam
+                else:
+                    self._values_bindparam = visited_bindparam
+
+        crud_params_single = crud_params_struct.single_params
+
+        if (
+            not crud_params_single
+            and not self.dialect.supports_default_values
+            and not self.dialect.supports_default_metavalue
+            and not self.dialect.supports_empty_insert
+        ):
+            raise exc.CompileError(
+                "The '%s' dialect with current database "
+                "version settings does not support empty "
+                "inserts." % self.dialect.name
+            )
+
+        if compile_state._has_multi_parameters:
+            if not self.dialect.supports_multivalues_insert:
+                raise exc.CompileError(
+                    "The '%s' dialect with current database "
+                    "version settings does not support "
+                    "in-place multirow inserts." % self.dialect.name
+                )
+            elif (
+                self.implicit_returning or insert_stmt._returning
+            ) and insert_stmt._sort_by_parameter_order:
+                raise exc.CompileError(
+                    "RETURNING cannot be determinstically sorted when "
+                    "using an INSERT which includes multi-row values()."
+                )
+
+        preparer = self.preparer
+        supports_default_values = self.dialect.supports_default_values
+
+        text = "INSERT "
+
+        if insert_stmt._prefixes:
+            text += self._generate_prefixes(
+                insert_stmt, insert_stmt._prefixes, **kw
+            )
+
+        text += "INTO "
+        table_text = preparer.format_table(insert_stmt.table)
+
+        if insert_stmt._hints:
+            _, table_text = self._setup_crud_hints(insert_stmt, table_text)
+
+        if insert_stmt._independent_ctes:
+            self._dispatch_independent_ctes(insert_stmt, kw)
+
+        text += table_text
+
+        if crud_params_single or not supports_default_values:
+            text += " (%s)" % ", ".join(
+                [expr for _, expr, _, _ in crud_params_single]
+            )
+
+        # look for insertmanyvalues attributes that would have been configured
+        # by crud.py as it scanned through the columns to be part of the
+        # INSERT
+        use_insertmanyvalues = crud_params_struct.use_insertmanyvalues
+        named_sentinel_params: Optional[Sequence[str]] = None
+        add_sentinel_cols = None
+        implicit_sentinel = False
+
+        returning_cols = self.implicit_returning or insert_stmt._returning
+        if returning_cols:
+            add_sentinel_cols = crud_params_struct.use_sentinel_columns
+            if add_sentinel_cols is not None:
+                assert use_insertmanyvalues
+
+                # search for the sentinel column explicitly present
+                # in the INSERT columns list, and additionally check that
+                # this column has a bound parameter name set up that's in the
+                # parameter list.  If both of these cases are present, it means
+                # we will have a client side value for the sentinel in each
+                # parameter set.
+
+                _params_by_col = {
+                    col: param_names
+                    for col, _, _, param_names in crud_params_single
+                }
+                named_sentinel_params = []
+                for _add_sentinel_col in add_sentinel_cols:
+                    if _add_sentinel_col not in _params_by_col:
+                        named_sentinel_params = None
+                        break
+                    param_name = self._within_exec_param_key_getter(
+                        _add_sentinel_col
+                    )
+                    if param_name not in _params_by_col[_add_sentinel_col]:
+                        named_sentinel_params = None
+                        break
+                    named_sentinel_params.append(param_name)
+
+                if named_sentinel_params is None:
+                    # if we are not going to have a client side value for
+                    # the sentinel in the parameter set, that means it's
+                    # an autoincrement, an IDENTITY, or a server-side SQL
+                    # expression like nextval('seqname').  So this is
+                    # an "implicit" sentinel; we will look for it in
+                    # RETURNING
+                    # only, and then sort on it.  For this case on PG,
+                    # SQL Server we have to use a special INSERT form
+                    # that guarantees the server side function lines up with
+                    # the entries in the VALUES.
+                    if (
+                        self.dialect.insertmanyvalues_implicit_sentinel
+                        & InsertmanyvaluesSentinelOpts.ANY_AUTOINCREMENT
+                    ):
+                        implicit_sentinel = True
+                    else:
+                        # here, we are not using a sentinel at all
+                        # and we are likely the SQLite dialect.
+                        # The first add_sentinel_col that we have should not
+                        # be marked as "insert_sentinel=True".  if it was,
+                        # an error should have been raised in
+                        # _get_sentinel_column_for_table.
+                        assert not add_sentinel_cols[0]._insert_sentinel, (
+                            "sentinel selection rules should have prevented "
+                            "us from getting here for this dialect"
+                        )
+
+                # always put the sentinel columns last.  even if they are
+                # in the returning list already, they will be there twice
+                # then.
+                returning_cols = list(returning_cols) + list(add_sentinel_cols)
+
+            returning_clause = self.returning_clause(
+                insert_stmt,
+                returning_cols,
+                populate_result_map=toplevel,
+            )
+
+            if self.returning_precedes_values:
+                text += " " + returning_clause
+
+        else:
+            returning_clause = None
+
+        if insert_stmt.select is not None:
+            # placed here by crud.py
+            select_text = self.process(
+                self.stack[-1]["insert_from_select"], insert_into=True, **kw
+            )
+
+            if self.ctes and self.dialect.cte_follows_insert:
+                nesting_level = len(self.stack) if not toplevel else None
+                text += " %s%s" % (
+                    self._render_cte_clause(
+                        nesting_level=nesting_level,
+                        include_following_stack=True,
+                    ),
+                    select_text,
+                )
+            else:
+                text += " %s" % select_text
+        elif not crud_params_single and supports_default_values:
+            text += " DEFAULT VALUES"
+            if use_insertmanyvalues:
+                self._insertmanyvalues = _InsertManyValues(
+                    True,
+                    self.dialect.default_metavalue_token,
+                    cast(
+                        "List[crud._CrudParamElementStr]", crud_params_single
+                    ),
+                    counted_bindparam,
+                    sort_by_parameter_order=(
+                        insert_stmt._sort_by_parameter_order
+                    ),
+                    includes_upsert_behaviors=(
+                        insert_stmt._post_values_clause is not None
+                    ),
+                    sentinel_columns=add_sentinel_cols,
+                    num_sentinel_columns=(
+                        len(add_sentinel_cols) if add_sentinel_cols else 0
+                    ),
+                    implicit_sentinel=implicit_sentinel,
+                )
+        elif compile_state._has_multi_parameters:
+            text += " VALUES %s" % (
+                ", ".join(
+                    "(%s)"
+                    % (", ".join(value for _, _, value, _ in crud_param_set))
+                    for crud_param_set in crud_params_struct.all_multi_params
+                ),
+            )
+        else:
+            insert_single_values_expr = ", ".join(
+                [
+                    value
+                    for _, _, value, _ in cast(
+                        "List[crud._CrudParamElementStr]",
+                        crud_params_single,
+                    )
+                ]
+            )
+
+            if use_insertmanyvalues:
+                if (
+                    implicit_sentinel
+                    and (
+                        self.dialect.insertmanyvalues_implicit_sentinel
+                        & InsertmanyvaluesSentinelOpts.USE_INSERT_FROM_SELECT
+                    )
+                    # this is checking if we have
+                    # INSERT INTO table (id) VALUES (DEFAULT).
+                    and not (crud_params_struct.is_default_metavalue_only)
+                ):
+                    # if we have a sentinel column that is server generated,
+                    # then for selected backends render the VALUES list as a
+                    # subquery.  This is the orderable form supported by
+                    # PostgreSQL and SQL Server.
+                    embed_sentinel_value = True
+
+                    render_bind_casts = (
+                        self.dialect.insertmanyvalues_implicit_sentinel
+                        & InsertmanyvaluesSentinelOpts.RENDER_SELECT_COL_CASTS
+                    )
+
+                    colnames = ", ".join(
+                        f"p{i}" for i, _ in enumerate(crud_params_single)
+                    )
+
+                    if render_bind_casts:
+                        # render casts for the SELECT list.  For PG, we are
+                        # already rendering bind casts in the parameter list,
+                        # selectively for the more "tricky" types like ARRAY.
+                        # however, even for the "easy" types, if the parameter
+                        # is NULL for every entry, PG gives up and says
+                        # "it must be TEXT", which fails for other easy types
+                        # like ints.  So we cast on this side too.
+                        colnames_w_cast = ", ".join(
+                            self.render_bind_cast(
+                                col.type,
+                                col.type._unwrapped_dialect_impl(self.dialect),
+                                f"p{i}",
+                            )
+                            for i, (col, *_) in enumerate(crud_params_single)
+                        )
+                    else:
+                        colnames_w_cast = colnames
+
+                    text += (
+                        f" SELECT {colnames_w_cast} FROM "
+                        f"(VALUES ({insert_single_values_expr})) "
+                        f"AS imp_sen({colnames}, sen_counter) "
+                        "ORDER BY sen_counter"
+                    )
+                else:
+                    # otherwise, if no sentinel or backend doesn't support
+                    # orderable subquery form, use a plain VALUES list
+                    embed_sentinel_value = False
+                    text += f" VALUES ({insert_single_values_expr})"
+
+                self._insertmanyvalues = _InsertManyValues(
+                    is_default_expr=False,
+                    single_values_expr=insert_single_values_expr,
+                    insert_crud_params=cast(
+                        "List[crud._CrudParamElementStr]",
+                        crud_params_single,
+                    ),
+                    num_positional_params_counted=counted_bindparam,
+                    sort_by_parameter_order=(
+                        insert_stmt._sort_by_parameter_order
+                    ),
+                    includes_upsert_behaviors=(
+                        insert_stmt._post_values_clause is not None
+                    ),
+                    sentinel_columns=add_sentinel_cols,
+                    num_sentinel_columns=(
+                        len(add_sentinel_cols) if add_sentinel_cols else 0
+                    ),
+                    sentinel_param_keys=named_sentinel_params,
+                    implicit_sentinel=implicit_sentinel,
+                    embed_values_counter=embed_sentinel_value,
+                )
+
+            else:
+                text += f" VALUES ({insert_single_values_expr})"
+
+        if insert_stmt._post_values_clause is not None:
+            post_values_clause = self.process(
+                insert_stmt._post_values_clause, **kw
+            )
+            if post_values_clause:
+                text += " " + post_values_clause
+
+        if returning_clause and not self.returning_precedes_values:
+            text += " " + returning_clause
+
+        if self.ctes and not self.dialect.cte_follows_insert:
+            nesting_level = len(self.stack) if not toplevel else None
+            text = (
+                self._render_cte_clause(
+                    nesting_level=nesting_level,
+                    include_following_stack=True,
+                )
+                + text
+            )
+
+        self.stack.pop(-1)
+
+        return text
+
+    def update_limit_clause(self, update_stmt):
+        """Provide a hook for MySQL to add LIMIT to the UPDATE"""
+        return None
+
+    def delete_limit_clause(self, delete_stmt):
+        """Provide a hook for MySQL to add LIMIT to the DELETE"""
+        return None
+
+    def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw):
+        """Provide a hook to override the initial table clause
+        in an UPDATE statement.
+
+        MySQL overrides this.
+
+        """
+        kw["asfrom"] = True
+        return from_table._compiler_dispatch(self, iscrud=True, **kw)
+
+    def update_from_clause(
+        self, update_stmt, from_table, extra_froms, from_hints, **kw
+    ):
+        """Provide a hook to override the generation of an
+        UPDATE..FROM clause.
+
+        MySQL and MSSQL override this.
+
+        """
+        raise NotImplementedError(
+            "This backend does not support multiple-table "
+            "criteria within UPDATE"
+        )
+
+    def visit_update(self, update_stmt, visiting_cte=None, **kw):
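+        # Illustrative output for a simple single-table statement:
+        #   UPDATE mytable SET x=:x WHERE mytable.id = :id_1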
+        compile_state = update_stmt._compile_state_factory(
+            update_stmt, self, **kw
+        )
+        update_stmt = compile_state.statement
+
+        if visiting_cte is not None:
+            kw["visiting_cte"] = visiting_cte
+            toplevel = False
+        else:
+            toplevel = not self.stack
+
+        if toplevel:
+            self.isupdate = True
+            if not self.dml_compile_state:
+                self.dml_compile_state = compile_state
+            if not self.compile_state:
+                self.compile_state = compile_state
+
+        if self.linting & COLLECT_CARTESIAN_PRODUCTS:
+            from_linter = FromLinter({}, set())
+            warn_linting = self.linting & WARN_LINTING
+            if toplevel:
+                self.from_linter = from_linter
+        else:
+            from_linter = None
+            warn_linting = False
+
+        extra_froms = compile_state._extra_froms
+        is_multitable = bool(extra_froms)
+
+        if is_multitable:
+            # main table might be a JOIN
+            main_froms = set(_from_objects(update_stmt.table))
+            render_extra_froms = [
+                f for f in extra_froms if f not in main_froms
+            ]
+            correlate_froms = main_froms.union(extra_froms)
+        else:
+            render_extra_froms = []
+            correlate_froms = {update_stmt.table}
+
+        self.stack.append(
+            {
+                "correlate_froms": correlate_froms,
+                "asfrom_froms": correlate_froms,
+                "selectable": update_stmt,
+            }
+        )
+
+        text = "UPDATE "
+
+        if update_stmt._prefixes:
+            text += self._generate_prefixes(
+                update_stmt, update_stmt._prefixes, **kw
+            )
+
+        table_text = self.update_tables_clause(
+            update_stmt,
+            update_stmt.table,
+            render_extra_froms,
+            from_linter=from_linter,
+            **kw,
+        )
+        crud_params_struct = crud._get_crud_params(
+            self, update_stmt, compile_state, toplevel, **kw
+        )
+        crud_params = crud_params_struct.single_params
+
+        if update_stmt._hints:
+            dialect_hints, table_text = self._setup_crud_hints(
+                update_stmt, table_text
+            )
+        else:
+            dialect_hints = None
+
+        if update_stmt._independent_ctes:
+            self._dispatch_independent_ctes(update_stmt, kw)
+
+        text += table_text
+
+        text += " SET "
+        text += ", ".join(
+            expr + "=" + value
+            for _, expr, value, _ in cast(
+                "List[Tuple[Any, str, str, Any]]", crud_params
+            )
+        )
+
+        if self.implicit_returning or update_stmt._returning:
+            if self.returning_precedes_values:
+                text += " " + self.returning_clause(
+                    update_stmt,
+                    self.implicit_returning or update_stmt._returning,
+                    populate_result_map=toplevel,
+                )
+
+        if extra_froms:
+            extra_from_text = self.update_from_clause(
+                update_stmt,
+                update_stmt.table,
+                render_extra_froms,
+                dialect_hints,
+                from_linter=from_linter,
+                **kw,
+            )
+            if extra_from_text:
+                text += " " + extra_from_text
+
+        if update_stmt._where_criteria:
+            t = self._generate_delimited_and_list(
+                update_stmt._where_criteria, from_linter=from_linter, **kw
+            )
+            if t:
+                text += " WHERE " + t
+
+        limit_clause = self.update_limit_clause(update_stmt)
+        if limit_clause:
+            text += " " + limit_clause
+
+        if (
+            self.implicit_returning or update_stmt._returning
+        ) and not self.returning_precedes_values:
+            text += " " + self.returning_clause(
+                update_stmt,
+                self.implicit_returning or update_stmt._returning,
+                populate_result_map=toplevel,
+            )
+
+        if self.ctes:
+            nesting_level = len(self.stack) if not toplevel else None
+            text = self._render_cte_clause(nesting_level=nesting_level) + text
+
+        if warn_linting:
+            assert from_linter is not None
+            from_linter.warn(stmt_type="UPDATE")
+
+        self.stack.pop(-1)
+
+        return text
+
+    def delete_extra_from_clause(
+        self, update_stmt, from_table, extra_froms, from_hints, **kw
+    ):
+        """Provide a hook to override the generation of an
+        DELETE..FROM clause.
+
+        This can be used to implement DELETE..USING for example.
+
+        MySQL and MSSQL override this.
+
+        """
+        raise NotImplementedError(
+            "This backend does not support multiple-table "
+            "criteria within DELETE"
+        )
+
+    def delete_table_clause(self, delete_stmt, from_table, extra_froms, **kw):
+        return from_table._compiler_dispatch(
+            self, asfrom=True, iscrud=True, **kw
+        )
+
+    def visit_delete(self, delete_stmt, visiting_cte=None, **kw):
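+        # Illustrative output for a simple single-table statement:
+        #   DELETE FROM mytable WHERE mytable.id = :id_1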
+        compile_state = delete_stmt._compile_state_factory(
+            delete_stmt, self, **kw
+        )
+        delete_stmt = compile_state.statement
+
+        if visiting_cte is not None:
+            kw["visiting_cte"] = visiting_cte
+            toplevel = False
+        else:
+            toplevel = not self.stack
+
+        if toplevel:
+            self.isdelete = True
+            if not self.dml_compile_state:
+                self.dml_compile_state = compile_state
+            if not self.compile_state:
+                self.compile_state = compile_state
+
+        if self.linting & COLLECT_CARTESIAN_PRODUCTS:
+            from_linter = FromLinter({}, set())
+            warn_linting = self.linting & WARN_LINTING
+            if toplevel:
+                self.from_linter = from_linter
+        else:
+            from_linter = None
+            warn_linting = False
+
+        extra_froms = compile_state._extra_froms
+
+        correlate_froms = {delete_stmt.table}.union(extra_froms)
+        self.stack.append(
+            {
+                "correlate_froms": correlate_froms,
+                "asfrom_froms": correlate_froms,
+                "selectable": delete_stmt,
+            }
+        )
+
+        text = "DELETE "
+
+        if delete_stmt._prefixes:
+            text += self._generate_prefixes(
+                delete_stmt, delete_stmt._prefixes, **kw
+            )
+
+        text += "FROM "
+
+        try:
+            table_text = self.delete_table_clause(
+                delete_stmt,
+                delete_stmt.table,
+                extra_froms,
+                from_linter=from_linter,
+            )
+        except TypeError:
+            # anticipate 3rd party dialects that don't include **kw
+            # TODO: remove in 2.1
+            table_text = self.delete_table_clause(
+                delete_stmt, delete_stmt.table, extra_froms
+            )
+            if from_linter:
+                _ = self.process(delete_stmt.table, from_linter=from_linter)
+
+        crud._get_crud_params(self, delete_stmt, compile_state, toplevel, **kw)
+
+        if delete_stmt._hints:
+            dialect_hints, table_text = self._setup_crud_hints(
+                delete_stmt, table_text
+            )
+        else:
+            dialect_hints = None
+
+        if delete_stmt._independent_ctes:
+            self._dispatch_independent_ctes(delete_stmt, kw)
+
+        text += table_text
+
+        if (
+            self.implicit_returning or delete_stmt._returning
+        ) and self.returning_precedes_values:
+            text += " " + self.returning_clause(
+                delete_stmt,
+                self.implicit_returning or delete_stmt._returning,
+                populate_result_map=toplevel,
+            )
+
+        if extra_froms:
+            extra_from_text = self.delete_extra_from_clause(
+                delete_stmt,
+                delete_stmt.table,
+                extra_froms,
+                dialect_hints,
+                from_linter=from_linter,
+                **kw,
+            )
+            if extra_from_text:
+                text += " " + extra_from_text
+
+        if delete_stmt._where_criteria:
+            t = self._generate_delimited_and_list(
+                delete_stmt._where_criteria, from_linter=from_linter, **kw
+            )
+            if t:
+                text += " WHERE " + t
+
+        limit_clause = self.delete_limit_clause(delete_stmt)
+        if limit_clause:
+            text += " " + limit_clause
+
+        if (
+            self.implicit_returning or delete_stmt._returning
+        ) and not self.returning_precedes_values:
+            text += " " + self.returning_clause(
+                delete_stmt,
+                self.implicit_returning or delete_stmt._returning,
+                populate_result_map=toplevel,
+            )
+
+        if self.ctes:
+            nesting_level = len(self.stack) if not toplevel else None
+            text = self._render_cte_clause(nesting_level=nesting_level) + text
+
+        if warn_linting:
+            assert from_linter is not None
+            from_linter.warn(stmt_type="DELETE")
+
+        self.stack.pop(-1)
+
+        return text
+
+    def visit_savepoint(self, savepoint_stmt, **kw):
+        return "SAVEPOINT %s" % self.preparer.format_savepoint(savepoint_stmt)
+
+    def visit_rollback_to_savepoint(self, savepoint_stmt, **kw):
+        return "ROLLBACK TO SAVEPOINT %s" % self.preparer.format_savepoint(
+            savepoint_stmt
+        )
+
+    def visit_release_savepoint(self, savepoint_stmt, **kw):
+        return "RELEASE SAVEPOINT %s" % self.preparer.format_savepoint(
+            savepoint_stmt
+        )
+
+
+class StrSQLCompiler(SQLCompiler):
+    """A :class:`.SQLCompiler` subclass which allows a small selection
+    of non-standard SQL features to render into a string value.
+
+    The :class:`.StrSQLCompiler` is invoked whenever a Core expression
+    element is directly stringified without calling upon the
+    :meth:`_expression.ClauseElement.compile` method.
+    It can render a limited set
+    of non-standard SQL constructs to assist in basic stringification;
+    however, for more substantial custom or dialect-specific SQL constructs,
+    it will be necessary to make use of
+    :meth:`_expression.ClauseElement.compile`
+    directly.
+
+    .. seealso::
+
+        :ref:`faq_sql_expression_string`
+
+    """
+
+    def _fallback_column_name(self, column):
+        return "<name unknown>"
+
+    @util.preload_module("sqlalchemy.engine.url")
+    def visit_unsupported_compilation(self, element, err, **kw):
+        if element.stringify_dialect != "default":
+            url = util.preloaded.engine_url
+            dialect = url.URL.create(element.stringify_dialect).get_dialect()()
+
+            compiler = dialect.statement_compiler(
+                dialect, None, _supporting_against=self
+            )
+            if not isinstance(compiler, StrSQLCompiler):
+                return compiler.process(element, **kw)
+
+        return super().visit_unsupported_compilation(element, err)
+
+    def visit_getitem_binary(self, binary, operator, **kw):
+        return "%s[%s]" % (
+            self.process(binary.left, **kw),
+            self.process(binary.right, **kw),
+        )
+
+    def visit_json_getitem_op_binary(self, binary, operator, **kw):
+        return self.visit_getitem_binary(binary, operator, **kw)
+
+    def visit_json_path_getitem_op_binary(self, binary, operator, **kw):
+        return self.visit_getitem_binary(binary, operator, **kw)
+
+    def visit_sequence(self, sequence, **kw):
+        return (
+            f"<next sequence value: {self.preparer.format_sequence(sequence)}>"
+        )
+
+    def returning_clause(
+        self,
+        stmt: UpdateBase,
+        returning_cols: Sequence[ColumnElement[Any]],
+        *,
+        populate_result_map: bool,
+        **kw: Any,
+    ) -> str:
+        columns = [
+            self._label_select_column(None, c, True, False, {})
+            for c in base._select_iterables(returning_cols)
+        ]
+        return "RETURNING " + ", ".join(columns)
+
+    def update_from_clause(
+        self, update_stmt, from_table, extra_froms, from_hints, **kw
+    ):
+        kw["asfrom"] = True
+        return "FROM " + ", ".join(
+            t._compiler_dispatch(self, fromhints=from_hints, **kw)
+            for t in extra_froms
+        )
+
+    def delete_extra_from_clause(
+        self, update_stmt, from_table, extra_froms, from_hints, **kw
+    ):
+        kw["asfrom"] = True
+        return ", " + ", ".join(
+            t._compiler_dispatch(self, fromhints=from_hints, **kw)
+            for t in extra_froms
+        )
+
+    def visit_empty_set_expr(self, element_types, **kw):
+        return "SELECT 1 WHERE 1!=1"
+
+    def get_from_hint_text(self, table, text):
+        return "[%s]" % text
+
+    def visit_regexp_match_op_binary(self, binary, operator, **kw):
+        return self._generate_generic_binary(binary, " <regexp> ", **kw)
+
+    def visit_not_regexp_match_op_binary(self, binary, operator, **kw):
+        return self._generate_generic_binary(binary, " <not regexp> ", **kw)
+
+    def visit_regexp_replace_op_binary(self, binary, operator, **kw):
+        return "<regexp replace>(%s, %s)" % (
+            binary.left._compiler_dispatch(self, **kw),
+            binary.right._compiler_dispatch(self, **kw),
+        )
+
+    def visit_try_cast(self, cast, **kwargs):
+        return "TRY_CAST(%s AS %s)" % (
+            cast.clause._compiler_dispatch(self, **kwargs),
+            cast.typeclause._compiler_dispatch(self, **kwargs),
+        )
+
+
+class DDLCompiler(Compiled):
+    is_ddl = True
+
+    if TYPE_CHECKING:
+
+        def __init__(
+            self,
+            dialect: Dialect,
+            statement: ExecutableDDLElement,
+            schema_translate_map: Optional[SchemaTranslateMapType] = ...,
+            render_schema_translate: bool = ...,
+            compile_kwargs: Mapping[str, Any] = ...,
+        ): ...
+
+    @util.memoized_property
+    def sql_compiler(self):
+        return self.dialect.statement_compiler(
+            self.dialect, None, schema_translate_map=self.schema_translate_map
+        )
+
+    @util.memoized_property
+    def type_compiler(self):
+        return self.dialect.type_compiler_instance
+
+    def construct_params(
+        self,
+        params: Optional[_CoreSingleExecuteParams] = None,
+        extracted_parameters: Optional[Sequence[BindParameter[Any]]] = None,
+        escape_names: bool = True,
+    ) -> Optional[_MutableCoreSingleExecuteParams]:
+        return None
+
+    def visit_ddl(self, ddl, **kwargs):
+        # table events can substitute table and schema name
+        context = ddl.context
+        if isinstance(ddl.target, schema.Table):
+            context = context.copy()
+
+            preparer = self.preparer
+            path = preparer.format_table_seq(ddl.target)
+            if len(path) == 1:
+                table, sch = path[0], ""
+            else:
+                table, sch = path[-1], path[0]
+
+            context.setdefault("table", table)
+            context.setdefault("schema", sch)
+            context.setdefault("fullname", preparer.format_table(ddl.target))
+
+        return self.sql_compiler.post_process_text(ddl.statement % context)
+
+    def visit_create_schema(self, create, **kw):
+        text = "CREATE SCHEMA "
+        if create.if_not_exists:
+            text += "IF NOT EXISTS "
+        return text + self.preparer.format_schema(create.element)
+
+    def visit_drop_schema(self, drop, **kw):
+        text = "DROP SCHEMA "
+        if drop.if_exists:
+            text += "IF EXISTS "
+        text += self.preparer.format_schema(drop.element)
+        if drop.cascade:
+            text += " CASCADE"
+        return text
+
+    def visit_create_table(self, create, **kw):
+        table = create.element
+        preparer = self.preparer
+
+        text = "\nCREATE "
+        if table._prefixes:
+            text += " ".join(table._prefixes) + " "
+
+        text += "TABLE "
+        if create.if_not_exists:
+            text += "IF NOT EXISTS "
+
+        text += preparer.format_table(table) + " "
+
+        create_table_suffix = self.create_table_suffix(table)
+        if create_table_suffix:
+            text += create_table_suffix + " "
+
+        text += "("
+
+        separator = "\n"
+
+        # if only one primary key, specify it along with the column
+        first_pk = False
+        for create_column in create.columns:
+            column = create_column.element
+            try:
+                processed = self.process(
+                    create_column, first_pk=column.primary_key and not first_pk
+                )
+                if processed is not None:
+                    text += separator
+                    separator = ", \n"
+                    text += "\t" + processed
+                if column.primary_key:
+                    first_pk = True
+            except exc.CompileError as ce:
+                raise exc.CompileError(
+                    "(in table '%s', column '%s'): %s"
+                    % (table.description, column.name, ce.args[0])
+                ) from ce
+
+        const = self.create_table_constraints(
+            table,
+            _include_foreign_key_constraints=create.include_foreign_key_constraints,  # noqa
+        )
+        if const:
+            text += separator + "\t" + const
+
+        text += "\n)%s\n\n" % self.post_create_table(table)
+        return text
+
+    def visit_create_column(self, create, first_pk=False, **kw):
+        column = create.element
+
+        if column.system:
+            return None
+
+        text = self.get_column_specification(column, first_pk=first_pk)
+        const = " ".join(
+            self.process(constraint) for constraint in column.constraints
+        )
+        if const:
+            text += " " + const
+
+        return text
+
+    def create_table_constraints(
+        self, table, _include_foreign_key_constraints=None, **kw
+    ):
+        # On some databases the order is significant: visit the PK first,
+        # then the other constraints (engine.ReflectionTest.testbasic
+        # failed on FB2)
+        constraints = []
+        if table.primary_key:
+            constraints.append(table.primary_key)
+
+        all_fkcs = table.foreign_key_constraints
+        if _include_foreign_key_constraints is not None:
+            omit_fkcs = all_fkcs.difference(_include_foreign_key_constraints)
+        else:
+            omit_fkcs = set()
+
+        constraints.extend(
+            [
+                c
+                for c in table._sorted_constraints
+                if c is not table.primary_key and c not in omit_fkcs
+            ]
+        )
+
+        return ", \n\t".join(
+            p
+            for p in (
+                self.process(constraint)
+                for constraint in constraints
+                if (constraint._should_create_for_compiler(self))
+                and (
+                    not self.dialect.supports_alter
+                    or not getattr(constraint, "use_alter", False)
+                )
+            )
+            if p is not None
+        )
+
+    def visit_drop_table(self, drop, **kw):
+        text = "\nDROP TABLE "
+        if drop.if_exists:
+            text += "IF EXISTS "
+        return text + self.preparer.format_table(drop.element)
+
+    def visit_drop_view(self, drop, **kw):
+        return "\nDROP VIEW " + self.preparer.format_table(drop.element)
+
+    def _verify_index_table(self, index):
+        if index.table is None:
+            raise exc.CompileError(
+                "Index '%s' is not associated with any table." % index.name
+            )
+
+    def visit_create_index(
+        self, create, include_schema=False, include_table_schema=True, **kw
+    ):
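+        # e.g. Index("ix_t_x", t.c.x, unique=True) renders roughly as:
+        #   CREATE UNIQUE INDEX ix_t_x ON t (x)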
+        index = create.element
+        self._verify_index_table(index)
+        preparer = self.preparer
+        text = "CREATE "
+        if index.unique:
+            text += "UNIQUE "
+        if index.name is None:
+            raise exc.CompileError(
+                "CREATE INDEX requires that the index have a name"
+            )
+
+        text += "INDEX "
+        if create.if_not_exists:
+            text += "IF NOT EXISTS "
+
+        text += "%s ON %s (%s)" % (
+            self._prepared_index_name(index, include_schema=include_schema),
+            preparer.format_table(
+                index.table, use_schema=include_table_schema
+            ),
+            ", ".join(
+                self.sql_compiler.process(
+                    expr, include_table=False, literal_binds=True
+                )
+                for expr in index.expressions
+            ),
+        )
+        return text
+
+    def visit_drop_index(self, drop, **kw):
+        index = drop.element
+
+        if index.name is None:
+            raise exc.CompileError(
+                "DROP INDEX requires that the index have a name"
+            )
+        text = "\nDROP INDEX "
+        if drop.if_exists:
+            text += "IF EXISTS "
+
+        return text + self._prepared_index_name(index, include_schema=True)
+
+    def _prepared_index_name(self, index, include_schema=False):
+        if index.table is not None:
+            effective_schema = self.preparer.schema_for_object(index.table)
+        else:
+            effective_schema = None
+        if include_schema and effective_schema:
+            schema_name = self.preparer.quote_schema(effective_schema)
+        else:
+            schema_name = None
+
+        index_name = self.preparer.format_index(index)
+
+        if schema_name:
+            index_name = schema_name + "." + index_name
+        return index_name
+
+    def visit_add_constraint(self, create, **kw):
+        return "ALTER TABLE %s ADD %s" % (
+            self.preparer.format_table(create.element.table),
+            self.process(create.element),
+        )
+
+    def visit_set_table_comment(self, create, **kw):
+        return "COMMENT ON TABLE %s IS %s" % (
+            self.preparer.format_table(create.element),
+            self.sql_compiler.render_literal_value(
+                create.element.comment, sqltypes.String()
+            ),
+        )
+
+    def visit_drop_table_comment(self, drop, **kw):
+        return "COMMENT ON TABLE %s IS NULL" % self.preparer.format_table(
+            drop.element
+        )
+
+    def visit_set_column_comment(self, create, **kw):
+        return "COMMENT ON COLUMN %s IS %s" % (
+            self.preparer.format_column(
+                create.element, use_table=True, use_schema=True
+            ),
+            self.sql_compiler.render_literal_value(
+                create.element.comment, sqltypes.String()
+            ),
+        )
+
+    def visit_drop_column_comment(self, drop, **kw):
+        return "COMMENT ON COLUMN %s IS NULL" % self.preparer.format_column(
+            drop.element, use_table=True
+        )
+
+    def visit_set_constraint_comment(self, create, **kw):
+        raise exc.UnsupportedCompilationError(self, type(create))
+
+    def visit_drop_constraint_comment(self, drop, **kw):
+        raise exc.UnsupportedCompilationError(self, type(drop))
+
+    def get_identity_options(self, identity_options):
+        text = []
+        if identity_options.increment is not None:
+            text.append("INCREMENT BY %d" % identity_options.increment)
+        if identity_options.start is not None:
+            text.append("START WITH %d" % identity_options.start)
+        if identity_options.minvalue is not None:
+            text.append("MINVALUE %d" % identity_options.minvalue)
+        if identity_options.maxvalue is not None:
+            text.append("MAXVALUE %d" % identity_options.maxvalue)
+        if identity_options.nominvalue is not None:
+            text.append("NO MINVALUE")
+        if identity_options.nomaxvalue is not None:
+            text.append("NO MAXVALUE")
+        if identity_options.cache is not None:
+            text.append("CACHE %d" % identity_options.cache)
+        if identity_options.cycle is not None:
+            text.append("CYCLE" if identity_options.cycle else "NO CYCLE")
+        return " ".join(text)
+
+    def visit_create_sequence(self, create, prefix=None, **kw):
+        text = "CREATE SEQUENCE "
+        if create.if_not_exists:
+            text += "IF NOT EXISTS "
+        text += self.preparer.format_sequence(create.element)
+
+        if prefix:
+            text += prefix
+        options = self.get_identity_options(create.element)
+        if options:
+            text += " " + options
+        return text
+
+    def visit_drop_sequence(self, drop, **kw):
+        text = "DROP SEQUENCE "
+        if drop.if_exists:
+            text += "IF EXISTS "
+        return text + self.preparer.format_sequence(drop.element)
+
+    def visit_drop_constraint(self, drop, **kw):
+        constraint = drop.element
+        if constraint.name is not None:
+            formatted_name = self.preparer.format_constraint(constraint)
+        else:
+            formatted_name = None
+
+        if formatted_name is None:
+            raise exc.CompileError(
+                "Can't emit DROP CONSTRAINT for constraint %r; "
+                "it has no name" % drop.element
+            )
+        return "ALTER TABLE %s DROP CONSTRAINT %s%s%s" % (
+            self.preparer.format_table(drop.element.table),
+            "IF EXISTS " if drop.if_exists else "",
+            formatted_name,
+            " CASCADE" if drop.cascade else "",
+        )
+
+    def get_column_specification(self, column, **kwargs):
+        colspec = (
+            self.preparer.format_column(column)
+            + " "
+            + self.dialect.type_compiler_instance.process(
+                column.type, type_expression=column
+            )
+        )
+        default = self.get_column_default_string(column)
+        if default is not None:
+            colspec += " DEFAULT " + default
+
+        if column.computed is not None:
+            colspec += " " + self.process(column.computed)
+
+        if (
+            column.identity is not None
+            and self.dialect.supports_identity_columns
+        ):
+            colspec += " " + self.process(column.identity)
+
+        if not column.nullable and (
+            not column.identity or not self.dialect.supports_identity_columns
+        ):
+            colspec += " NOT NULL"
+        return colspec
+
+    def create_table_suffix(self, table):
+        return ""
+
+    def post_create_table(self, table):
+        return ""
+
+    def get_column_default_string(self, column):
+        if isinstance(column.server_default, schema.DefaultClause):
+            return self.render_default_string(column.server_default.arg)
+        else:
+            return None
+
+    def render_default_string(self, default):
+        if isinstance(default, str):
+            return self.sql_compiler.render_literal_value(
+                default, sqltypes.STRINGTYPE
+            )
+        else:
+            return self.sql_compiler.process(default, literal_binds=True)
+
+    def visit_table_or_column_check_constraint(self, constraint, **kw):
+        if constraint.is_column_level:
+            return self.visit_column_check_constraint(constraint)
+        else:
+            return self.visit_check_constraint(constraint)
+
+    def visit_check_constraint(self, constraint, **kw):
+        text = ""
+        if constraint.name is not None:
+            formatted_name = self.preparer.format_constraint(constraint)
+            if formatted_name is not None:
+                text += "CONSTRAINT %s " % formatted_name
+        text += "CHECK (%s)" % self.sql_compiler.process(
+            constraint.sqltext, include_table=False, literal_binds=True
+        )
+        text += self.define_constraint_deferrability(constraint)
+        return text
+
+    def visit_column_check_constraint(self, constraint, **kw):
+        text = ""
+        if constraint.name is not None:
+            formatted_name = self.preparer.format_constraint(constraint)
+            if formatted_name is not None:
+                text += "CONSTRAINT %s " % formatted_name
+        text += "CHECK (%s)" % self.sql_compiler.process(
+            constraint.sqltext, include_table=False, literal_binds=True
+        )
+        text += self.define_constraint_deferrability(constraint)
+        return text
+
+    def visit_primary_key_constraint(self, constraint, **kw):
+        if len(constraint) == 0:
+            return ""
+        text = ""
+        if constraint.name is not None:
+            formatted_name = self.preparer.format_constraint(constraint)
+            if formatted_name is not None:
+                text += "CONSTRAINT %s " % formatted_name
+        text += "PRIMARY KEY "
+        text += "(%s)" % ", ".join(
+            self.preparer.quote(c.name)
+            for c in (
+                constraint.columns_autoinc_first
+                if constraint._implicit_generated
+                else constraint.columns
+            )
+        )
+        text += self.define_constraint_deferrability(constraint)
+        return text
+
+    def visit_foreign_key_constraint(self, constraint, **kw):
+        preparer = self.preparer
+        text = ""
+        if constraint.name is not None:
+            formatted_name = self.preparer.format_constraint(constraint)
+            if formatted_name is not None:
+                text += "CONSTRAINT %s " % formatted_name
+        remote_table = list(constraint.elements)[0].column.table
+        text += "FOREIGN KEY(%s) REFERENCES %s (%s)" % (
+            ", ".join(
+                preparer.quote(f.parent.name) for f in constraint.elements
+            ),
+            self.define_constraint_remote_table(
+                constraint, remote_table, preparer
+            ),
+            ", ".join(
+                preparer.quote(f.column.name) for f in constraint.elements
+            ),
+        )
+        text += self.define_constraint_match(constraint)
+        text += self.define_constraint_cascades(constraint)
+        text += self.define_constraint_deferrability(constraint)
+        return text
+
+    def define_constraint_remote_table(self, constraint, table, preparer):
+        """Format the remote table clause of a CREATE CONSTRAINT clause."""
+
+        return preparer.format_table(table)
+
+    def visit_unique_constraint(self, constraint, **kw):
+        if len(constraint) == 0:
+            return ""
+        text = ""
+        if constraint.name is not None:
+            formatted_name = self.preparer.format_constraint(constraint)
+            if formatted_name is not None:
+                text += "CONSTRAINT %s " % formatted_name
+        text += "UNIQUE %s(%s)" % (
+            self.define_unique_constraint_distinct(constraint, **kw),
+            ", ".join(self.preparer.quote(c.name) for c in constraint),
+        )
+        text += self.define_constraint_deferrability(constraint)
+        return text
+
+    def define_unique_constraint_distinct(self, constraint, **kw):
+        return ""
+
+    def define_constraint_cascades(self, constraint):
+        text = ""
+        if constraint.ondelete is not None:
+            text += " ON DELETE %s" % self.preparer.validate_sql_phrase(
+                constraint.ondelete, FK_ON_DELETE
+            )
+        if constraint.onupdate is not None:
+            text += " ON UPDATE %s" % self.preparer.validate_sql_phrase(
+                constraint.onupdate, FK_ON_UPDATE
+            )
+        return text
+
+    def define_constraint_deferrability(self, constraint):
+        text = ""
+        if constraint.deferrable is not None:
+            if constraint.deferrable:
+                text += " DEFERRABLE"
+            else:
+                text += " NOT DEFERRABLE"
+        if constraint.initially is not None:
+            text += " INITIALLY %s" % self.preparer.validate_sql_phrase(
+                constraint.initially, FK_INITIALLY
+            )
+        return text
+
+    def define_constraint_match(self, constraint):
+        text = ""
+        if constraint.match is not None:
+            text += " MATCH %s" % constraint.match
+        return text
+
+    def visit_computed_column(self, generated, **kw):
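+        # e.g. Computed("x + 1", persisted=True) renders roughly as:
+        #   GENERATED ALWAYS AS (x + 1) STORED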
+        text = "GENERATED ALWAYS AS (%s)" % self.sql_compiler.process(
+            generated.sqltext, include_table=False, literal_binds=True
+        )
+        if generated.persisted is True:
+            text += " STORED"
+        elif generated.persisted is False:
+            text += " VIRTUAL"
+        return text
+
+    def visit_identity_column(self, identity, **kw):
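+        # e.g. Identity(always=True, start=1, increment=1) renders roughly
+        # as: GENERATED ALWAYS AS IDENTITY (INCREMENT BY 1 START WITH 1)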
+        text = "GENERATED %s AS IDENTITY" % (
+            "ALWAYS" if identity.always else "BY DEFAULT",
+        )
+        options = self.get_identity_options(identity)
+        if options:
+            text += " (%s)" % options
+        return text
+
+
+class GenericTypeCompiler(TypeCompiler):
+    def visit_FLOAT(self, type_, **kw):
+        return "FLOAT"
+
+    def visit_DOUBLE(self, type_, **kw):
+        return "DOUBLE"
+
+    def visit_DOUBLE_PRECISION(self, type_, **kw):
+        return "DOUBLE PRECISION"
+
+    def visit_REAL(self, type_, **kw):
+        return "REAL"
+
+    def visit_NUMERIC(self, type_, **kw):
+        if type_.precision is None:
+            return "NUMERIC"
+        elif type_.scale is None:
+            return "NUMERIC(%(precision)s)" % {"precision": type_.precision}
+        else:
+            return "NUMERIC(%(precision)s, %(scale)s)" % {
+                "precision": type_.precision,
+                "scale": type_.scale,
+            }
+
+    def visit_DECIMAL(self, type_, **kw):
+        if type_.precision is None:
+            return "DECIMAL"
+        elif type_.scale is None:
+            return "DECIMAL(%(precision)s)" % {"precision": type_.precision}
+        else:
+            return "DECIMAL(%(precision)s, %(scale)s)" % {
+                "precision": type_.precision,
+                "scale": type_.scale,
+            }
+
+    def visit_INTEGER(self, type_, **kw):
+        return "INTEGER"
+
+    def visit_SMALLINT(self, type_, **kw):
+        return "SMALLINT"
+
+    def visit_BIGINT(self, type_, **kw):
+        return "BIGINT"
+
+    def visit_TIMESTAMP(self, type_, **kw):
+        return "TIMESTAMP"
+
+    def visit_DATETIME(self, type_, **kw):
+        return "DATETIME"
+
+    def visit_DATE(self, type_, **kw):
+        return "DATE"
+
+    def visit_TIME(self, type_, **kw):
+        return "TIME"
+
+    def visit_CLOB(self, type_, **kw):
+        return "CLOB"
+
+    def visit_NCLOB(self, type_, **kw):
+        return "NCLOB"
+
+    def _render_string_type(self, type_, name, length_override=None):
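+        # e.g. String(30) renders via visit_VARCHAR as "VARCHAR(30)";
+        # String(30, collation="fr_FR") as 'VARCHAR(30) COLLATE "fr_FR"'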
+        text = name
+        if length_override:
+            text += "(%d)" % length_override
+        elif type_.length:
+            text += "(%d)" % type_.length
+        if type_.collation:
+            text += ' COLLATE "%s"' % type_.collation
+        return text
+
+    def visit_CHAR(self, type_, **kw):
+        return self._render_string_type(type_, "CHAR")
+
+    def visit_NCHAR(self, type_, **kw):
+        return self._render_string_type(type_, "NCHAR")
+
+    def visit_VARCHAR(self, type_, **kw):
+        return self._render_string_type(type_, "VARCHAR")
+
+    def visit_NVARCHAR(self, type_, **kw):
+        return self._render_string_type(type_, "NVARCHAR")
+
+    def visit_TEXT(self, type_, **kw):
+        return self._render_string_type(type_, "TEXT")
+
+    def visit_UUID(self, type_, **kw):
+        return "UUID"
+
+    def visit_BLOB(self, type_, **kw):
+        return "BLOB"
+
+    def visit_BINARY(self, type_, **kw):
+        return "BINARY" + (type_.length and "(%d)" % type_.length or "")
+
+    def visit_VARBINARY(self, type_, **kw):
+        return "VARBINARY" + (type_.length and "(%d)" % type_.length or "")
+
+    def visit_BOOLEAN(self, type_, **kw):
+        return "BOOLEAN"
+
+    def visit_uuid(self, type_, **kw):
+        if not type_.native_uuid or not self.dialect.supports_native_uuid:
+            return self._render_string_type(type_, "CHAR", length_override=32)
+        else:
+            return self.visit_UUID(type_, **kw)
+
+    def visit_large_binary(self, type_, **kw):
+        return self.visit_BLOB(type_, **kw)
+
+    def visit_boolean(self, type_, **kw):
+        return self.visit_BOOLEAN(type_, **kw)
+
+    def visit_time(self, type_, **kw):
+        return self.visit_TIME(type_, **kw)
+
+    def visit_datetime(self, type_, **kw):
+        return self.visit_DATETIME(type_, **kw)
+
+    def visit_date(self, type_, **kw):
+        return self.visit_DATE(type_, **kw)
+
+    def visit_big_integer(self, type_, **kw):
+        return self.visit_BIGINT(type_, **kw)
+
+    def visit_small_integer(self, type_, **kw):
+        return self.visit_SMALLINT(type_, **kw)
+
+    def visit_integer(self, type_, **kw):
+        return self.visit_INTEGER(type_, **kw)
+
+    def visit_real(self, type_, **kw):
+        return self.visit_REAL(type_, **kw)
+
+    def visit_float(self, type_, **kw):
+        return self.visit_FLOAT(type_, **kw)
+
+    def visit_double(self, type_, **kw):
+        return self.visit_DOUBLE(type_, **kw)
+
+    def visit_numeric(self, type_, **kw):
+        return self.visit_NUMERIC(type_, **kw)
+
+    def visit_string(self, type_, **kw):
+        return self.visit_VARCHAR(type_, **kw)
+
+    def visit_unicode(self, type_, **kw):
+        return self.visit_VARCHAR(type_, **kw)
+
+    def visit_text(self, type_, **kw):
+        return self.visit_TEXT(type_, **kw)
+
+    def visit_unicode_text(self, type_, **kw):
+        return self.visit_TEXT(type_, **kw)
+
+    def visit_enum(self, type_, **kw):
+        return self.visit_VARCHAR(type_, **kw)
+
+    def visit_null(self, type_, **kw):
+        raise exc.CompileError(
+            "Can't generate DDL for %r; "
+            "did you forget to specify a "
+            "type on this Column?" % type_
+        )
+
+    def visit_type_decorator(self, type_, **kw):
+        return self.process(type_.type_engine(self.dialect), **kw)
+
+    def visit_user_defined(self, type_, **kw):
+        return type_.get_col_spec(**kw)
+
+
+class StrSQLTypeCompiler(GenericTypeCompiler):
+    def process(self, type_, **kw):
+        try:
+            _compiler_dispatch = type_._compiler_dispatch
+        except AttributeError:
+            return self._visit_unknown(type_, **kw)
+        else:
+            return _compiler_dispatch(self, **kw)
+
+    def __getattr__(self, key):
+        if key.startswith("visit_"):
+            return self._visit_unknown
+        else:
+            raise AttributeError(key)
+
+    def _visit_unknown(self, type_, **kw):
+        if type_.__class__.__name__ == type_.__class__.__name__.upper():
+            return type_.__class__.__name__
+        else:
+            return repr(type_)
+
+    def visit_null(self, type_, **kw):
+        return "NULL"
+
+    def visit_user_defined(self, type_, **kw):
+        try:
+            get_col_spec = type_.get_col_spec
+        except AttributeError:
+            return repr(type_)
+        else:
+            return get_col_spec(**kw)
+
+
+class _SchemaForObjectCallable(Protocol):
+    def __call__(self, __obj: Any) -> str: ...
+
+
+class _BindNameForColProtocol(Protocol):
+    def __call__(self, col: ColumnClause[Any]) -> str: ...
+
+
+class IdentifierPreparer:
+    """Handle quoting and case-folding of identifiers based on options."""
+
+    reserved_words = RESERVED_WORDS
+
+    legal_characters = LEGAL_CHARACTERS
+
+    illegal_initial_characters = ILLEGAL_INITIAL_CHARACTERS
+
+    initial_quote: str
+
+    final_quote: str
+
+    _strings: MutableMapping[str, str]
+
+    schema_for_object: _SchemaForObjectCallable = operator.attrgetter("schema")
+    """Return the .schema attribute for an object.
+
+    For the default IdentifierPreparer, the schema for an object is always
+    the value of the ".schema" attribute.  If the preparer is replaced
+    with one that has a non-empty schema_translate_map, the value of the
+    ".schema" attribute is rendered as a symbol that will be converted to a
+    real schema name from the mapping post-compile.
+
+    """
+
+    _includes_none_schema_translate: bool = False
+
+    def __init__(
+        self,
+        dialect,
+        initial_quote='"',
+        final_quote=None,
+        escape_quote='"',
+        quote_case_sensitive_collations=True,
+        omit_schema=False,
+    ):
+        """Construct a new ``IdentifierPreparer`` object.
+
+        initial_quote
+          Character that begins a delimited identifier.
+
+        final_quote
+          Character that ends a delimited identifier. Defaults to
+          `initial_quote`.
+
+        omit_schema
+          Prevent prepending the schema name. Useful for databases that
+          do not support schemas.
+        """
+
+        self.dialect = dialect
+        self.initial_quote = initial_quote
+        self.final_quote = final_quote or self.initial_quote
+        self.escape_quote = escape_quote
+        self.escape_to_quote = self.escape_quote * 2
+        self.omit_schema = omit_schema
+        self.quote_case_sensitive_collations = quote_case_sensitive_collations
+        self._strings = {}
+        self._double_percents = self.dialect.paramstyle in (
+            "format",
+            "pyformat",
+        )
+
+    def _with_schema_translate(self, schema_translate_map):
+        prep = self.__class__.__new__(self.__class__)
+        prep.__dict__.update(self.__dict__)
+
+        includes_none = None in schema_translate_map
+
+        def symbol_getter(obj):
+            name = obj.schema
+            if obj._use_schema_map and (name is not None or includes_none):
+                if name is not None and ("[" in name or "]" in name):
+                    raise exc.CompileError(
+                        "Square bracket characters ([]) not supported "
+                        "in schema translate name '%s'" % name
+                    )
+                return quoted_name(
+                    "__[SCHEMA_%s]" % (name or "_none"), quote=False
+                )
+            else:
+                return obj.schema
+
+        prep.schema_for_object = symbol_getter
+        prep._includes_none_schema_translate = includes_none
+        return prep
+
+    def _render_schema_translates(self, statement, schema_translate_map):
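+        # e.g. a statement containing "__[SCHEMA__none].t", rendered with
+        # schema_translate_map={None: "s1"}, becomes "s1.t" (hypothetical
+        # statement; schema name quoted as needed).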
+        d = schema_translate_map
+        if None in d:
+            if not self._includes_none_schema_translate:
+                raise exc.InvalidRequestError(
+                    "schema translate map which previously did not have "
+                    "`None` present as a key now has `None` present; compiled "
+                    "statement may lack adequate placeholders.  Please use "
+                    "consistent keys in successive "
+                    "schema_translate_map dictionaries."
+                )
+
+            d["_none"] = d[None]
+
+        def replace(m):
+            name = m.group(2)
+            if name in d:
+                effective_schema = d[name]
+            else:
+                if name in (None, "_none"):
+                    raise exc.InvalidRequestError(
+                        "schema translate map which previously had `None` "
+                        "present as a key now no longer has it present; don't "
+                        "know how to apply schema for compiled statement. "
+                        "Please use consistent keys in successive "
+                        "schema_translate_map dictionaries."
+                    )
+                effective_schema = name
+
+            if not effective_schema:
+                effective_schema = self.dialect.default_schema_name
+                if not effective_schema:
+                    # TODO: no coverage here
+                    raise exc.CompileError(
+                        "Dialect has no default schema name; can't "
+                        "use None as dynamic schema target."
+                    )
+            return self.quote_schema(effective_schema)
+
+        return re.sub(r"(__\[SCHEMA_([^\]]+)\])", replace, statement)
+
+    def _escape_identifier(self, value: str) -> str:
+        """Escape an identifier.
+
+        Subclasses should override this to provide database-dependent
+        escaping behavior.
+        """
+
+        value = value.replace(self.escape_quote, self.escape_to_quote)
+        if self._double_percents:
+            value = value.replace("%", "%%")
+        return value
+
+    def _unescape_identifier(self, value: str) -> str:
+        """Canonicalize an escaped identifier.
+
+        Subclasses should override this to provide database-dependent
+        unescaping behavior that reverses _escape_identifier.
+        """
+
+        return value.replace(self.escape_to_quote, self.escape_quote)
+
+    def validate_sql_phrase(self, element, reg):
+        """keyword sequence filter.
+
+        a filter for elements that are intended to represent keyword sequences,
+        such as "INITIALLY", "INITIALLY DEFERRED", etc.   no special characters
+        should be present.
+
+        .. versionadded:: 1.3
+
+        """
+
+        if element is not None and not reg.match(element):
+            raise exc.CompileError(
+                "Unexpected SQL phrase: %r (matching against %r)"
+                % (element, reg.pattern)
+            )
+        return element
+
+    def quote_identifier(self, value: str) -> str:
+        """Quote an identifier.
+
+        Subclasses should override this to provide database-dependent
+        quoting behavior.
+        """
+
+        return (
+            self.initial_quote
+            + self._escape_identifier(value)
+            + self.final_quote
+        )
+
+    def _requires_quotes(self, value: str) -> bool:
+        """Return True if the given identifier requires quoting."""
+        lc_value = value.lower()
+        return (
+            lc_value in self.reserved_words
+            or value[0] in self.illegal_initial_characters
+            or not self.legal_characters.match(str(value))
+            or (lc_value != value)
+        )
+
+    def _requires_quotes_illegal_chars(self, value):
+        """Return True if the given identifier requires quoting, but
+        not taking case convention into account."""
+        return not self.legal_characters.match(str(value))
+
+    def quote_schema(self, schema: str, force: Any = None) -> str:
+        """Conditionally quote a schema name.
+
+        The name is quoted if it is a reserved word, contains quote-necessary
+        characters, or is an instance of :class:`.quoted_name` which includes
+        ``quote`` set to ``True``.
+
+        Subclasses can override this to provide database-dependent
+        quoting behavior for schema names.
+
+        :param schema: string schema name
+        :param force: unused
+
+            .. deprecated:: 0.9
+
+                The :paramref:`.IdentifierPreparer.quote_schema.force`
+                parameter is deprecated and will be removed in a future
+                release.  This flag has no effect on the behavior of the
+                :meth:`.IdentifierPreparer.quote` method; please refer to
+                :class:`.quoted_name`.
+
+        """
+        if force is not None:
+            # not using the util.deprecated_params() decorator in this
+            # case because of the additional function call overhead on this
+            # very performance-critical spot.
+            util.warn_deprecated(
+                "The IdentifierPreparer.quote_schema.force parameter is "
+                "deprecated and will be removed in a future release.  This "
+                "flag has no effect on the behavior of the "
+                "IdentifierPreparer.quote method; please refer to "
+                "quoted_name().",
+                # deprecated 0.9. warning from 1.3
+                version="0.9",
+            )
+
+        return self.quote(schema)
+
+    def quote(self, ident: str, force: Any = None) -> str:
+        """Conditionally quote an identifier.
+
+        The identifier is quoted if it is a reserved word, contains
+        quote-necessary characters, or is an instance of
+        :class:`.quoted_name` which includes ``quote`` set to ``True``.
+
+        Subclasses can override this to provide database-dependent
+        quoting behavior for identifier names.
+
+        :param ident: string identifier
+        :param force: unused
+
+            .. deprecated:: 0.9
+
+                The :paramref:`.IdentifierPreparer.quote.force`
+                parameter is deprecated and will be removed in a future
+                release.  This flag has no effect on the behavior of the
+                :meth:`.IdentifierPreparer.quote` method; please refer to
+                :class:`.quoted_name`.
+
+        """
+        if force is not None:
+            # not using the util.deprecated_params() decorator in this
+            # case because of the additional function call overhead on this
+            # very performance-critical spot.
+            util.warn_deprecated(
+                "The IdentifierPreparer.quote.force parameter is "
+                "deprecated and will be removed in a future release.  This "
+                "flag has no effect on the behavior of the "
+                "IdentifierPreparer.quote method; please refer to "
+                "quoted_name().",
+                # deprecated 0.9. warning from 1.3
+                version="0.9",
+            )
+
+        force = getattr(ident, "quote", None)
+
+        if force is None:
+            if ident in self._strings:
+                return self._strings[ident]
+            else:
+                if self._requires_quotes(ident):
+                    self._strings[ident] = self.quote_identifier(ident)
+                else:
+                    self._strings[ident] = ident
+                return self._strings[ident]
+        elif force:
+            return self.quote_identifier(ident)
+        else:
+            return ident
+
+    def format_collation(self, collation_name):
+        if self.quote_case_sensitive_collations:
+            return self.quote(collation_name)
+        else:
+            return collation_name
+
+    def format_sequence(self, sequence, use_schema=True):
+        name = self.quote(sequence.name)
+
+        effective_schema = self.schema_for_object(sequence)
+
+        if (
+            not self.omit_schema
+            and use_schema
+            and effective_schema is not None
+        ):
+            name = self.quote_schema(effective_schema) + "." + name
+        return name
+
+    def format_label(
+        self, label: Label[Any], name: Optional[str] = None
+    ) -> str:
+        return self.quote(name or label.name)
+
+    def format_alias(
+        self, alias: Optional[AliasedReturnsRows], name: Optional[str] = None
+    ) -> str:
+        if name is None:
+            assert alias is not None
+            return self.quote(alias.name)
+        else:
+            return self.quote(name)
+
+    def format_savepoint(self, savepoint, name=None):
+        # Running the savepoint name through quoting is unnecessary
+        # for all known dialects.  This is here to support potential
+        # third-party use cases.
+        ident = name or savepoint.ident
+        if self._requires_quotes(ident):
+            ident = self.quote_identifier(ident)
+        return ident
+
+    @util.preload_module("sqlalchemy.sql.naming")
+    def format_constraint(self, constraint, _alembic_quote=True):
+        naming = util.preloaded.sql_naming
+
+        if constraint.name is _NONE_NAME:
+            name = naming._constraint_name_for_table(
+                constraint, constraint.table
+            )
+
+            if name is None:
+                return None
+        else:
+            name = constraint.name
+
+        if constraint.__visit_name__ == "index":
+            return self.truncate_and_render_index_name(
+                name, _alembic_quote=_alembic_quote
+            )
+        else:
+            return self.truncate_and_render_constraint_name(
+                name, _alembic_quote=_alembic_quote
+            )
+
+    def truncate_and_render_index_name(self, name, _alembic_quote=True):
+        # calculate these at format time so that ad-hoc changes
+        # to dialect.max_identifier_length etc. can be reflected
+        # as IdentifierPreparer is long lived
+        max_ = (
+            self.dialect.max_index_name_length
+            or self.dialect.max_identifier_length
+        )
+        return self._truncate_and_render_maxlen_name(
+            name, max_, _alembic_quote
+        )
+
+    def truncate_and_render_constraint_name(self, name, _alembic_quote=True):
+        # calculate these at format time so that ad-hoc changes
+        # to dialect.max_identifier_length etc. can be reflected
+        # as IdentifierPreparer is long lived
+        max_ = (
+            self.dialect.max_constraint_name_length
+            or self.dialect.max_identifier_length
+        )
+        return self._truncate_and_render_maxlen_name(
+            name, max_, _alembic_quote
+        )
+
+    def _truncate_and_render_maxlen_name(self, name, max_, _alembic_quote):
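+        # Truncation sketch: a generated (_truncated_label) name longer
+        # than max_ keeps its first (max_ - 8) characters plus "_" and the
+        # last four hex digits of its md5 hash.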
+        if isinstance(name, elements._truncated_label):
+            if len(name) > max_:
+                name = name[0 : max_ - 8] + "_" + util.md5_hex(name)[-4:]
+        else:
+            self.dialect.validate_identifier(name)
+
+        if not _alembic_quote:
+            return name
+        else:
+            return self.quote(name)
+
+    def format_index(self, index):
+        return self.format_constraint(index)
+
+    def format_table(self, table, use_schema=True, name=None):
+        """Prepare a quoted table and schema name."""
+
+        if name is None:
+            name = table.name
+
+        result = self.quote(name)
+
+        effective_schema = self.schema_for_object(table)
+
+        if not self.omit_schema and use_schema and effective_schema:
+            result = self.quote_schema(effective_schema) + "." + result
+        return result
+
+    def format_schema(self, name):
+        """Prepare a quoted schema name."""
+
+        return self.quote(name)
+
+    def format_label_name(
+        self,
+        name,
+        anon_map=None,
+    ):
+        """Prepare a quoted column name."""
+
+        if anon_map is not None and isinstance(
+            name, elements._truncated_label
+        ):
+            name = name.apply_map(anon_map)
+
+        return self.quote(name)
+
+    def format_column(
+        self,
+        column,
+        use_table=False,
+        name=None,
+        table_name=None,
+        use_schema=False,
+        anon_map=None,
+    ):
+        """Prepare a quoted column name."""
+
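+        # e.g. (illustrative): for a Column "x" on table "t", this renders
+        # x (quoted only if required), or t.x with use_table=True; literal
+        # text columns are passed through unquoted below.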
+        if name is None:
+            name = column.name
+
+        if anon_map is not None and isinstance(
+            name, elements._truncated_label
+        ):
+            name = name.apply_map(anon_map)
+
+        if not getattr(column, "is_literal", False):
+            if use_table:
+                return (
+                    self.format_table(
+                        column.table, use_schema=use_schema, name=table_name
+                    )
+                    + "."
+                    + self.quote(name)
+                )
+            else:
+                return self.quote(name)
+        else:
+            # literal textual elements get stuck into ColumnClause a lot,
+            # and those shouldn't be quoted
+
+            if use_table:
+                return (
+                    self.format_table(
+                        column.table, use_schema=use_schema, name=table_name
+                    )
+                    + "."
+                    + name
+                )
+            else:
+                return name
+
+    def format_table_seq(self, table, use_schema=True):
+        """Format table name and schema as a tuple."""
+
+        # Dialects with more levels in their fully qualified references
+        # ('database', 'owner', etc.) could override this and return
+        # a longer sequence.
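+        # e.g. (hypothetical): such a dialect might return
+        # ('mydatabase', 'myowner', 'mytable') here.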
+
+        effective_schema = self.schema_for_object(table)
+
+        if not self.omit_schema and use_schema and effective_schema:
+            return (
+                self.quote_schema(effective_schema),
+                self.format_table(table, use_schema=False),
+            )
+        else:
+            return (self.format_table(table, use_schema=False),)
+
+    @util.memoized_property
+    def _r_identifiers(self):
+        initial, final, escaped_final = (
+            re.escape(s)
+            for s in (
+                self.initial_quote,
+                self.final_quote,
+                self._escape_identifier(self.final_quote),
+            )
+        )
+        r = re.compile(
+            r"(?:"
+            r"(?:%(initial)s((?:%(escaped)s|[^%(final)s])+)%(final)s"
+            r"|([^\.]+))(?=\.|$))+"
+            % {"initial": initial, "final": final, "escaped": escaped_final}
+        )
+        return r
+
+    def unformat_identifiers(self, identifiers):
+        """Unpack 'schema.table.column'-like strings into components."""
+
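+        # e.g. (illustrative, assuming double-quote identifier quoting):
+        #   'myschema."my.table".col' -> ['myschema', 'my.table', 'col']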
+        r = self._r_identifiers
+        return [
+            self._unescape_identifier(i)
+            for i in [a or b for a, b in r.findall(identifiers)]
+        ]
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/crud.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/crud.py
new file mode 100644
index 00000000..19af40ff
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/crud.py
@@ -0,0 +1,1669 @@
+# sql/crud.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+"""Functions used by compiler.py to determine the parameters rendered
+within INSERT and UPDATE statements.
+
+"""
+from __future__ import annotations
+
+import functools
+import operator
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import Iterable
+from typing import List
+from typing import MutableMapping
+from typing import NamedTuple
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+from . import coercions
+from . import dml
+from . import elements
+from . import roles
+from .base import _DefaultDescriptionTuple
+from .dml import isinsert as _compile_state_isinsert
+from .elements import ColumnClause
+from .schema import default_is_clause_element
+from .schema import default_is_sequence
+from .selectable import Select
+from .selectable import TableClause
+from .. import exc
+from .. import util
+from ..util.typing import Literal
+
+if TYPE_CHECKING:
+    from .compiler import _BindNameForColProtocol
+    from .compiler import SQLCompiler
+    from .dml import _DMLColumnElement
+    from .dml import DMLState
+    from .dml import ValuesBase
+    from .elements import ColumnElement
+    from .elements import KeyedColumnElement
+    from .schema import _SQLExprDefault
+    from .schema import Column
+
+REQUIRED = util.symbol(
+    "REQUIRED",
+    """
+Placeholder for the value within a :class:`.BindParameter`
+which is required to be present when the statement is passed
+to :meth:`_engine.Connection.execute`.
+
+This symbol is typically used when a :func:`_expression.insert`
+or :func:`_expression.update` statement is compiled without parameter
+values present.
+
+""",
+)
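+# For example (illustrative): compiling table.insert() with no parameter
+# values renders one bind per column with REQUIRED as its value; executing
+# the compiled statement without supplying those parameters raises an
+# error naming the missing bind.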
+
+
+def _as_dml_column(c: ColumnElement[Any]) -> ColumnClause[Any]:
+    if not isinstance(c, ColumnClause):
+        raise exc.CompileError(
+            f"Can't create DML statement against column expression {c!r}"
+        )
+    return c
+
+
+_CrudParamElement = Tuple[
+    "ColumnElement[Any]",
+    str,  # column name
+    Optional[
+        Union[str, "_SQLExprDefault"]
+    ],  # bound parameter string or SQL expression to apply
+    Iterable[str],
+]
+_CrudParamElementStr = Tuple[
+    "KeyedColumnElement[Any]",
+    str,  # column name
+    str,  # bound parameter string
+    Iterable[str],
+]
+_CrudParamElementSQLExpr = Tuple[
+    "ColumnClause[Any]",
+    str,
+    "_SQLExprDefault",  # SQL expression to apply
+    Iterable[str],
+]
+
+_CrudParamSequence = List[_CrudParamElement]
+
+
+class _CrudParams(NamedTuple):
+    single_params: _CrudParamSequence
+    all_multi_params: List[Sequence[_CrudParamElementStr]]
+    is_default_metavalue_only: bool = False
+    use_insertmanyvalues: bool = False
+    use_sentinel_columns: Optional[Sequence[Column[Any]]] = None
+
+
+def _get_crud_params(
+    compiler: SQLCompiler,
+    stmt: ValuesBase,
+    compile_state: DMLState,
+    toplevel: bool,
+    **kw: Any,
+) -> _CrudParams:
+    """create a set of tuples representing column/string pairs for use
+    in an INSERT or UPDATE statement.
+
+    Also generates the Compiled object's postfetch, prefetch, and
+    returning column collections, used for default handling and ultimately
+    populating the CursorResult's prefetch_cols() and postfetch_cols()
+    collections.
+
+    """
+
+    # note: the _get_crud_params() system was written with the notion in mind
+    # that INSERT, UPDATE, DELETE are always the top level statement and
+    # that there is only one of them.  With the addition of CTEs that can
+    # make use of DML, this assumption is no longer accurate; the DML
+    # statement is not necessarily the top-level "row returning" thing
+    # and it is also theoretically possible (fortunately nobody has asked yet)
+    # to have a single statement with multiple DMLs inside of it via CTEs.
+
+    # the current _get_crud_params() design doesn't accommodate these cases
+    # right now.  It "just works" for a CTE that has a single DML inside of
+    # it, and for a CTE with multiple DML, it's not clear what would happen.
+
+    # overall, the "compiler.XYZ" collections here would need to be in a
+    # per-DML structure of some kind, and DefaultDialect would need to
+    # navigate these collections on a per-statement basis, with additional
+    # emphasis on the "toplevel returning data" statement.  However we
+    # still need to run through _get_crud_params() for all DML as we have
+    # Python / SQL generated column defaults that need to be rendered.
+
+    # if there is user need for this kind of thing, it's likely a post 2.0
+    # kind of change as it would require deep changes to DefaultDialect
+    # as well as here.
+
+    compiler.postfetch = []
+    compiler.insert_prefetch = []
+    compiler.update_prefetch = []
+    compiler.implicit_returning = []
+
+    visiting_cte = kw.get("visiting_cte", None)
+    if visiting_cte is not None:
+        # for insert -> CTE -> insert, don't populate an incoming
+        # _crud_accumulate_bind_names collection; the INSERT we process here
+        # will not be inline within the VALUES of the enclosing INSERT as the
+        # CTE is placed on the outside.  See issue #9173
+        kw.pop("accumulate_bind_names", None)
+    assert (
+        "accumulate_bind_names" not in kw
+    ), "Don't know how to handle insert within insert without a CTE"
+
+    # getters - these are normally just column.key,
+    # but in the case of mysql multi-table update, the rules for
+    # .key must conditionally take tablename into account
+    (
+        _column_as_key,
+        _getattr_col_key,
+        _col_bind_name,
+    ) = _key_getters_for_crud_column(compiler, stmt, compile_state)
+
+    compiler._get_bind_name_for_col = _col_bind_name
+
+    if stmt._returning and stmt._return_defaults:
+        raise exc.CompileError(
+            "Can't compile statement that includes returning() and "
+            "return_defaults() simultaneously"
+        )
+
+    if compile_state.isdelete:
+        _setup_delete_return_defaults(
+            compiler,
+            stmt,
+            compile_state,
+            (),
+            _getattr_col_key,
+            _column_as_key,
+            _col_bind_name,
+            (),
+            (),
+            toplevel,
+            kw,
+        )
+        return _CrudParams([], [])
+
+    # no parameters in the statement, no parameters in the
+    # compiled params - return binds for all columns
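+    # e.g. (sketch): str(t.insert()) for a table with columns a, b renders
+    # INSERT INTO t (a, b) VALUES (:a, :b), each bind marked required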
+    if compiler.column_keys is None and compile_state._no_parameters:
+        return _CrudParams(
+            [
+                (
+                    c,
+                    compiler.preparer.format_column(c),
+                    _create_bind_param(compiler, c, None, required=True),
+                    (c.key,),
+                )
+                for c in stmt.table.columns
+                if not c._omit_from_statements
+            ],
+            [],
+        )
+
+    stmt_parameter_tuples: Optional[
+        List[Tuple[Union[str, ColumnClause[Any]], Any]]
+    ]
+    spd: Optional[MutableMapping[_DMLColumnElement, Any]]
+
+    if (
+        _compile_state_isinsert(compile_state)
+        and compile_state._has_multi_parameters
+    ):
+        mp = compile_state._multi_parameters
+        assert mp is not None
+        spd = mp[0]
+        stmt_parameter_tuples = list(spd.items())
+        spd_str_key = {_column_as_key(key) for key in spd}
+    elif compile_state._ordered_values:
+        spd = compile_state._dict_parameters
+        stmt_parameter_tuples = compile_state._ordered_values
+        assert spd is not None
+        spd_str_key = {_column_as_key(key) for key in spd}
+    elif compile_state._dict_parameters:
+        spd = compile_state._dict_parameters
+        stmt_parameter_tuples = list(spd.items())
+        spd_str_key = {_column_as_key(key) for key in spd}
+    else:
+        stmt_parameter_tuples = spd = spd_str_key = None
+
+    # if we have statement parameters - set defaults in the
+    # compiled params
+    if compiler.column_keys is None:
+        parameters = {}
+    elif stmt_parameter_tuples:
+        assert spd_str_key is not None
+        parameters = {
+            _column_as_key(key): REQUIRED
+            for key in compiler.column_keys
+            if key not in spd_str_key
+        }
+    else:
+        parameters = {
+            _column_as_key(key): REQUIRED for key in compiler.column_keys
+        }
+
+    # create a list of column assignment clauses as tuples
+    values: List[_CrudParamElement] = []
+
+    if stmt_parameter_tuples is not None:
+        _get_stmt_parameter_tuples_params(
+            compiler,
+            compile_state,
+            parameters,
+            stmt_parameter_tuples,
+            _column_as_key,
+            values,
+            kw,
+        )
+
+    check_columns: Dict[str, ColumnClause[Any]] = {}
+
+    # special logic that only occurs for multi-table UPDATE
+    # statements
+    if dml.isupdate(compile_state) and compile_state.is_multitable:
+        _get_update_multitable_params(
+            compiler,
+            stmt,
+            compile_state,
+            stmt_parameter_tuples,
+            check_columns,
+            _col_bind_name,
+            _getattr_col_key,
+            values,
+            kw,
+        )
+
+    if _compile_state_isinsert(compile_state) and stmt._select_names:
+        # is an insert from select, is not a multiparams
+
+        assert not compile_state._has_multi_parameters
+
+        _scan_insert_from_select_cols(
+            compiler,
+            stmt,
+            compile_state,
+            parameters,
+            _getattr_col_key,
+            _column_as_key,
+            _col_bind_name,
+            check_columns,
+            values,
+            toplevel,
+            kw,
+        )
+        use_insertmanyvalues = False
+        use_sentinel_columns = None
+    else:
+        use_insertmanyvalues, use_sentinel_columns = _scan_cols(
+            compiler,
+            stmt,
+            compile_state,
+            parameters,
+            _getattr_col_key,
+            _column_as_key,
+            _col_bind_name,
+            check_columns,
+            values,
+            toplevel,
+            kw,
+        )
+
+    if parameters and stmt_parameter_tuples:
+        check = (
+            set(parameters)
+            .intersection(_column_as_key(k) for k, v in stmt_parameter_tuples)
+            .difference(check_columns)
+        )
+        if check:
+            raise exc.CompileError(
+                "Unconsumed column names: %s"
+                % (", ".join("%s" % (c,) for c in check))
+            )
+
+    is_default_metavalue_only = False
+
+    if (
+        _compile_state_isinsert(compile_state)
+        and compile_state._has_multi_parameters
+    ):
+        # is a multiparams, is not an insert from a select
+        assert not stmt._select_names
+        multi_extended_values = _extend_values_for_multiparams(
+            compiler,
+            stmt,
+            compile_state,
+            cast(
+                "Sequence[_CrudParamElementStr]",
+                values,
+            ),
+            cast("Callable[..., str]", _column_as_key),
+            kw,
+        )
+        return _CrudParams(values, multi_extended_values)
+    elif (
+        not values
+        and compiler.for_executemany
+        and compiler.dialect.supports_default_metavalue
+    ):
+        # convert an "INSERT DEFAULT VALUES"
+        # into INSERT (firstcol) VALUES (DEFAULT), which can be turned
+        # into an in-place multi-VALUES.  This supports
+        # insert_executemany_returning mode :)
+        values = [
+            (
+                _as_dml_column(stmt.table.columns[0]),
+                compiler.preparer.format_column(stmt.table.columns[0]),
+                compiler.dialect.default_metavalue_token,
+                (),
+            )
+        ]
+        is_default_metavalue_only = True
+
+    return _CrudParams(
+        values,
+        [],
+        is_default_metavalue_only=is_default_metavalue_only,
+        use_insertmanyvalues=use_insertmanyvalues,
+        use_sentinel_columns=use_sentinel_columns,
+    )
+
+
+@overload
+def _create_bind_param(
+    compiler: SQLCompiler,
+    col: ColumnElement[Any],
+    value: Any,
+    process: Literal[True] = ...,
+    required: bool = False,
+    name: Optional[str] = None,
+    **kw: Any,
+) -> str: ...
+
+
+@overload
+def _create_bind_param(
+    compiler: SQLCompiler,
+    col: ColumnElement[Any],
+    value: Any,
+    **kw: Any,
+) -> str: ...
+
+
+def _create_bind_param(
+    compiler: SQLCompiler,
+    col: ColumnElement[Any],
+    value: Any,
+    process: bool = True,
+    required: bool = False,
+    name: Optional[str] = None,
+    **kw: Any,
+) -> Union[str, elements.BindParameter[Any]]:
+    if name is None:
+        name = col.key
+    bindparam = elements.BindParameter(
+        name, value, type_=col.type, required=required
+    )
+    bindparam._is_crud = True
+    if process:
+        return bindparam._compiler_dispatch(compiler, **kw)
+    else:
+        return bindparam
+
+
+def _handle_values_anonymous_param(compiler, col, value, name, **kw):
+    # the insert() and update() constructs as of 1.4 produce anonymous
+    # bindparam() objects in the values() collections up front when given plain
+    # literal values.  This is so that cache key behaviors, which need to
+    # produce bound parameters in deterministic order without invoking any
+    # compilation here, can be applied to these constructs when they include
+    # values() (but not yet multi-values, which are not included in caching
+    # right now).
+    #
+    # in order to produce the desired "crud" style name for these parameters,
+    # which will also be targetable in engine/default.py through the usual
+    # conventions, apply our desired name to these unique parameters by
+    # populating the compiler truncated names cache with the desired name,
+    # rather than having
+    # compiler.visit_bindparam()->compiler._truncated_identifier make up a
+    # name.  This also saves on call counts.
+
+    # for an INSERT/UPDATE that's a CTE, we don't need names to match
+    # external parameters, and these would also conflict in the case where
+    # multiple insert/update statements are combined together using CTEs
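+    # e.g. (sketch): for t.insert().values(x=5), the pre-made anonymous
+    # bindparam for "x" is given the crud-style name "x" here by seeding
+    # compiler.truncated_names, so the rendered SQL binds as :x.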
+    is_cte = "visiting_cte" in kw
+
+    if (
+        not is_cte
+        and value.unique
+        and isinstance(value.key, elements._truncated_label)
+    ):
+        compiler.truncated_names[("bindparam", value.key)] = name
+
+    if value.type._isnull:
+        # either a unique parameter, or other bound parameters that were
+        # passed in directly; set type to that of the column unconditionally
+        value = value._with_binary_element_type(col.type)
+
+    return value._compiler_dispatch(compiler, **kw)
+
+
+def _key_getters_for_crud_column(
+    compiler: SQLCompiler, stmt: ValuesBase, compile_state: DMLState
+) -> Tuple[
+    Callable[[Union[str, ColumnClause[Any]]], Union[str, Tuple[str, str]]],
+    Callable[[ColumnClause[Any]], Union[str, Tuple[str, str]]],
+    _BindNameForColProtocol,
+]:
+    if dml.isupdate(compile_state) and compile_state._extra_froms:
+        # when extra tables are present, refer to the columns
+        # in those extra tables as table-qualified, including in
+        # dictionaries and when rendering bind param names.
+        # the "main" table of the statement remains unqualified,
+        # allowing the most compatibility with a non-multi-table
+        # statement.
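+        # e.g. (illustrative): updating t1 with an extra table t2 in the
+        # FROM list, t2.c.x is keyed as ('t2', 'x') and its bind is named
+        # t2_x, while columns of t1 keep their plain .key.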
+        _et = set(compile_state._extra_froms)
+
+        c_key_role = functools.partial(
+            coercions.expect_as_key, roles.DMLColumnRole
+        )
+
+        def _column_as_key(
+            key: Union[ColumnClause[Any], str]
+        ) -> Union[str, Tuple[str, str]]:
+            str_key = c_key_role(key)
+            if hasattr(key, "table") and key.table in _et:
+                return (key.table.name, str_key)  # type: ignore
+            else:
+                return str_key
+
+        def _getattr_col_key(
+            col: ColumnClause[Any],
+        ) -> Union[str, Tuple[str, str]]:
+            if col.table in _et:
+                return (col.table.name, col.key)  # type: ignore
+            else:
+                return col.key
+
+        def _col_bind_name(col: ColumnClause[Any]) -> str:
+            if col.table in _et:
+                if TYPE_CHECKING:
+                    assert isinstance(col.table, TableClause)
+                return "%s_%s" % (col.table.name, col.key)
+            else:
+                return col.key
+
+    else:
+        _column_as_key = functools.partial(
+            coercions.expect_as_key, roles.DMLColumnRole
+        )
+        _getattr_col_key = _col_bind_name = operator.attrgetter("key")  # type: ignore  # noqa: E501
+
+    return _column_as_key, _getattr_col_key, _col_bind_name
+
+
+def _scan_insert_from_select_cols(
+    compiler,
+    stmt,
+    compile_state,
+    parameters,
+    _getattr_col_key,
+    _column_as_key,
+    _col_bind_name,
+    check_columns,
+    values,
+    toplevel,
+    kw,
+):
+    cols = [stmt.table.c[_column_as_key(name)] for name in stmt._select_names]
+
+    assert compiler.stack[-1]["selectable"] is stmt
+
+    compiler.stack[-1]["insert_from_select"] = stmt.select
+
+    add_select_cols: List[_CrudParamElementSQLExpr] = []
+    if stmt.include_insert_from_select_defaults:
+        col_set = set(cols)
+        for col in stmt.table.columns:
+            # omit columns that were not in the SELECT statement.
+            # this will omit columns marked as omit_from_statements naturally,
+            # as long as that col was not explicit in the SELECT.
+            # if an omit_from_statements col has a "default" on it, then
+            # we need to include it, as these defaults should still fire off.
+            # but, if it has that default and it's the "sentinel" default,
+            # we don't do sentinel default operations for insert_from_select
+            # here, so we again omit it.
+            if (
+                col not in col_set
+                and col.default
+                and not col.default.is_sentinel
+            ):
+                cols.append(col)
+
+    for c in cols:
+        col_key = _getattr_col_key(c)
+        if col_key in parameters and col_key not in check_columns:
+            parameters.pop(col_key)
+            values.append((c, compiler.preparer.format_column(c), None, ()))
+        else:
+            _append_param_insert_select_hasdefault(
+                compiler, stmt, c, add_select_cols, kw
+            )
+
+    if add_select_cols:
+        values.extend(add_select_cols)
+        ins_from_select = compiler.stack[-1]["insert_from_select"]
+        if not isinstance(ins_from_select, Select):
+            raise exc.CompileError(
+                f"Can't extend statement for INSERT..FROM SELECT to include "
+                f"additional default-holding column(s) "
+                f"""{
+                    ', '.join(repr(key) for _, key, _, _ in add_select_cols)
+                }.  Convert the selectable to a subquery() first, or pass """
+                "include_defaults=False to Insert.from_select() to skip these "
+                "columns."
+            )
+        ins_from_select = ins_from_select._generate()
+        # copy raw_columns
+        ins_from_select._raw_columns = list(ins_from_select._raw_columns) + [
+            expr for _, _, expr, _ in add_select_cols
+        ]
+        compiler.stack[-1]["insert_from_select"] = ins_from_select
+
+
+def _scan_cols(
+    compiler,
+    stmt,
+    compile_state,
+    parameters,
+    _getattr_col_key,
+    _column_as_key,
+    _col_bind_name,
+    check_columns,
+    values,
+    toplevel,
+    kw,
+):
+    (
+        need_pks,
+        implicit_returning,
+        implicit_return_defaults,
+        postfetch_lastrowid,
+        use_insertmanyvalues,
+        use_sentinel_columns,
+    ) = _get_returning_modifiers(compiler, stmt, compile_state, toplevel)
+
+    assert compile_state.isupdate or compile_state.isinsert
+
+    if compile_state._parameter_ordering:
+        parameter_ordering = [
+            _column_as_key(key) for key in compile_state._parameter_ordering
+        ]
+        ordered_keys = set(parameter_ordering)
+        cols = [
+            stmt.table.c[key]
+            for key in parameter_ordering
+            if isinstance(key, str) and key in stmt.table.c
+        ] + [c for c in stmt.table.c if c.key not in ordered_keys]
+
+    else:
+        cols = stmt.table.columns
+
+    isinsert = _compile_state_isinsert(compile_state)
+    if isinsert and not compile_state._has_multi_parameters:
+        # new rules for #7998.  fetch lastrowid or implicit returning
+        # for autoincrement column even if parameter is NULL, for DBs that
+        # override NULL param for primary key (sqlite, mysql/mariadb)
+        autoincrement_col = stmt.table._autoincrement_column
+        insert_null_pk_still_autoincrements = (
+            compiler.dialect.insert_null_pk_still_autoincrements
+        )
+    else:
+        autoincrement_col = insert_null_pk_still_autoincrements = None
+
+    if stmt._supplemental_returning:
+        supplemental_returning = set(stmt._supplemental_returning)
+    else:
+        supplemental_returning = set()
+
+    compiler_implicit_returning = compiler.implicit_returning
+
+    # TODO - see TODO(return_defaults_columns) below
+    # cols_in_params = set()
+
+    for c in cols:
+        # scan through every column in the target table
+
+        col_key = _getattr_col_key(c)
+
+        if col_key in parameters and col_key not in check_columns:
+            # parameter is present for the column.  use that.
+
+            _append_param_parameter(
+                compiler,
+                stmt,
+                compile_state,
+                c,
+                col_key,
+                parameters,
+                _col_bind_name,
+                implicit_returning,
+                implicit_return_defaults,
+                postfetch_lastrowid,
+                values,
+                autoincrement_col,
+                insert_null_pk_still_autoincrements,
+                kw,
+            )
+
+            # TODO - see TODO(return_defaults_columns) below
+            # cols_in_params.add(c)
+
+        elif isinsert:
+            # no parameter is present and it's an insert.
+
+            if c.primary_key and need_pks:
+                # it's a primary key column, it will need to be generated by a
+                # default generator of some kind, and the statement expects
+                # inserted_primary_key to be available.
+
+                if implicit_returning:
+                    # we can use RETURNING, find out how to invoke this
+                    # column and get the value where RETURNING is an option.
+                    # we can inline server-side functions in this case.
+
+                    _append_param_insert_pk_returning(
+                        compiler, stmt, c, values, kw
+                    )
+                else:
+                    # otherwise, find out how to invoke this column
+                    # and get its value where RETURNING is not an option.
+                    # if we have to invoke a server-side function, we need
+                    # to pre-execute it.   or if this is a straight
+                    # autoincrement column and the dialect supports it
+                    # we can use cursor.lastrowid.
+
+                    _append_param_insert_pk_no_returning(
+                        compiler, stmt, c, values, kw
+                    )
+
+            elif c.default is not None:
+                # column has a default, but it's not a pk column, or it is but
+                # we don't need to get the pk back.
+                if not c.default.is_sentinel or (
+                    use_sentinel_columns is not None
+                ):
+                    _append_param_insert_hasdefault(
+                        compiler, stmt, c, implicit_return_defaults, values, kw
+                    )
+
+            elif c.server_default is not None:
+                # column has a DDL-level default, and is either not a pk
+                # column or we don't need the pk.
+                if implicit_return_defaults and c in implicit_return_defaults:
+                    compiler_implicit_returning.append(c)
+                elif not c.primary_key:
+                    compiler.postfetch.append(c)
+
+            elif implicit_return_defaults and c in implicit_return_defaults:
+                compiler_implicit_returning.append(c)
+
+            elif (
+                c.primary_key
+                and c is not stmt.table._autoincrement_column
+                and not c.nullable
+            ):
+                _warn_pk_with_no_anticipated_value(c)
+
+        elif compile_state.isupdate:
+            # no parameter is present and it's an update.
+
+            _append_param_update(
+                compiler,
+                compile_state,
+                stmt,
+                c,
+                implicit_return_defaults,
+                values,
+                kw,
+            )
+
+        # adding supplemental cols to implicit_returning in table
+        # order so that order is maintained between multiple INSERT
+        # statements which may have different parameters included, but all
+        # have the same RETURNING clause
+        if (
+            c in supplemental_returning
+            and c not in compiler_implicit_returning
+        ):
+            compiler_implicit_returning.append(c)
+
+    if supplemental_returning:
+        # we should have gotten every col into implicit_returning,
+        # however supplemental returning can also have SQL functions etc.
+        # in it
+        remaining_supplemental = supplemental_returning.difference(
+            compiler_implicit_returning
+        )
+        compiler_implicit_returning.extend(
+            c
+            for c in stmt._supplemental_returning
+            if c in remaining_supplemental
+        )
+
+    # TODO(return_defaults_columns): there can still be more columns in
+    # _return_defaults_columns in the case that they come from something
+    # like an aliased() of the table. we can add them here, however this
+    # breaks other ORM things. so this is for another day. see
+    # test/orm/dml/test_update_delete_where.py -> test_update_from_alias
+
+    # if stmt._return_defaults_columns:
+    #     compiler_implicit_returning.extend(
+    #         set(stmt._return_defaults_columns)
+    #         .difference(compiler_implicit_returning)
+    #         .difference(cols_in_params)
+    #     )
+
+    return (use_insertmanyvalues, use_sentinel_columns)
+
+
+def _setup_delete_return_defaults(
+    compiler,
+    stmt,
+    compile_state,
+    parameters,
+    _getattr_col_key,
+    _column_as_key,
+    _col_bind_name,
+    check_columns,
+    values,
+    toplevel,
+    kw,
+):
+    (_, _, implicit_return_defaults, *_) = _get_returning_modifiers(
+        compiler, stmt, compile_state, toplevel
+    )
+
+    if not implicit_return_defaults:
+        return
+
+    if stmt._return_defaults_columns:
+        compiler.implicit_returning.extend(implicit_return_defaults)
+
+    if stmt._supplemental_returning:
+        ir_set = set(compiler.implicit_returning)
+        compiler.implicit_returning.extend(
+            c for c in stmt._supplemental_returning if c not in ir_set
+        )
+
+
+def _append_param_parameter(
+    compiler,
+    stmt,
+    compile_state,
+    c,
+    col_key,
+    parameters,
+    _col_bind_name,
+    implicit_returning,
+    implicit_return_defaults,
+    postfetch_lastrowid,
+    values,
+    autoincrement_col,
+    insert_null_pk_still_autoincrements,
+    kw,
+):
+    value = parameters.pop(col_key)
+
+    col_value = compiler.preparer.format_column(
+        c, use_table=compile_state.include_table_with_column_exprs
+    )
+
+    accumulated_bind_names: Set[str] = set()
+
+    if coercions._is_literal(value):
+        if (
+            insert_null_pk_still_autoincrements
+            and c.primary_key
+            and c is autoincrement_col
+        ):
+            # support use case for #7998, fetch autoincrement cols
+            # even if value was given.
+
+            if postfetch_lastrowid:
+                compiler.postfetch_lastrowid = True
+            elif implicit_returning:
+                compiler.implicit_returning.append(c)
+
+        value = _create_bind_param(
+            compiler,
+            c,
+            value,
+            required=value is REQUIRED,
+            name=(
+                _col_bind_name(c)
+                if not _compile_state_isinsert(compile_state)
+                or not compile_state._has_multi_parameters
+                else "%s_m0" % _col_bind_name(c)
+            ),
+            accumulate_bind_names=accumulated_bind_names,
+            **kw,
+        )
+    elif value._is_bind_parameter:
+        if (
+            insert_null_pk_still_autoincrements
+            and value.value is None
+            and c.primary_key
+            and c is autoincrement_col
+        ):
+            # support use case for #7998, fetch autoincrement cols
+            # even if value was given
+            if implicit_returning:
+                compiler.implicit_returning.append(c)
+            elif compiler.dialect.postfetch_lastrowid:
+                compiler.postfetch_lastrowid = True
+
+        value = _handle_values_anonymous_param(
+            compiler,
+            c,
+            value,
+            name=(
+                _col_bind_name(c)
+                if not _compile_state_isinsert(compile_state)
+                or not compile_state._has_multi_parameters
+                else "%s_m0" % _col_bind_name(c)
+            ),
+            accumulate_bind_names=accumulated_bind_names,
+            **kw,
+        )
+    else:
+        # value is a SQL expression
+        value = compiler.process(
+            value.self_group(),
+            accumulate_bind_names=accumulated_bind_names,
+            **kw,
+        )
+
+        if compile_state.isupdate:
+            if implicit_return_defaults and c in implicit_return_defaults:
+                compiler.implicit_returning.append(c)
+
+            else:
+                compiler.postfetch.append(c)
+        else:
+            if c.primary_key:
+                if implicit_returning:
+                    compiler.implicit_returning.append(c)
+                elif compiler.dialect.postfetch_lastrowid:
+                    compiler.postfetch_lastrowid = True
+
+            elif implicit_return_defaults and (c in implicit_return_defaults):
+                compiler.implicit_returning.append(c)
+
+            else:
+                # postfetch specifically means, "we can SELECT the row we just
+                # inserted by primary key to get back the server generated
+                # defaults". so by definition this can't be used to get the
+                # primary key value back, because we need to have it ahead of
+                # time.
+
+                compiler.postfetch.append(c)
+
+    values.append((c, col_value, value, accumulated_bind_names))
+
+
+def _append_param_insert_pk_returning(compiler, stmt, c, values, kw):
+    """Create a primary key expression in the INSERT statement where
+    we want to populate result.inserted_primary_key and RETURNING
+    is available.
+
+    """
+    if c.default is not None:
+        if c.default.is_sequence:
+            if compiler.dialect.supports_sequences and (
+                not c.default.optional
+                or not compiler.dialect.sequences_optional
+            ):
+                accumulated_bind_names: Set[str] = set()
+                values.append(
+                    (
+                        c,
+                        compiler.preparer.format_column(c),
+                        compiler.process(
+                            c.default,
+                            accumulate_bind_names=accumulated_bind_names,
+                            **kw,
+                        ),
+                        accumulated_bind_names,
+                    )
+                )
+            compiler.implicit_returning.append(c)
+        elif c.default.is_clause_element:
+            accumulated_bind_names = set()
+            values.append(
+                (
+                    c,
+                    compiler.preparer.format_column(c),
+                    compiler.process(
+                        c.default.arg.self_group(),
+                        accumulate_bind_names=accumulated_bind_names,
+                        **kw,
+                    ),
+                    accumulated_bind_names,
+                )
+            )
+            compiler.implicit_returning.append(c)
+        else:
+            # client side default.  OK we can't use RETURNING, need to
+            # do a "prefetch", which in fact fetches the default value
+            # on the Python side
+            values.append(
+                (
+                    c,
+                    compiler.preparer.format_column(c),
+                    _create_insert_prefetch_bind_param(compiler, c, **kw),
+                    (c.key,),
+                )
+            )
+    elif c is stmt.table._autoincrement_column or c.server_default is not None:
+        compiler.implicit_returning.append(c)
+    elif not c.nullable:
+        # no .default, no .server_default, not autoincrement, we have
+        # no indication this primary key column will have any value
+        _warn_pk_with_no_anticipated_value(c)
+
+
+def _append_param_insert_pk_no_returning(compiler, stmt, c, values, kw):
+    """Create a primary key expression in the INSERT statement where
+    we want to populate result.inserted_primary_key and we cannot use
+    RETURNING.
+
+    Depending on the kind of default here we may create a bound parameter
+    in the INSERT statement and pre-execute a default generation function,
+    or we may use cursor.lastrowid if supported by the dialect.
+
+    """
+
+    if (
+        # column has a Python-side default
+        c.default is not None
+        and (
+            # and it either is not a sequence, or it is and we support
+            # sequences and want to invoke it
+            not c.default.is_sequence
+            or (
+                compiler.dialect.supports_sequences
+                and (
+                    not c.default.optional
+                    or not compiler.dialect.sequences_optional
+                )
+            )
+        )
+    ) or (
+        # column is the "autoincrement column"
+        c is stmt.table._autoincrement_column
+        and (
+            # dialect can't use cursor.lastrowid
+            not compiler.dialect.postfetch_lastrowid
+            and (
+                # column has a Sequence and we support those
+                (
+                    c.default is not None
+                    and c.default.is_sequence
+                    and compiler.dialect.supports_sequences
+                )
+                or
+                # column has no default on it, but dialect can run the
+                # "autoincrement" mechanism explicitly, e.g. PostgreSQL
+                # SERIAL we know the sequence name
+                (
+                    c.default is None
+                    and compiler.dialect.preexecute_autoincrement_sequences
+                )
+            )
+        )
+    ):
+        # do a pre-execute of the default
+        values.append(
+            (
+                c,
+                compiler.preparer.format_column(c),
+                _create_insert_prefetch_bind_param(compiler, c, **kw),
+                (c.key,),
+            )
+        )
+    elif (
+        c.default is None
+        and c.server_default is None
+        and not c.nullable
+        and c is not stmt.table._autoincrement_column
+    ):
+        # no .default, no .server_default, not autoincrement, we have
+        # no indication this primary key column will have any value
+        _warn_pk_with_no_anticipated_value(c)
+    elif compiler.dialect.postfetch_lastrowid:
+        # finally, where it seems like there will be a generated primary key
+        # value and we haven't set up any other way to fetch it, and the
+        # dialect supports cursor.lastrowid, switch on the lastrowid flag so
+        # that the DefaultExecutionContext calls upon cursor.lastrowid
+        compiler.postfetch_lastrowid = True
+
+
+def _append_param_insert_hasdefault(
+    compiler, stmt, c, implicit_return_defaults, values, kw
+):
+    if c.default.is_sequence:
+        if compiler.dialect.supports_sequences and (
+            not c.default.optional or not compiler.dialect.sequences_optional
+        ):
+            accumulated_bind_names: Set[str] = set()
+            values.append(
+                (
+                    c,
+                    compiler.preparer.format_column(c),
+                    compiler.process(
+                        c.default,
+                        accumulate_bind_names=accumulated_bind_names,
+                        **kw,
+                    ),
+                    accumulated_bind_names,
+                )
+            )
+            if implicit_return_defaults and c in implicit_return_defaults:
+                compiler.implicit_returning.append(c)
+            elif not c.primary_key:
+                compiler.postfetch.append(c)
+    elif c.default.is_clause_element:
+        accumulated_bind_names = set()
+        values.append(
+            (
+                c,
+                compiler.preparer.format_column(c),
+                compiler.process(
+                    c.default.arg.self_group(),
+                    accumulate_bind_names=accumulated_bind_names,
+                    **kw,
+                ),
+                accumulated_bind_names,
+            )
+        )
+
+        if implicit_return_defaults and c in implicit_return_defaults:
+            compiler.implicit_returning.append(c)
+        elif not c.primary_key:
+            # don't add primary key column to postfetch
+            compiler.postfetch.append(c)
+    else:
+        values.append(
+            (
+                c,
+                compiler.preparer.format_column(c),
+                _create_insert_prefetch_bind_param(compiler, c, **kw),
+                (c.key,),
+            )
+        )
+
+
+def _append_param_insert_select_hasdefault(
+    compiler: SQLCompiler,
+    stmt: ValuesBase,
+    c: ColumnClause[Any],
+    values: List[_CrudParamElementSQLExpr],
+    kw: Dict[str, Any],
+) -> None:
+    if default_is_sequence(c.default):
+        if compiler.dialect.supports_sequences and (
+            not c.default.optional or not compiler.dialect.sequences_optional
+        ):
+            values.append(
+                (
+                    c,
+                    compiler.preparer.format_column(c),
+                    c.default.next_value(),
+                    (),
+                )
+            )
+    elif default_is_clause_element(c.default):
+        values.append(
+            (
+                c,
+                compiler.preparer.format_column(c),
+                c.default.arg.self_group(),
+                (),
+            )
+        )
+    else:
+        values.append(
+            (
+                c,
+                compiler.preparer.format_column(c),
+                _create_insert_prefetch_bind_param(
+                    compiler, c, process=False, **kw
+                ),
+                (c.key,),
+            )
+        )
+
+
+def _append_param_update(
+    compiler, compile_state, stmt, c, implicit_return_defaults, values, kw
+):
+    include_table = compile_state.include_table_with_column_exprs
+    if c.onupdate is not None and not c.onupdate.is_sequence:
+        if c.onupdate.is_clause_element:
+            values.append(
+                (
+                    c,
+                    compiler.preparer.format_column(
+                        c,
+                        use_table=include_table,
+                    ),
+                    compiler.process(c.onupdate.arg.self_group(), **kw),
+                    (),
+                )
+            )
+            if implicit_return_defaults and c in implicit_return_defaults:
+                compiler.implicit_returning.append(c)
+            else:
+                compiler.postfetch.append(c)
+        else:
+            values.append(
+                (
+                    c,
+                    compiler.preparer.format_column(
+                        c,
+                        use_table=include_table,
+                    ),
+                    _create_update_prefetch_bind_param(compiler, c, **kw),
+                    (c.key,),
+                )
+            )
+    elif c.server_onupdate is not None:
+        if implicit_return_defaults and c in implicit_return_defaults:
+            compiler.implicit_returning.append(c)
+        else:
+            compiler.postfetch.append(c)
+    elif (
+        implicit_return_defaults
+        and (stmt._return_defaults_columns or not stmt._return_defaults)
+        and c in implicit_return_defaults
+    ):
+        compiler.implicit_returning.append(c)
+
+
+@overload
+def _create_insert_prefetch_bind_param(
+    compiler: SQLCompiler,
+    c: ColumnElement[Any],
+    process: Literal[True] = ...,
+    **kw: Any,
+) -> str: ...
+
+
+@overload
+def _create_insert_prefetch_bind_param(
+    compiler: SQLCompiler,
+    c: ColumnElement[Any],
+    process: Literal[False],
+    **kw: Any,
+) -> elements.BindParameter[Any]: ...
+
+
+def _create_insert_prefetch_bind_param(
+    compiler: SQLCompiler,
+    c: ColumnElement[Any],
+    process: bool = True,
+    name: Optional[str] = None,
+    **kw: Any,
+) -> Union[elements.BindParameter[Any], str]:
+    param = _create_bind_param(
+        compiler, c, None, process=process, name=name, **kw
+    )
+    compiler.insert_prefetch.append(c)  # type: ignore
+    return param
+
+
+@overload
+def _create_update_prefetch_bind_param(
+    compiler: SQLCompiler,
+    c: ColumnElement[Any],
+    process: Literal[True] = ...,
+    **kw: Any,
+) -> str: ...
+
+
+@overload
+def _create_update_prefetch_bind_param(
+    compiler: SQLCompiler,
+    c: ColumnElement[Any],
+    process: Literal[False],
+    **kw: Any,
+) -> elements.BindParameter[Any]: ...
+
+
+def _create_update_prefetch_bind_param(
+    compiler: SQLCompiler,
+    c: ColumnElement[Any],
+    process: bool = True,
+    name: Optional[str] = None,
+    **kw: Any,
+) -> Union[elements.BindParameter[Any], str]:
+    param = _create_bind_param(
+        compiler, c, None, process=process, name=name, **kw
+    )
+    compiler.update_prefetch.append(c)  # type: ignore
+    return param
+
+
+class _multiparam_column(elements.ColumnElement[Any]):
+    _is_multiparam_column = True
+
+    def __init__(self, original, index):
+        self.index = index
+        self.key = "%s_m%d" % (original.key, index + 1)
+        self.original = original
+        self.default = original.default
+        self.type = original.type
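+        # e.g. (illustrative): original key "x" with index 0 yields
+        # self.key "x_m1", matching the _m<N> bind naming used for the
+        # additional parameter sets of a multi-values INSERT.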
+
+    def compare(self, other, **kw):
+        raise NotImplementedError()
+
+    def _copy_internals(self, **kw):
+        raise NotImplementedError()
+
+    def __eq__(self, other):
+        return (
+            isinstance(other, _multiparam_column)
+            and other.key == self.key
+            and other.original == self.original
+        )
+
+    @util.memoized_property
+    def _default_description_tuple(self) -> _DefaultDescriptionTuple:
+        """used by default.py -> _process_execute_defaults()"""
+
+        return _DefaultDescriptionTuple._from_column_default(self.default)
+
+    @util.memoized_property
+    def _onupdate_description_tuple(self) -> _DefaultDescriptionTuple:
+        """used by default.py -> _process_execute_defaults()"""
+
+        return _DefaultDescriptionTuple._from_column_default(self.onupdate)
+
+
+def _process_multiparam_default_bind(
+    compiler: SQLCompiler,
+    stmt: ValuesBase,
+    c: KeyedColumnElement[Any],
+    index: int,
+    kw: Dict[str, Any],
+) -> str:
+    if not c.default:
+        raise exc.CompileError(
+            "INSERT value for column %s is explicitly rendered as a bound"
+            "parameter in the VALUES clause; "
+            "a Python-side value or SQL expression is required" % c
+        )
+    elif default_is_clause_element(c.default):
+        return compiler.process(c.default.arg.self_group(), **kw)
+    elif c.default.is_sequence:
+        # these conditions would have been established
+        # by append_param_insert_(?:hasdefault|pk_returning|pk_no_returning)
+        # in order for us to be here, so these don't need to be
+        # checked
+        # assert compiler.dialect.supports_sequences and (
+        #    not c.default.optional
+        #    or not compiler.dialect.sequences_optional
+        # )
+        return compiler.process(c.default, **kw)
+    else:
+        col = _multiparam_column(c, index)
+        assert isinstance(stmt, dml.Insert)
+        return _create_insert_prefetch_bind_param(
+            compiler, col, process=True, **kw
+        )
+
+
+def _get_update_multitable_params(
+    compiler,
+    stmt,
+    compile_state,
+    stmt_parameter_tuples,
+    check_columns,
+    _col_bind_name,
+    _getattr_col_key,
+    values,
+    kw,
+):
+    normalized_params = {
+        coercions.expect(roles.DMLColumnRole, c): param
+        for c, param in stmt_parameter_tuples or ()
+    }
+
+    include_table = compile_state.include_table_with_column_exprs
+
+    affected_tables = set()
+    for t in compile_state._extra_froms:
+        for c in t.c:
+            if c in normalized_params:
+                affected_tables.add(t)
+                check_columns[_getattr_col_key(c)] = c
+                value = normalized_params[c]
+
+                col_value = compiler.process(c, include_table=include_table)
+                if coercions._is_literal(value):
+                    value = _create_bind_param(
+                        compiler,
+                        c,
+                        value,
+                        required=value is REQUIRED,
+                        name=_col_bind_name(c),
+                        **kw,  # TODO: no test coverage for literal binds here
+                    )
+                    accumulated_bind_names: Iterable[str] = (c.key,)
+                elif value._is_bind_parameter:
+                    cbn = _col_bind_name(c)
+                    value = _handle_values_anonymous_param(
+                        compiler, c, value, name=cbn, **kw
+                    )
+                    accumulated_bind_names = (cbn,)
+                else:
+                    compiler.postfetch.append(c)
+                    value = compiler.process(value.self_group(), **kw)
+                    accumulated_bind_names = ()
+                values.append((c, col_value, value, accumulated_bind_names))
+    # determine tables which are actually to be updated - process onupdate
+    # and server_onupdate for these
+    for t in affected_tables:
+        for c in t.c:
+            if c in normalized_params:
+                continue
+            elif c.onupdate is not None and not c.onupdate.is_sequence:
+                if c.onupdate.is_clause_element:
+                    values.append(
+                        (
+                            c,
+                            compiler.process(c, include_table=include_table),
+                            compiler.process(
+                                c.onupdate.arg.self_group(), **kw
+                            ),
+                            (),
+                        )
+                    )
+                    compiler.postfetch.append(c)
+                else:
+                    values.append(
+                        (
+                            c,
+                            compiler.process(c, include_table=include_table),
+                            _create_update_prefetch_bind_param(
+                                compiler, c, name=_col_bind_name(c), **kw
+                            ),
+                            (c.key,),
+                        )
+                    )
+            elif c.server_onupdate is not None:
+                compiler.postfetch.append(c)
+
+
+def _extend_values_for_multiparams(
+    compiler: SQLCompiler,
+    stmt: ValuesBase,
+    compile_state: DMLState,
+    initial_values: Sequence[_CrudParamElementStr],
+    _column_as_key: Callable[..., str],
+    kw: Dict[str, Any],
+) -> List[Sequence[_CrudParamElementStr]]:
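+    # e.g. (sketch): for insert().values([{"x": 1}, {"x": 2}]), the first
+    # row's bind was already named x_m0 upstream; each additional row i
+    # processed here binds as x_m(i+1), so the second row renders as :x_m1.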
+    values_0 = initial_values
+    values = [initial_values]
+
+    mp = compile_state._multi_parameters
+    assert mp is not None
+    for i, row in enumerate(mp[1:]):
+        extension: List[_CrudParamElementStr] = []
+
+        row = {_column_as_key(key): v for key, v in row.items()}
+
+        for col, col_expr, param, accumulated_names in values_0:
+            if col.key in row:
+                key = col.key
+
+                if coercions._is_literal(row[key]):
+                    new_param = _create_bind_param(
+                        compiler,
+                        col,
+                        row[key],
+                        name="%s_m%d" % (col.key, i + 1),
+                        **kw,
+                    )
+                else:
+                    new_param = compiler.process(row[key].self_group(), **kw)
+            else:
+                new_param = _process_multiparam_default_bind(
+                    compiler, stmt, col, i, kw
+                )
+
+            extension.append((col, col_expr, new_param, accumulated_names))
+
+        values.append(extension)
+
+    return values
+
+
+def _get_stmt_parameter_tuples_params(
+    compiler,
+    compile_state,
+    parameters,
+    stmt_parameter_tuples,
+    _column_as_key,
+    values,
+    kw,
+):
+    for k, v in stmt_parameter_tuples:
+        colkey = _column_as_key(k)
+        if colkey is not None:
+            parameters.setdefault(colkey, v)
+        else:
+            # a non-Column expression on the left side;
+            # add it to values() in an "as-is" state,
+            # coercing right side to bound param
+
+            # note one of the main use cases for this is array slice
+            # updates on PostgreSQL, as the left side is also an expression.
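+            # e.g. (illustrative): an UPDATE like
+            # t.update().values({t.c.data[2:4]: [5, 6]}) lands here, with
+            # the slice expression rendered as-is on the left.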
+
+            col_expr = compiler.process(
+                k, include_table=compile_state.include_table_with_column_exprs
+            )
+
+            if coercions._is_literal(v):
+                v = compiler.process(
+                    elements.BindParameter(None, v, type_=k.type), **kw
+                )
+            else:
+                if v._is_bind_parameter and v.type._isnull:
+                    # either a unique parameter, or other bound parameters
+                    # that were passed in directly; set type to that of the
+                    # column unconditionally
+                    v = v._with_binary_element_type(k.type)
+
+                v = compiler.process(v.self_group(), **kw)
+
+            # TODO: not sure if accumulated_bind_names applies here
+            values.append((k, col_expr, v, ()))
+
+
+def _get_returning_modifiers(compiler, stmt, compile_state, toplevel):
+    """determines RETURNING strategy, if any, for the statement.
+
+    This is where it's determined what we need to fetch from the
+    INSERT or UPDATE statement after it's invoked.
+
+    """
+
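+    # e.g. (sketch): a plain single-row INSERT on a RETURNING-capable
+    # backend such as PostgreSQL typically ends up with
+    # implicit_returning=True and postfetch_lastrowid=False, while the
+    # same statement on SQLite (absent return_defaults()) prefers
+    # cursor.lastrowid, per the comments below.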
+    dialect = compiler.dialect
+
+    need_pks = (
+        toplevel
+        and _compile_state_isinsert(compile_state)
+        and not stmt._inline
+        and (
+            not compiler.for_executemany
+            or (dialect.insert_executemany_returning and stmt._return_defaults)
+        )
+        and not stmt._returning
+        # and (not stmt._returning or stmt._return_defaults)
+        and not compile_state._has_multi_parameters
+    )
+
+    # check if we have access to simple cursor.lastrowid.  we can use that
+    # after the INSERT if that's all we need.
+    postfetch_lastrowid = (
+        need_pks
+        and dialect.postfetch_lastrowid
+        and stmt.table._autoincrement_column is not None
+    )
+
+    # see if we want to add RETURNING to an INSERT in order to get
+    # primary key columns back.  This would be instead of postfetch_lastrowid
+    # if that's set.
+    implicit_returning = (
+        # statement itself can veto it
+        need_pks
+        # the dialect can veto it if it just doesn't support RETURNING
+        # with INSERT
+        and dialect.insert_returning
+        # user-defined implicit_returning on Table can veto it
+        and compile_state._primary_table.implicit_returning
+        # the compile_state can veto it (SQLite uses this to disable
+        # RETURNING for an ON CONFLICT insert, as SQLite does not return
+        # rows that were updated, which is wrong)
+        and compile_state._supports_implicit_returning
+        and (
+            # since we support MariaDB and SQLite, which also support
+            # lastrowid, decide if we should use lastrowid or RETURNING.  for
+            # an INSERT that didn't call return_defaults() and has just one
+            # set of parameters, we can use lastrowid.  this is more
+            # "traditional" and a lot of weird use cases are supported by it.
+            # SQLite lastrowid is about 3x faster than RETURNING;
+            # MariaDB lastrowid is about 2x faster than RETURNING.
+            (not postfetch_lastrowid or dialect.favor_returning_over_lastrowid)
+            or compile_state._has_multi_parameters
+            or stmt._return_defaults
+        )
+    )
+    if implicit_returning:
+        postfetch_lastrowid = False
+
+    if _compile_state_isinsert(compile_state):
+        should_implicit_return_defaults = (
+            implicit_returning and stmt._return_defaults
+        )
+        explicit_returning = (
+            should_implicit_return_defaults
+            or stmt._returning
+            or stmt._supplemental_returning
+        )
+        use_insertmanyvalues = (
+            toplevel
+            and compiler.for_executemany
+            and dialect.use_insertmanyvalues
+            and (
+                explicit_returning or dialect.use_insertmanyvalues_wo_returning
+            )
+        )
+
+        use_sentinel_columns = None
+        if (
+            use_insertmanyvalues
+            and explicit_returning
+            and stmt._sort_by_parameter_order
+        ):
+            use_sentinel_columns = compiler._get_sentinel_column_for_table(
+                stmt.table
+            )
+
+    elif compile_state.isupdate:
+        should_implicit_return_defaults = (
+            stmt._return_defaults
+            and compile_state._primary_table.implicit_returning
+            and compile_state._supports_implicit_returning
+            and dialect.update_returning
+        )
+        use_insertmanyvalues = False
+        use_sentinel_columns = None
+    elif compile_state.isdelete:
+        should_implicit_return_defaults = (
+            stmt._return_defaults
+            and compile_state._primary_table.implicit_returning
+            and compile_state._supports_implicit_returning
+            and dialect.delete_returning
+        )
+        use_insertmanyvalues = False
+        use_sentinel_columns = None
+    else:
+        should_implicit_return_defaults = False  # pragma: no cover
+        use_insertmanyvalues = False
+        use_sentinel_columns = None
+
+    if should_implicit_return_defaults:
+        if not stmt._return_defaults_columns:
+            # TODO: this is weird.  See #9685, where we have to take an
+            # extra step to prevent this from happening.  why would this
+            # ever be *all* columns?  but if we set it to blank, that seems
+            # to break things in the ORM as well.  So we should try to
+            # clean this up and figure out what return_defaults() needs
+            # to do w/ the ORM etc. here
+            implicit_return_defaults = set(stmt.table.c)
+        else:
+            implicit_return_defaults = set(stmt._return_defaults_columns)
+    else:
+        implicit_return_defaults = None
+
+    return (
+        need_pks,
+        implicit_returning or should_implicit_return_defaults,
+        implicit_return_defaults,
+        postfetch_lastrowid,
+        use_insertmanyvalues,
+        use_sentinel_columns,
+    )
+
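+# illustrative note (not part of the library): callers unpack the 6-tuple
+# returned above roughly as::
+#
+#     (
+#         need_pks,
+#         implicit_returning,
+#         implicit_return_defaults,
+#         postfetch_lastrowid,
+#         use_insertmanyvalues,
+#         use_sentinel_columns,
+#     ) = _get_returning_modifiers(compiler, stmt, compile_state, toplevel)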
+
+def _warn_pk_with_no_anticipated_value(c):
+    msg = (
+        "Column '%s.%s' is marked as a member of the "
+        "primary key for table '%s', "
+        "but has no Python-side or server-side default generator indicated, "
+        "nor does it indicate 'autoincrement=True' or 'nullable=True', "
+        "and no explicit value is passed.  "
+        "Primary key columns typically may not store NULL."
+        % (c.table.fullname, c.name, c.table.fullname)
+    )
+    if len(c.table.primary_key) > 1:
+        msg += (
+            " Note that as of SQLAlchemy 1.1, 'autoincrement=True' must be "
+            "indicated explicitly for composite (e.g. multicolumn) primary "
+            "keys if AUTO_INCREMENT/SERIAL/IDENTITY "
+            "behavior is expected for one of the columns in the primary key. "
+            "CREATE TABLE statements are impacted by this change as well on "
+            "most backends."
+        )
+    util.warn(msg)
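+
+
+# illustrative note (not part of the library): a primary key column with no
+# value source triggers the warning above at INSERT time, e.g. a String
+# primary key with no default and no value passed; ``metadata`` is
+# hypothetical::
+#
+#     t = Table("t", metadata, Column("code", String(10), primary_key=True))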
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/ddl.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/ddl.py
new file mode 100644
index 00000000..81a49151
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/ddl.py
@@ -0,0 +1,1438 @@
+# sql/ddl.py
+# Copyright (C) 2009-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+"""
+Provides the hierarchy of DDL-defining schema items as well as routines
+to invoke them for a create/drop call.
+
+"""
+from __future__ import annotations
+
+import contextlib
+import typing
+from typing import Any
+from typing import Callable
+from typing import Iterable
+from typing import List
+from typing import Optional
+from typing import Sequence as typing_Sequence
+from typing import Tuple
+
+from . import roles
+from .base import _generative
+from .base import Executable
+from .base import SchemaVisitor
+from .elements import ClauseElement
+from .. import exc
+from .. import util
+from ..util import topological
+from ..util.typing import Protocol
+from ..util.typing import Self
+
+if typing.TYPE_CHECKING:
+    from .compiler import Compiled
+    from .compiler import DDLCompiler
+    from .elements import BindParameter
+    from .schema import Constraint
+    from .schema import ForeignKeyConstraint
+    from .schema import SchemaItem
+    from .schema import Sequence
+    from .schema import Table
+    from .selectable import TableClause
+    from ..engine.base import Connection
+    from ..engine.interfaces import CacheStats
+    from ..engine.interfaces import CompiledCacheType
+    from ..engine.interfaces import Dialect
+    from ..engine.interfaces import SchemaTranslateMapType
+
+
+class BaseDDLElement(ClauseElement):
+    """The root of DDL constructs, including those that are sub-elements
+    within the "create table" and other processes.
+
+    .. versionadded:: 2.0
+
+    """
+
+    _hierarchy_supports_caching = False
+    """Disable cache warnings for all DDL element subclasses."""
+
+    def _compiler(self, dialect, **kw):
+        """Return a compiler appropriate for this ClauseElement, given a
+        Dialect."""
+
+        return dialect.ddl_compiler(dialect, self, **kw)
+
+    def _compile_w_cache(
+        self,
+        dialect: Dialect,
+        *,
+        compiled_cache: Optional[CompiledCacheType],
+        column_keys: List[str],
+        for_executemany: bool = False,
+        schema_translate_map: Optional[SchemaTranslateMapType] = None,
+        **kw: Any,
+    ) -> Tuple[
+        Compiled, Optional[typing_Sequence[BindParameter[Any]]], CacheStats
+    ]:
+        raise NotImplementedError()
+
+
+class DDLIfCallable(Protocol):
+    def __call__(
+        self,
+        ddl: BaseDDLElement,
+        target: SchemaItem,
+        bind: Optional[Connection],
+        tables: Optional[List[Table]] = None,
+        state: Optional[Any] = None,
+        *,
+        dialect: Dialect,
+        compiler: Optional[DDLCompiler] = ...,
+        checkfirst: bool,
+    ) -> bool: ...
+
+
+class DDLIf(typing.NamedTuple):
+    dialect: Optional[str]
+    callable_: Optional[DDLIfCallable]
+    state: Optional[Any]
+
+    def _should_execute(
+        self,
+        ddl: BaseDDLElement,
+        target: SchemaItem,
+        bind: Optional[Connection],
+        compiler: Optional[DDLCompiler] = None,
+        **kw: Any,
+    ) -> bool:
+        if bind is not None:
+            dialect = bind.dialect
+        elif compiler is not None:
+            dialect = compiler.dialect
+        else:
+            assert False, "bind or compiler is required"
+
+        if isinstance(self.dialect, str):
+            if self.dialect != dialect.name:
+                return False
+        elif isinstance(self.dialect, (tuple, list, set)):
+            if dialect.name not in self.dialect:
+                return False
+        if self.callable_ is not None and not self.callable_(
+            ddl,
+            target,
+            bind,
+            state=self.state,
+            dialect=dialect,
+            compiler=compiler,
+            **kw,
+        ):
+            return False
+
+        return True
+
+
+class ExecutableDDLElement(roles.DDLRole, Executable, BaseDDLElement):
+    """Base class for standalone executable DDL expression constructs.
+
+    This class is the base for the general purpose :class:`.DDL` class,
+    as well as the various create/drop clause constructs such as
+    :class:`.CreateTable`, :class:`.DropTable`, :class:`.AddConstraint`,
+    etc.
+
+    .. versionchanged:: 2.0  :class:`.ExecutableDDLElement` is renamed from
+       :class:`.DDLElement`, which still exists for backwards compatibility.
+
+    :class:`.ExecutableDDLElement` integrates closely with SQLAlchemy events,
+    introduced in :ref:`event_toplevel`.  An instance of one is
+    itself an event receiving callable::
+
+        event.listen(
+            users,
+            "after_create",
+            AddConstraint(constraint).execute_if(dialect="postgresql"),
+        )
+
+    .. seealso::
+
+        :class:`.DDL`
+
+        :class:`.DDLEvents`
+
+        :ref:`event_toplevel`
+
+        :ref:`schema_ddl_sequences`
+
+    """
+
+    _ddl_if: Optional[DDLIf] = None
+    target: Optional[SchemaItem] = None
+
+    def _execute_on_connection(
+        self, connection, distilled_params, execution_options
+    ):
+        return connection._execute_ddl(
+            self, distilled_params, execution_options
+        )
+
+    @_generative
+    def against(self, target: SchemaItem) -> Self:
+        """Return a copy of this :class:`_schema.ExecutableDDLElement` which
+        will include the given target.
+
+        This essentially applies the given item to the ``.target`` attribute of
+        the returned :class:`_schema.ExecutableDDLElement` object. This target
+        is then usable by event handlers and compilation routines in order to
+        provide services such as tokenization of a DDL string in terms of a
+        particular :class:`_schema.Table`.
+
+        When a :class:`_schema.ExecutableDDLElement` object is established as
+        an event handler for the :meth:`_events.DDLEvents.before_create` or
+        :meth:`_events.DDLEvents.after_create` events, and the event then
+        occurs for a given target such as a :class:`_schema.Constraint` or
+        :class:`_schema.Table`, that target is established with a copy of the
+        :class:`_schema.ExecutableDDLElement` object using this method, which
+        then proceeds to the :meth:`_schema.ExecutableDDLElement.execute`
+        method in order to invoke the actual DDL instruction.
+
+        :param target: a :class:`_schema.SchemaItem` that will be the subject
+         of a DDL operation.
+
+        :return: a copy of this :class:`_schema.ExecutableDDLElement` with the
+         ``.target`` attribute assigned to the given
+         :class:`_schema.SchemaItem`.
+
+        .. seealso::
+
+            :class:`_schema.DDL` - uses tokenization against the "target" when
+            processing the DDL string.
+
+        """
+        self.target = target
+        return self
+
+    @_generative
+    def execute_if(
+        self,
+        dialect: Optional[str] = None,
+        callable_: Optional[DDLIfCallable] = None,
+        state: Optional[Any] = None,
+    ) -> Self:
+        r"""Return a callable that will execute this
+        :class:`_ddl.ExecutableDDLElement` conditionally within an event
+        handler.
+
+        Used to provide a wrapper for event listening::
+
+            event.listen(
+                metadata,
+                "before_create",
+                DDL("my_ddl").execute_if(dialect="postgresql"),
+            )
+
+        :param dialect: May be a string or tuple of strings.
+          If a string, it will be compared to the name of the
+          executing database dialect::
+
+            DDL("something").execute_if(dialect="postgresql")
+
+          If a tuple, specifies multiple dialect names::
+
+            DDL("something").execute_if(dialect=("postgresql", "mysql"))
+
+        :param callable\_: A callable, which will be invoked with
+          three positional arguments as well as optional keyword
+          arguments:
+
+            :ddl:
+              This DDL element.
+
+            :target:
+              The :class:`_schema.Table` or :class:`_schema.MetaData`
+              object which is the
+              target of this event. May be None if the DDL is executed
+              explicitly.
+
+            :bind:
+              The :class:`_engine.Connection` being used for DDL execution.
+              May be None if this construct is being created inline within
+              a table, in which case ``compiler`` will be present.
+
+            :tables:
+              Optional keyword argument - a list of Table objects which are to
+              be created/dropped within a MetaData.create_all() or drop_all()
+              method call.
+
+            :dialect: keyword argument, but always present - the
+              :class:`.Dialect` involved in the operation.
+
+            :compiler: keyword argument.  Will be ``None`` for an engine
+              level DDL invocation, but will refer to a :class:`.DDLCompiler`
+              if this DDL element is being created inline within a table.
+
+            :state:
+              Optional keyword argument - will be the ``state`` argument
+              passed to this function.
+
+            :checkfirst:
+              Keyword argument, will be True if the 'checkfirst' flag was
+              set during the call to ``create()``, ``create_all()``,
+              ``drop()``, ``drop_all()``.
+
+          If the callable returns a True value, the DDL statement will be
+          executed.
+
+        :param state: any value which will be passed to the callable\_
+          as the ``state`` keyword argument.
+
+        .. seealso::
+
+            :meth:`.SchemaItem.ddl_if`
+
+            :class:`.DDLEvents`
+
+            :ref:`event_toplevel`
+
+        """
+        self._ddl_if = DDLIf(dialect, callable_, state)
+        return self
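+
+    # illustrative sketch (not part of the library): a ``callable_`` suitable
+    # for :meth:`.execute_if`, using only the arguments documented above;
+    # ``only_pg`` and ``metadata`` are hypothetical::
+    #
+    #     def only_pg(ddl, target, bind, **kw):
+    #         return kw["dialect"].name == "postgresql"
+    #
+    #     event.listen(
+    #         metadata,
+    #         "before_create",
+    #         DDL("CREATE EXTENSION hstore").execute_if(callable_=only_pg),
+    #     )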
+
+    def _should_execute(self, target, bind, **kw):
+        if self._ddl_if is None:
+            return True
+        else:
+            return self._ddl_if._should_execute(self, target, bind, **kw)
+
+    def _invoke_with(self, bind):
+        if self._should_execute(self.target, bind):
+            return bind.execute(self)
+
+    def __call__(self, target, bind, **kw):
+        """Execute the DDL as a ddl_listener."""
+
+        self.against(target)._invoke_with(bind)
+
+    def _generate(self):
+        s = self.__class__.__new__(self.__class__)
+        s.__dict__ = self.__dict__.copy()
+        return s
+
+
+DDLElement = ExecutableDDLElement
+""":class:`.DDLElement` is renamed to :class:`.ExecutableDDLElement`."""
+
+
+class DDL(ExecutableDDLElement):
+    """A literal DDL statement.
+
+    Specifies literal SQL DDL to be executed by the database.  DDL objects
+    function as DDL event listeners, and can be subscribed to those events
+    listed in :class:`.DDLEvents`, using either :class:`_schema.Table` or
+    :class:`_schema.MetaData` objects as targets.
+    Basic templating support allows
+    a single DDL instance to handle repetitive tasks for multiple tables.
+
+    Examples::
+
+      from sqlalchemy import event, DDL
+
+      tbl = Table("users", metadata, Column("uid", Integer))
+      event.listen(tbl, "before_create", DDL("DROP TRIGGER users_trigger"))
+
+      spow = DDL("ALTER TABLE %(table)s SET secretpowers TRUE")
+      event.listen(tbl, "after_create", spow.execute_if(dialect="somedb"))
+
+      drop_spow = DDL("ALTER TABLE users SET secretpowers FALSE")
+      connection.execute(drop_spow)
+
+    When operating on Table events, the following ``statement``
+    string substitutions are available:
+
+    .. sourcecode:: text
+
+      %(table)s  - the Table name, with any required quoting applied
+      %(schema)s - the schema name, with any required quoting applied
+      %(fullname)s - the Table name including schema, quoted if needed
+
+    The DDL's "context", if any, will be combined with the standard
+    substitutions noted above.  Keys present in the context will override
+    the standard substitutions.
+
+    """
+
+    __visit_name__ = "ddl"
+
+    def __init__(self, statement, context=None):
+        """Create a DDL statement.
+
+        :param statement:
+          A string or unicode string to be executed.  Statements will be
+          processed with Python's string formatting operator using
+          a fixed set of string substitutions, as well as additional
+          substitutions provided by the optional :paramref:`.DDL.context`
+          parameter.
+
+          A literal '%' in a statement must be escaped as '%%'.
+
+          SQL bind parameters are not available in DDL statements.
+
+        :param context:
+          Optional dictionary, defaults to None.  These values will be
+          available for use in string substitutions on the DDL statement.
+
+        .. seealso::
+
+            :class:`.DDLEvents`
+
+            :ref:`event_toplevel`
+
+        """
+
+        if not isinstance(statement, str):
+            raise exc.ArgumentError(
+                "Expected a string or unicode SQL statement, got '%r'"
+                % statement
+            )
+
+        self.statement = statement
+        self.context = context or {}
+
+    def __repr__(self):
+        parts = [repr(self.statement)]
+        if self.context:
+            parts.append(f"context={self.context}")
+
+        return "<%s@%s; %s>" % (
+            type(self).__name__,
+            id(self),
+            ", ".join(parts),
+        )
+
+
+class _CreateDropBase(ExecutableDDLElement):
+    """Base class for DDL constructs that represent CREATE and DROP or
+    equivalents.
+
+    The common theme of _CreateDropBase is a single
+    ``element`` attribute which refers to the element
+    to be created or dropped.
+
+    """
+
+    def __init__(
+        self,
+        element,
+    ):
+        self.element = self.target = element
+        self._ddl_if = getattr(element, "_ddl_if", None)
+
+    @property
+    def stringify_dialect(self):
+        return self.element.create_drop_stringify_dialect
+
+    def _create_rule_disable(self, compiler):
+        """Allow disable of _create_rule using a callable.
+
+        Pass to _create_rule using
+        util.portable_instancemethod(self._create_rule_disable)
+        to retain serializability.
+
+        """
+        return False
+
+
+class _CreateBase(_CreateDropBase):
+    def __init__(self, element, if_not_exists=False):
+        super().__init__(element)
+        self.if_not_exists = if_not_exists
+
+
+class _DropBase(_CreateDropBase):
+    def __init__(self, element, if_exists=False):
+        super().__init__(element)
+        self.if_exists = if_exists
+
+
+class CreateSchema(_CreateBase):
+    """Represent a CREATE SCHEMA statement.
+
+    The argument here is the string name of the schema.
+
+    """
+
+    __visit_name__ = "create_schema"
+
+    stringify_dialect = "default"
+
+    def __init__(
+        self,
+        name: str,
+        if_not_exists: bool = False,
+    ):
+        """Create a new :class:`.CreateSchema` construct."""
+
+        super().__init__(element=name, if_not_exists=if_not_exists)
+
+
+class DropSchema(_DropBase):
+    """Represent a DROP SCHEMA statement.
+
+    The argument here is the string name of the schema.
+
+    """
+
+    __visit_name__ = "drop_schema"
+
+    stringify_dialect = "default"
+
+    def __init__(
+        self,
+        name: str,
+        cascade: bool = False,
+        if_exists: bool = False,
+    ):
+        """Create a new :class:`.DropSchema` construct."""
+
+        super().__init__(element=name, if_exists=if_exists)
+        self.cascade = cascade
+
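+# illustrative sketch (not part of the library): CreateSchema / DropSchema
+# are ordinary executable DDL elements; ``engine`` is hypothetical::
+#
+#     with engine.begin() as conn:
+#         conn.execute(CreateSchema("analytics", if_not_exists=True))
+#         conn.execute(DropSchema("analytics", cascade=True, if_exists=True))
+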
+
+class CreateTable(_CreateBase):
+    """Represent a CREATE TABLE statement."""
+
+    __visit_name__ = "create_table"
+
+    def __init__(
+        self,
+        element: Table,
+        include_foreign_key_constraints: Optional[
+            typing_Sequence[ForeignKeyConstraint]
+        ] = None,
+        if_not_exists: bool = False,
+    ):
+        """Create a :class:`.CreateTable` construct.
+
+        :param element: a :class:`_schema.Table` that's the subject
+         of the CREATE.
+        :param include_foreign_key_constraints: optional sequence of
+         :class:`_schema.ForeignKeyConstraint` objects that will be included
+         inline within the CREATE construct; if omitted, all foreign key
+         constraints that do not specify use_alter=True are included.
+
+        :param if_not_exists: if True, an IF NOT EXISTS operator will be
+         applied to the construct.
+
+         .. versionadded:: 1.4.0b2
+
+        """
+        super().__init__(element, if_not_exists=if_not_exists)
+        self.columns = [CreateColumn(column) for column in element.columns]
+        self.include_foreign_key_constraints = include_foreign_key_constraints
+
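+# illustrative sketch (not part of the library): a CreateTable construct can
+# be stringified without a live connection; ``my_table`` is hypothetical::
+#
+#     from sqlalchemy.dialects import postgresql
+#
+#     print(CreateTable(my_table).compile(dialect=postgresql.dialect()))
+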
+
+class _DropView(_DropBase):
+    """Semi-public 'DROP VIEW' construct.
+
+    Used by the test suite for dialect-agnostic drops of views.
+    This object will eventually be part of a public "view" API.
+
+    """
+
+    __visit_name__ = "drop_view"
+
+
+class CreateConstraint(BaseDDLElement):
+    def __init__(self, element: Constraint):
+        self.element = element
+
+
+class CreateColumn(BaseDDLElement):
+    """Represent a :class:`_schema.Column`
+    as rendered in a CREATE TABLE statement,
+    via the :class:`.CreateTable` construct.
+
+    This is provided to support custom column DDL within the generation
+    of CREATE TABLE statements, by using the
+    compiler extension documented in :ref:`sqlalchemy.ext.compiler_toplevel`
+    to extend :class:`.CreateColumn`.
+
+    Typical integration is to examine the incoming :class:`_schema.Column`
+    object, and to redirect compilation if a particular flag or condition
+    is found::
+
+        from sqlalchemy import schema
+        from sqlalchemy.ext.compiler import compiles
+
+
+        @compiles(schema.CreateColumn)
+        def compile(element, compiler, **kw):
+            column = element.element
+
+            if "special" not in column.info:
+                return compiler.visit_create_column(element, **kw)
+
+            text = "%s SPECIAL DIRECTIVE %s" % (
+                column.name,
+                compiler.type_compiler.process(column.type),
+            )
+            default = compiler.get_column_default_string(column)
+            if default is not None:
+                text += " DEFAULT " + default
+
+            if not column.nullable:
+                text += " NOT NULL"
+
+            if column.constraints:
+                text += " ".join(
+                    compiler.process(const) for const in column.constraints
+                )
+            return text
+
+    The above construct can be applied to a :class:`_schema.Table`
+    as follows::
+
+        from sqlalchemy import Table, MetaData, Column, Integer, String
+        from sqlalchemy import schema
+
+        metadata = MetaData()
+
+        table = Table(
+            "mytable",
+            metadata,
+            Column("x", Integer, info={"special": True}, primary_key=True),
+            Column("y", String(50)),
+            Column("z", String(20), info={"special": True}),
+        )
+
+        metadata.create_all(conn)
+
+    Above, the directives we've added to the :attr:`_schema.Column.info`
+    collection
+    will be detected by our custom compilation scheme:
+
+    .. sourcecode:: sql
+
+        CREATE TABLE mytable (
+                x SPECIAL DIRECTIVE INTEGER NOT NULL,
+                y VARCHAR(50),
+                z SPECIAL DIRECTIVE VARCHAR(20),
+            PRIMARY KEY (x)
+        )
+
+    The :class:`.CreateColumn` construct can also be used to skip certain
+    columns when producing a ``CREATE TABLE``.  This is accomplished by
+    creating a compilation rule that conditionally returns ``None``.
+    This is essentially how to produce the same effect as using the
+    ``system=True`` argument on :class:`_schema.Column`, which marks a column
+    as an implicitly-present "system" column.
+
+    For example, suppose we wish to produce a :class:`_schema.Table`
+    which skips
+    rendering of the PostgreSQL ``xmin`` column against the PostgreSQL
+    backend, but on other backends does render it, in anticipation of a
+    triggered rule.  A conditional compilation rule could skip this name only
+    on PostgreSQL::
+
+        from sqlalchemy.schema import CreateColumn
+
+
+        @compiles(CreateColumn, "postgresql")
+        def skip_xmin(element, compiler, **kw):
+            if element.element.name == "xmin":
+                return None
+            else:
+                return compiler.visit_create_column(element, **kw)
+
+
+        my_table = Table(
+            "mytable",
+            metadata,
+            Column("id", Integer, primary_key=True),
+            Column("xmin", Integer),
+        )
+
+    Above, a :class:`.CreateTable` construct will generate a ``CREATE TABLE``
+    which only includes the ``id`` column in the string; the ``xmin`` column
+    will be omitted, but only against the PostgreSQL backend.
+
+    """
+
+    __visit_name__ = "create_column"
+
+    def __init__(self, element):
+        self.element = element
+
+
+class DropTable(_DropBase):
+    """Represent a DROP TABLE statement."""
+
+    __visit_name__ = "drop_table"
+
+    def __init__(self, element: Table, if_exists: bool = False):
+        """Create a :class:`.DropTable` construct.
+
+        :param element: a :class:`_schema.Table` that's the subject
+         of the DROP.
+        :param if_exists: if True, an IF EXISTS operator will be applied to the
+         construct.
+
+         .. versionadded:: 1.4.0b2
+
+        """
+        super().__init__(element, if_exists=if_exists)
+
+
+class CreateSequence(_CreateBase):
+    """Represent a CREATE SEQUENCE statement."""
+
+    __visit_name__ = "create_sequence"
+
+    def __init__(self, element: Sequence, if_not_exists: bool = False):
+        super().__init__(element, if_not_exists=if_not_exists)
+
+
+class DropSequence(_DropBase):
+    """Represent a DROP SEQUENCE statement."""
+
+    __visit_name__ = "drop_sequence"
+
+    def __init__(self, element: Sequence, if_exists: bool = False):
+        super().__init__(element, if_exists=if_exists)
+
+
+class CreateIndex(_CreateBase):
+    """Represent a CREATE INDEX statement."""
+
+    __visit_name__ = "create_index"
+
+    def __init__(self, element, if_not_exists=False):
+        """Create a :class:`.CreateIndex` construct.
+
+        :param element: a :class:`_schema.Index` that's the subject
+         of the CREATE.
+        :param if_not_exists: if True, an IF NOT EXISTS operator will be
+         applied to the construct.
+
+         .. versionadded:: 1.4.0b2
+
+        """
+        super().__init__(element, if_not_exists=if_not_exists)
+
+
+class DropIndex(_DropBase):
+    """Represent a DROP INDEX statement."""
+
+    __visit_name__ = "drop_index"
+
+    def __init__(self, element, if_exists=False):
+        """Create a :class:`.DropIndex` construct.
+
+        :param element: a :class:`_schema.Index` that's the subject
+         of the DROP.
+        :param if_exists: if True, an IF EXISTS operator will be applied to the
+         construct.
+
+         .. versionadded:: 1.4.0b2
+
+        """
+        super().__init__(element, if_exists=if_exists)
+
+
+class AddConstraint(_CreateBase):
+    """Represent an ALTER TABLE ADD CONSTRAINT statement."""
+
+    __visit_name__ = "add_constraint"
+
+    def __init__(
+        self,
+        element: Constraint,
+        *,
+        isolate_from_table: bool = True,
+    ):
+        """Construct a new :class:`.AddConstraint` construct.
+
+        :param element: a :class:`.Constraint` object
+
+        :param isolate_from_table: optional boolean, defaults to True.  Has
+         the effect of the incoming constraint being isolated from being
+         included in a CREATE TABLE sequence when associated with a
+         :class:`.Table`.
+
+         .. versionadded:: 2.0.39 - added
+            :paramref:`.AddConstraint.isolate_from_table`, defaulting
+            to True.  Previously, the behavior of this parameter was implicitly
+            turned on in all cases.
+
+        """
+        super().__init__(element)
+
+        if isolate_from_table:
+            element._create_rule = util.portable_instancemethod(
+                self._create_rule_disable
+            )
+
+
+class DropConstraint(_DropBase):
+    """Represent an ALTER TABLE DROP CONSTRAINT statement."""
+
+    __visit_name__ = "drop_constraint"
+
+    def __init__(
+        self,
+        element: Constraint,
+        *,
+        cascade: bool = False,
+        if_exists: bool = False,
+        isolate_from_table: bool = True,
+        **kw: Any,
+    ):
+        """Construct a new :class:`.DropConstraint` construct.
+
+        :param element: a :class:`.Constraint` object
+        :param cascade: optional boolean, indicates backend-specific
+         "CASCADE CONSTRAINT" directive should be rendered if available
+        :param if_exists: optional boolean, indicates backend-specific
+         "IF EXISTS" directive should be rendered if available
+        :param isolate_from_table: optional boolean, defaults to True.  Has
+         the effect of the incoming constraint being isolated from being
+         included in a CREATE TABLE sequence when associated with a
+         :class:`.Table`.
+
+         .. versionadded:: 2.0.39 - added
+            :paramref:`.DropConstraint.isolate_from_table`, defaulting
+            to True.  Previously, the behavior of this parameter was implicitly
+            turned on in all cases.
+
+        """
+        self.cascade = cascade
+        super().__init__(element, if_exists=if_exists, **kw)
+
+        if isolate_from_table:
+            element._create_rule = util.portable_instancemethod(
+                self._create_rule_disable
+            )
+
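+# illustrative sketch (not part of the library): AddConstraint / DropConstraint
+# are typically emitted against an already-created table; ``my_table`` and
+# ``engine`` are hypothetical::
+#
+#     from sqlalchemy import CheckConstraint
+#
+#     ck = CheckConstraint("qty > 0", name="ck_qty_positive", table=my_table)
+#     with engine.begin() as conn:
+#         conn.execute(AddConstraint(ck))
+#         conn.execute(DropConstraint(ck, if_exists=True))
+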
+
+class SetTableComment(_CreateDropBase):
+    """Represent a COMMENT ON TABLE IS statement."""
+
+    __visit_name__ = "set_table_comment"
+
+
+class DropTableComment(_CreateDropBase):
+    """Represent a COMMENT ON TABLE '' statement.
+
+    Note this varies a lot across database backends.
+
+    """
+
+    __visit_name__ = "drop_table_comment"
+
+
+class SetColumnComment(_CreateDropBase):
+    """Represent a COMMENT ON COLUMN IS statement."""
+
+    __visit_name__ = "set_column_comment"
+
+
+class DropColumnComment(_CreateDropBase):
+    """Represent a COMMENT ON COLUMN IS NULL statement."""
+
+    __visit_name__ = "drop_column_comment"
+
+
+class SetConstraintComment(_CreateDropBase):
+    """Represent a COMMENT ON CONSTRAINT IS statement."""
+
+    __visit_name__ = "set_constraint_comment"
+
+
+class DropConstraintComment(_CreateDropBase):
+    """Represent a COMMENT ON CONSTRAINT IS NULL statement."""
+
+    __visit_name__ = "drop_constraint_comment"
+
+
+class InvokeDDLBase(SchemaVisitor):
+    def __init__(self, connection):
+        self.connection = connection
+
+    @contextlib.contextmanager
+    def with_ddl_events(self, target, **kw):
+        """helper context manager that will apply appropriate DDL events
+        to a CREATE or DROP operation."""
+
+        raise NotImplementedError()
+
+
+class InvokeCreateDDLBase(InvokeDDLBase):
+    @contextlib.contextmanager
+    def with_ddl_events(self, target, **kw):
+        """helper context manager that will apply appropriate DDL events
+        to a CREATE or DROP operation."""
+
+        target.dispatch.before_create(
+            target, self.connection, _ddl_runner=self, **kw
+        )
+        yield
+        target.dispatch.after_create(
+            target, self.connection, _ddl_runner=self, **kw
+        )
+
+
+class InvokeDropDDLBase(InvokeDDLBase):
+    @contextlib.contextmanager
+    def with_ddl_events(self, target, **kw):
+        """helper context manager that will apply appropriate DDL events
+        to a CREATE or DROP operation."""
+
+        target.dispatch.before_drop(
+            target, self.connection, _ddl_runner=self, **kw
+        )
+        yield
+        target.dispatch.after_drop(
+            target, self.connection, _ddl_runner=self, **kw
+        )
+
+
+class SchemaGenerator(InvokeCreateDDLBase):
+    def __init__(
+        self, dialect, connection, checkfirst=False, tables=None, **kwargs
+    ):
+        super().__init__(connection, **kwargs)
+        self.checkfirst = checkfirst
+        self.tables = tables
+        self.preparer = dialect.identifier_preparer
+        self.dialect = dialect
+        self.memo = {}
+
+    def _can_create_table(self, table):
+        self.dialect.validate_identifier(table.name)
+        effective_schema = self.connection.schema_for_object(table)
+        if effective_schema:
+            self.dialect.validate_identifier(effective_schema)
+        return not self.checkfirst or not self.dialect.has_table(
+            self.connection, table.name, schema=effective_schema
+        )
+
+    def _can_create_index(self, index):
+        effective_schema = self.connection.schema_for_object(index.table)
+        if effective_schema:
+            self.dialect.validate_identifier(effective_schema)
+        return not self.checkfirst or not self.dialect.has_index(
+            self.connection,
+            index.table.name,
+            index.name,
+            schema=effective_schema,
+        )
+
+    def _can_create_sequence(self, sequence):
+        effective_schema = self.connection.schema_for_object(sequence)
+
+        return self.dialect.supports_sequences and (
+            (not self.dialect.sequences_optional or not sequence.optional)
+            and (
+                not self.checkfirst
+                or not self.dialect.has_sequence(
+                    self.connection, sequence.name, schema=effective_schema
+                )
+            )
+        )
+
+    def visit_metadata(self, metadata):
+        if self.tables is not None:
+            tables = self.tables
+        else:
+            tables = list(metadata.tables.values())
+
+        collection = sort_tables_and_constraints(
+            [t for t in tables if self._can_create_table(t)]
+        )
+
+        seq_coll = [
+            s
+            for s in metadata._sequences.values()
+            if s.column is None and self._can_create_sequence(s)
+        ]
+
+        event_collection = [t for (t, fks) in collection if t is not None]
+
+        with self.with_ddl_events(
+            metadata,
+            tables=event_collection,
+            checkfirst=self.checkfirst,
+        ):
+            for seq in seq_coll:
+                self.traverse_single(seq, create_ok=True)
+
+            for table, fkcs in collection:
+                if table is not None:
+                    self.traverse_single(
+                        table,
+                        create_ok=True,
+                        include_foreign_key_constraints=fkcs,
+                        _is_metadata_operation=True,
+                    )
+                else:
+                    for fkc in fkcs:
+                        self.traverse_single(fkc)
+
+    def visit_table(
+        self,
+        table,
+        create_ok=False,
+        include_foreign_key_constraints=None,
+        _is_metadata_operation=False,
+    ):
+        if not create_ok and not self._can_create_table(table):
+            return
+
+        with self.with_ddl_events(
+            table,
+            checkfirst=self.checkfirst,
+            _is_metadata_operation=_is_metadata_operation,
+        ):
+            for column in table.columns:
+                if column.default is not None:
+                    self.traverse_single(column.default)
+
+            if not self.dialect.supports_alter:
+                # i.e., don't omit any foreign key constraints
+                include_foreign_key_constraints = None
+
+            CreateTable(
+                table,
+                include_foreign_key_constraints=(
+                    include_foreign_key_constraints
+                ),
+            )._invoke_with(self.connection)
+
+            if hasattr(table, "indexes"):
+                for index in table.indexes:
+                    self.traverse_single(index, create_ok=True)
+
+            if (
+                self.dialect.supports_comments
+                and not self.dialect.inline_comments
+            ):
+                if table.comment is not None:
+                    SetTableComment(table)._invoke_with(self.connection)
+
+                for column in table.columns:
+                    if column.comment is not None:
+                        SetColumnComment(column)._invoke_with(self.connection)
+
+                if self.dialect.supports_constraint_comments:
+                    for constraint in table.constraints:
+                        if constraint.comment is not None:
+                            self.connection.execute(
+                                SetConstraintComment(constraint)
+                            )
+
+    def visit_foreign_key_constraint(self, constraint):
+        if not self.dialect.supports_alter:
+            return
+
+        with self.with_ddl_events(constraint):
+            AddConstraint(constraint)._invoke_with(self.connection)
+
+    def visit_sequence(self, sequence, create_ok=False):
+        if not create_ok and not self._can_create_sequence(sequence):
+            return
+        with self.with_ddl_events(sequence):
+            CreateSequence(sequence)._invoke_with(self.connection)
+
+    def visit_index(self, index, create_ok=False):
+        if not create_ok and not self._can_create_index(index):
+            return
+        with self.with_ddl_events(index):
+            CreateIndex(index)._invoke_with(self.connection)
+
+
+class SchemaDropper(InvokeDropDDLBase):
+    def __init__(
+        self, dialect, connection, checkfirst=False, tables=None, **kwargs
+    ):
+        super().__init__(connection, **kwargs)
+        self.checkfirst = checkfirst
+        self.tables = tables
+        self.preparer = dialect.identifier_preparer
+        self.dialect = dialect
+        self.memo = {}
+
+    def visit_metadata(self, metadata):
+        if self.tables is not None:
+            tables = self.tables
+        else:
+            tables = list(metadata.tables.values())
+
+        try:
+            unsorted_tables = [t for t in tables if self._can_drop_table(t)]
+            collection = list(
+                reversed(
+                    sort_tables_and_constraints(
+                        unsorted_tables,
+                        filter_fn=lambda constraint: (
+                            False
+                            if not self.dialect.supports_alter
+                            or constraint.name is None
+                            else None
+                        ),
+                    )
+                )
+            )
+        except exc.CircularDependencyError as err2:
+            if not self.dialect.supports_alter:
+                util.warn(
+                    "Can't sort tables for DROP; an "
+                    "unresolvable foreign key "
+                    "dependency exists between tables: %s; and backend does "
+                    "not support ALTER.  To restore at least a partial sort, "
+                    "apply use_alter=True to ForeignKey and "
+                    "ForeignKeyConstraint "
+                    "objects involved in the cycle to mark these as known "
+                    "cycles that will be ignored."
+                    % (", ".join(sorted([t.fullname for t in err2.cycles])))
+                )
+                collection = [(t, ()) for t in unsorted_tables]
+            else:
+                raise exc.CircularDependencyError(
+                    err2.args[0],
+                    err2.cycles,
+                    err2.edges,
+                    msg="Can't sort tables for DROP; an "
+                    "unresolvable foreign key "
+                    "dependency exists between tables: %s.  Please ensure "
+                    "that the ForeignKey and ForeignKeyConstraint objects "
+                    "involved in the cycle have "
+                    "names so that they can be dropped using "
+                    "DROP CONSTRAINT."
+                    % (", ".join(sorted([t.fullname for t in err2.cycles]))),
+                ) from err2
+
+        seq_coll = [
+            s
+            for s in metadata._sequences.values()
+            if self._can_drop_sequence(s)
+        ]
+
+        event_collection = [t for (t, fks) in collection if t is not None]
+
+        with self.with_ddl_events(
+            metadata,
+            tables=event_collection,
+            checkfirst=self.checkfirst,
+        ):
+            for table, fkcs in collection:
+                if table is not None:
+                    self.traverse_single(
+                        table,
+                        drop_ok=True,
+                        _is_metadata_operation=True,
+                        _ignore_sequences=seq_coll,
+                    )
+                else:
+                    for fkc in fkcs:
+                        self.traverse_single(fkc)
+
+            for seq in seq_coll:
+                self.traverse_single(seq, drop_ok=seq.column is None)
+
+    def _can_drop_table(self, table):
+        self.dialect.validate_identifier(table.name)
+        effective_schema = self.connection.schema_for_object(table)
+        if effective_schema:
+            self.dialect.validate_identifier(effective_schema)
+        return not self.checkfirst or self.dialect.has_table(
+            self.connection, table.name, schema=effective_schema
+        )
+
+    def _can_drop_index(self, index):
+        effective_schema = self.connection.schema_for_object(index.table)
+        if effective_schema:
+            self.dialect.validate_identifier(effective_schema)
+        return not self.checkfirst or self.dialect.has_index(
+            self.connection,
+            index.table.name,
+            index.name,
+            schema=effective_schema,
+        )
+
+    def _can_drop_sequence(self, sequence):
+        effective_schema = self.connection.schema_for_object(sequence)
+        return self.dialect.supports_sequences and (
+            (not self.dialect.sequences_optional or not sequence.optional)
+            and (
+                not self.checkfirst
+                or self.dialect.has_sequence(
+                    self.connection, sequence.name, schema=effective_schema
+                )
+            )
+        )
+
+    def visit_index(self, index, drop_ok=False):
+        if not drop_ok and not self._can_drop_index(index):
+            return
+
+        with self.with_ddl_events(index):
+            DropIndex(index)(index, self.connection)
+
+    def visit_table(
+        self,
+        table,
+        drop_ok=False,
+        _is_metadata_operation=False,
+        _ignore_sequences=(),
+    ):
+        if not drop_ok and not self._can_drop_table(table):
+            return
+
+        with self.with_ddl_events(
+            table,
+            checkfirst=self.checkfirst,
+            _is_metadata_operation=_is_metadata_operation,
+        ):
+            DropTable(table)._invoke_with(self.connection)
+
+            # traverse client-side defaults which may refer to server-side
+            # sequences, noting that some of these client-side defaults may
+            # also be set up as server-side defaults (see
+            # https://docs.sqlalchemy.org/en/latest/core/defaults.html#associating-a-sequence-as-the-server-side-default),
+            # so they have to be dropped after the table is dropped.
+            for column in table.columns:
+                if (
+                    column.default is not None
+                    and column.default not in _ignore_sequences
+                ):
+                    self.traverse_single(column.default)
+
+    def visit_foreign_key_constraint(self, constraint):
+        if not self.dialect.supports_alter:
+            return
+        with self.with_ddl_events(constraint):
+            DropConstraint(constraint)._invoke_with(self.connection)
+
+    def visit_sequence(self, sequence, drop_ok=False):
+        if not drop_ok and not self._can_drop_sequence(sequence):
+            return
+        with self.with_ddl_events(sequence):
+            DropSequence(sequence)._invoke_with(self.connection)
+
+
+def sort_tables(
+    tables: Iterable[TableClause],
+    skip_fn: Optional[Callable[[ForeignKeyConstraint], bool]] = None,
+    extra_dependencies: Optional[
+        typing_Sequence[Tuple[TableClause, TableClause]]
+    ] = None,
+) -> List[Table]:
+    """Sort a collection of :class:`_schema.Table` objects based on
+    dependency.
+
+    This is a dependency-ordered sort which will emit :class:`_schema.Table`
+    objects such that they will follow their dependent :class:`_schema.Table`
+    objects.
+    Tables are dependent on another based on the presence of
+    :class:`_schema.ForeignKeyConstraint`
+    objects as well as explicit dependencies
+    added by :meth:`_schema.Table.add_is_dependent_on`.
+
+    .. warning::
+
+        The :func:`_schema.sort_tables` function cannot by itself
+        accommodate automatic resolution of dependency cycles between
+        tables, which are usually caused by mutually dependent foreign key
+        constraints.  When these cycles are detected, the foreign keys
+        of these tables are omitted from consideration in the sort.
+        A warning is emitted when this condition occurs, which will become
+        an exception in a future release.  Tables which are not part
+        of the cycle will still be returned in dependency order.
+
+        To resolve these cycles, the
+        :paramref:`_schema.ForeignKeyConstraint.use_alter` parameter may be
+        applied to those constraints which create a cycle.  Alternatively,
+        the :func:`_schema.sort_tables_and_constraints` function will
+        automatically return foreign key constraints in a separate
+        collection when cycles are detected so that they may be applied
+        to a schema separately.
+
+        .. versionchanged:: 1.3.17 - a warning is emitted when
+           :func:`_schema.sort_tables` cannot perform a proper sort due to
+           cyclical dependencies.  This will be an exception in a future
+           release.  Additionally, the sort will continue to return
+           other tables not involved in the cycle in dependency order
+           which was not the case previously.
+
+    :param tables: a sequence of :class:`_schema.Table` objects.
+
+    :param skip_fn: optional callable which will be passed a
+     :class:`_schema.ForeignKey` object; if it returns True, this
+     constraint will not be considered as a dependency.  Note this is
+     **different** from the same parameter in
+     :func:`.sort_tables_and_constraints`, which is
+     instead passed the owning :class:`_schema.ForeignKeyConstraint` object.
+
+    :param extra_dependencies: a sequence of 2-tuples of tables which will
+     also be considered as dependent on each other.
+
+    .. seealso::
+
+        :func:`.sort_tables_and_constraints`
+
+        :attr:`_schema.MetaData.sorted_tables` - uses this function to sort
+
+
+    """
+
+    if skip_fn is not None:
+        fixed_skip_fn = skip_fn
+
+        def _skip_fn(fkc):
+            for fk in fkc.elements:
+                if fixed_skip_fn(fk):
+                    return True
+            else:
+                return None
+
+    else:
+        _skip_fn = None  # type: ignore
+
+    return [
+        t
+        for (t, fkcs) in sort_tables_and_constraints(
+            tables,
+            filter_fn=_skip_fn,
+            extra_dependencies=extra_dependencies,
+            _warn_for_cycles=True,
+        )
+        if t is not None
+    ]
+
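+# illustrative sketch (not part of the library): sort_tables() emits parent
+# tables before the tables that depend on them::
+#
+#     from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table
+#
+#     m = MetaData()
+#     parent = Table("parent", m, Column("id", Integer, primary_key=True))
+#     child = Table(
+#         "child",
+#         m,
+#         Column("id", Integer, primary_key=True),
+#         Column("parent_id", ForeignKey("parent.id")),
+#     )
+#     assert [t.name for t in sort_tables([child, parent])] == [
+#         "parent",
+#         "child",
+#     ]
+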
+
+def sort_tables_and_constraints(
+    tables, filter_fn=None, extra_dependencies=None, _warn_for_cycles=False
+):
+    """Sort a collection of :class:`_schema.Table`  /
+    :class:`_schema.ForeignKeyConstraint`
+    objects.
+
+    This is a dependency-ordered sort which will emit tuples of
+    ``(Table, [ForeignKeyConstraint, ...])`` such that each
+    :class:`_schema.Table` follows its dependent :class:`_schema.Table`
+    objects.
+    Remaining :class:`_schema.ForeignKeyConstraint`
+    objects that are separate due to
+    dependency rules not satisfied by the sort are emitted afterwards
+    as ``(None, [ForeignKeyConstraint ...])``.
+
+    Tables are dependent on another based on the presence of
+    :class:`_schema.ForeignKeyConstraint` objects, explicit dependencies
+    added by :meth:`_schema.Table.add_is_dependent_on`,
+    as well as dependencies
+    stated here using the :paramref:`~.sort_tables_and_constraints.skip_fn`
+    and/or :paramref:`~.sort_tables_and_constraints.extra_dependencies`
+    parameters.
+
+    :param tables: a sequence of :class:`_schema.Table` objects.
+
+    :param filter_fn: optional callable which will be passed a
+     :class:`_schema.ForeignKeyConstraint` object,
+     and returns a value based on
+     whether this constraint should definitely be included or excluded as
+     an inline constraint, or neither.   If it returns False, the constraint
+     will definitely be included as a dependency that cannot be subject
+     to ALTER; if True, it will **only** be included as an ALTER result at
+     the end.   Returning None means the constraint is included in the
+     table-based result unless it is detected as part of a dependency cycle.
+
+    :param extra_dependencies: a sequence of 2-tuples of tables which will
+     also be considered as dependent on each other.
+
+    .. seealso::
+
+        :func:`.sort_tables`
+
+
+    """
+
+    fixed_dependencies = set()
+    mutable_dependencies = set()
+
+    if extra_dependencies is not None:
+        fixed_dependencies.update(extra_dependencies)
+
+    remaining_fkcs = set()
+    for table in tables:
+        for fkc in table.foreign_key_constraints:
+            if fkc.use_alter is True:
+                remaining_fkcs.add(fkc)
+                continue
+
+            if filter_fn:
+                filtered = filter_fn(fkc)
+
+                if filtered is True:
+                    remaining_fkcs.add(fkc)
+                    continue
+
+            dependent_on = fkc.referred_table
+            if dependent_on is not table:
+                mutable_dependencies.add((dependent_on, table))
+
+        fixed_dependencies.update(
+            (parent, table) for parent in table._extra_dependencies
+        )
+
+    try:
+        candidate_sort = list(
+            topological.sort(
+                fixed_dependencies.union(mutable_dependencies),
+                tables,
+            )
+        )
+    except exc.CircularDependencyError as err:
+        if _warn_for_cycles:
+            util.warn(
+                "Cannot correctly sort tables; there are unresolvable cycles "
+                'between tables "%s", which is usually caused by mutually '
+                "dependent foreign key constraints.  Foreign key constraints "
+                "involving these tables will not be considered; this warning "
+                "may raise an error in a future release."
+                % (", ".join(sorted(t.fullname for t in err.cycles)),)
+            )
+        for edge in err.edges:
+            if edge in mutable_dependencies:
+                table = edge[1]
+                if table not in err.cycles:
+                    continue
+                can_remove = [
+                    fkc
+                    for fkc in table.foreign_key_constraints
+                    if filter_fn is None or filter_fn(fkc) is not False
+                ]
+                remaining_fkcs.update(can_remove)
+                for fkc in can_remove:
+                    dependent_on = fkc.referred_table
+                    if dependent_on is not table:
+                        mutable_dependencies.discard((dependent_on, table))
+        candidate_sort = list(
+            topological.sort(
+                fixed_dependencies.union(mutable_dependencies),
+                tables,
+            )
+        )
+
+    return [
+        (table, table.foreign_key_constraints.difference(remaining_fkcs))
+        for table in candidate_sort
+    ] + [(None, list(remaining_fkcs))]
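+
+
+# illustrative note (not part of the library): the result is a list of
+# (Table, [ForeignKeyConstraint, ...]) tuples, followed by a final
+# (None, [...]) entry collecting constraints to be added via ALTER::
+#
+#     for table, fkcs in sort_tables_and_constraints(metadata.tables.values()):
+#         if table is not None:
+#             ...  # render CREATE TABLE with fkcs inline
+#         else:
+#             ...  # render ALTER TABLE ADD CONSTRAINT for each fkc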
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/default_comparator.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/default_comparator.py
new file mode 100644
index 00000000..7fa5dafe
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/default_comparator.py
@@ -0,0 +1,552 @@
+# sql/default_comparator.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Default implementation of SQL comparison operations.
+"""
+
+from __future__ import annotations
+
+import typing
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import NoReturn
+from typing import Optional
+from typing import Tuple
+from typing import Type
+from typing import Union
+
+from . import coercions
+from . import operators
+from . import roles
+from . import type_api
+from .elements import and_
+from .elements import BinaryExpression
+from .elements import ClauseElement
+from .elements import CollationClause
+from .elements import CollectionAggregate
+from .elements import ExpressionClauseList
+from .elements import False_
+from .elements import Null
+from .elements import OperatorExpression
+from .elements import or_
+from .elements import True_
+from .elements import UnaryExpression
+from .operators import OperatorType
+from .. import exc
+from .. import util
+
+_T = typing.TypeVar("_T", bound=Any)
+
+if typing.TYPE_CHECKING:
+    from .elements import ColumnElement
+    from .operators import custom_op
+    from .type_api import TypeEngine
+
+
+def _boolean_compare(
+    expr: ColumnElement[Any],
+    op: OperatorType,
+    obj: Any,
+    *,
+    negate_op: Optional[OperatorType] = None,
+    reverse: bool = False,
+    _python_is_types: Tuple[Type[Any], ...] = (type(None), bool),
+    result_type: Optional[TypeEngine[bool]] = None,
+    **kwargs: Any,
+) -> OperatorExpression[bool]:
+    if result_type is None:
+        result_type = type_api.BOOLEANTYPE
+
+    if isinstance(obj, _python_is_types + (Null, True_, False_)):
+        # allow x ==/!= True/False to be treated as a literal.
+        # this comes out to "== / != true/false" or "1/0" if those
+        # constants aren't supported and works on all platforms
+        if op in (operators.eq, operators.ne) and isinstance(
+            obj, (bool, True_, False_)
+        ):
+            return OperatorExpression._construct_for_op(
+                expr,
+                coercions.expect(roles.ConstExprRole, obj),
+                op,
+                type_=result_type,
+                negate=negate_op,
+                modifiers=kwargs,
+            )
+        elif op in (
+            operators.is_distinct_from,
+            operators.is_not_distinct_from,
+        ):
+            return OperatorExpression._construct_for_op(
+                expr,
+                coercions.expect(roles.ConstExprRole, obj),
+                op,
+                type_=result_type,
+                negate=negate_op,
+                modifiers=kwargs,
+            )
+        elif expr._is_collection_aggregate:
+            obj = coercions.expect(
+                roles.ConstExprRole, element=obj, operator=op, expr=expr
+            )
+        else:
+            # all other comparisons with None/True/False use IS, IS NOT
+            if op in (operators.eq, operators.is_):
+                return OperatorExpression._construct_for_op(
+                    expr,
+                    coercions.expect(roles.ConstExprRole, obj),
+                    operators.is_,
+                    negate=operators.is_not,
+                    type_=result_type,
+                )
+            elif op in (operators.ne, operators.is_not):
+                return OperatorExpression._construct_for_op(
+                    expr,
+                    coercions.expect(roles.ConstExprRole, obj),
+                    operators.is_not,
+                    negate=operators.is_,
+                    type_=result_type,
+                )
+            else:
+                raise exc.ArgumentError(
+                    "Only '=', '!=', 'is_()', 'is_not()', "
+                    "'is_distinct_from()', 'is_not_distinct_from()' "
+                    "operators can be used with None/True/False"
+                )
+    else:
+        obj = coercions.expect(
+            roles.BinaryElementRole, element=obj, operator=op, expr=expr
+        )
+
+    if reverse:
+        return OperatorExpression._construct_for_op(
+            obj,
+            expr,
+            op,
+            type_=result_type,
+            negate=negate_op,
+            modifiers=kwargs,
+        )
+    else:
+        return OperatorExpression._construct_for_op(
+            expr,
+            obj,
+            op,
+            type_=result_type,
+            negate=negate_op,
+            modifiers=kwargs,
+        )
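+
+
+# A usage sketch for illustration (not part of the module): with a plain
+# ``column("x")``, comparing against None routes through _boolean_compare
+# and renders IS / IS NOT rather than "= NULL"::
+#
+#     from sqlalchemy import column
+#
+#     expr = column("x") == None  # noqa: E711
+#     # str(expr) gives "x IS NULL"; ~expr gives "x IS NOT NULL"
+#     # via the negate operator wired up above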
+
+
+def _custom_op_operate(
+    expr: ColumnElement[Any],
+    op: custom_op[Any],
+    obj: Any,
+    reverse: bool = False,
+    result_type: Optional[TypeEngine[Any]] = None,
+    **kw: Any,
+) -> ColumnElement[Any]:
+    if result_type is None:
+        if op.return_type:
+            result_type = op.return_type
+        elif op.is_comparison:
+            result_type = type_api.BOOLEANTYPE
+
+    return _binary_operate(
+        expr, op, obj, reverse=reverse, result_type=result_type, **kw
+    )
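+
+
+# Illustration (not part of the module): a custom operator created with
+# ``ColumnOperators.op()`` dispatches here; passing ``is_comparison=True``
+# yields a boolean result type when no explicit return_type is set::
+#
+#     from sqlalchemy import column
+#
+#     expr = column("data").op("@>", is_comparison=True)(column("other"))
+#     # str(expr) gives "data @> other"; expr.type is the boolean type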
+
+
+def _binary_operate(
+    expr: ColumnElement[Any],
+    op: OperatorType,
+    obj: roles.BinaryElementRole[Any],
+    *,
+    reverse: bool = False,
+    result_type: Optional[TypeEngine[_T]] = None,
+    **kw: Any,
+) -> OperatorExpression[_T]:
+    coerced_obj = coercions.expect(
+        roles.BinaryElementRole, obj, expr=expr, operator=op
+    )
+
+    if reverse:
+        left, right = coerced_obj, expr
+    else:
+        left, right = expr, coerced_obj
+
+    if result_type is None:
+        op, result_type = left.comparator._adapt_expression(
+            op, right.comparator
+        )
+
+    return OperatorExpression._construct_for_op(
+        left, right, op, type_=result_type, modifiers=kw
+    )
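+
+
+# Illustration (not part of the module): when no result_type is passed,
+# the type is negotiated through _adapt_expression() on the left side::
+#
+#     from sqlalchemy import Integer, column
+#
+#     expr = column("n", Integer) + 5
+#     # the literal 5 is coerced to a bind parameter; str(expr) gives
+#     # "n + :n_1" and the expression type resolves to Integer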
+
+
+def _conjunction_operate(
+    expr: ColumnElement[Any], op: OperatorType, other: Any, **kw: Any
+) -> ColumnElement[Any]:
+    if op is operators.and_:
+        return and_(expr, other)
+    elif op is operators.or_:
+        return or_(expr, other)
+    else:
+        raise NotImplementedError()
+
+
+def _scalar(
+    expr: ColumnElement[Any],
+    op: OperatorType,
+    fn: Callable[[ColumnElement[Any]], ColumnElement[Any]],
+    **kw: Any,
+) -> ColumnElement[Any]:
+    return fn(expr)
+
+
+def _in_impl(
+    expr: ColumnElement[Any],
+    op: OperatorType,
+    seq_or_selectable: ClauseElement,
+    negate_op: OperatorType,
+    **kw: Any,
+) -> ColumnElement[Any]:
+    seq_or_selectable = coercions.expect(
+        roles.InElementRole, seq_or_selectable, expr=expr, operator=op
+    )
+    if "in_ops" in seq_or_selectable._annotations:
+        op, negate_op = seq_or_selectable._annotations["in_ops"]
+
+    return _boolean_compare(
+        expr, op, seq_or_selectable, negate_op=negate_op, **kw
+    )
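+
+
+# A short sketch (not part of the module): ``in_()`` arrives here with
+# not_in_op pre-wired as the negation::
+#
+#     from sqlalchemy import column
+#
+#     expr = column("q").in_([1, 2, 3])
+#     # renders as an IN expression using an expanding bind parameter;
+#     # ~expr renders the NOT IN form via negate_op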
+
+
+def _getitem_impl(
+    expr: ColumnElement[Any], op: OperatorType, other: Any, **kw: Any
+) -> ColumnElement[Any]:
+    if (
+        isinstance(expr.type, type_api.INDEXABLE)
+        or isinstance(expr.type, type_api.TypeDecorator)
+        and isinstance(expr.type.impl_instance, type_api.INDEXABLE)
+    ):
+        other = coercions.expect(
+            roles.BinaryElementRole, other, expr=expr, operator=op
+        )
+        return _binary_operate(expr, op, other, **kw)
+    else:
+        _unsupported_impl(expr, op, other, **kw)
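+
+
+# Illustration (not part of the module): indexed access is accepted only
+# for Indexable types such as ARRAY or JSON::
+#
+#     from sqlalchemy import ARRAY, Integer, column
+#
+#     expr = column("a", ARRAY(Integer))[2]
+#     # accepted because ARRAY is Indexable; rendering is dialect-specific
+#     # (e.g. "a[%(a_1)s]" on PostgreSQL).  The same syntax on a plain
+#     # Integer column raises NotImplementedError via _unsupported_impl()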
+
+
+def _unsupported_impl(
+    expr: ColumnElement[Any], op: OperatorType, *arg: Any, **kw: Any
+) -> NoReturn:
+    raise NotImplementedError(
+        "Operator '%s' is not supported on this expression" % op.__name__
+    )
+
+
+def _inv_impl(
+    expr: ColumnElement[Any], op: OperatorType, **kw: Any
+) -> ColumnElement[Any]:
+    """See :meth:`.ColumnOperators.__inv__`."""
+
+    # undocumented element currently used by the ORM for
+    # relationship.contains()
+    if hasattr(expr, "negation_clause"):
+        return expr.negation_clause
+    else:
+        return expr._negate()
+
+
+def _neg_impl(
+    expr: ColumnElement[Any], op: OperatorType, **kw: Any
+) -> ColumnElement[Any]:
+    """See :meth:`.ColumnOperators.__neg__`."""
+    return UnaryExpression(expr, operator=operators.neg, type_=expr.type)
+
+
+def _bitwise_not_impl(
+    expr: ColumnElement[Any], op: OperatorType, **kw: Any
+) -> ColumnElement[Any]:
+    """See :meth:`.ColumnOperators.bitwise_not`."""
+
+    return UnaryExpression(
+        expr, operator=operators.bitwise_not_op, type_=expr.type
+    )
+
+
+def _match_impl(
+    expr: ColumnElement[Any], op: OperatorType, other: Any, **kw: Any
+) -> ColumnElement[Any]:
+    """See :meth:`.ColumnOperators.match`."""
+
+    return _boolean_compare(
+        expr,
+        operators.match_op,
+        coercions.expect(
+            roles.BinaryElementRole,
+            other,
+            expr=expr,
+            operator=operators.match_op,
+        ),
+        result_type=type_api.MATCHTYPE,
+        negate_op=(
+            operators.not_match_op
+            if op is operators.match_op
+            else operators.match_op
+        ),
+        **kw,
+    )
+
+
+def _distinct_impl(
+    expr: ColumnElement[Any], op: OperatorType, **kw: Any
+) -> ColumnElement[Any]:
+    """See :meth:`.ColumnOperators.distinct`."""
+    return UnaryExpression(
+        expr, operator=operators.distinct_op, type_=expr.type
+    )
+
+
+def _between_impl(
+    expr: ColumnElement[Any],
+    op: OperatorType,
+    cleft: Any,
+    cright: Any,
+    **kw: Any,
+) -> ColumnElement[Any]:
+    """See :meth:`.ColumnOperators.between`."""
+    return BinaryExpression(
+        expr,
+        ExpressionClauseList._construct_for_list(
+            operators.and_,
+            type_api.NULLTYPE,
+            coercions.expect(
+                roles.BinaryElementRole,
+                cleft,
+                expr=expr,
+                operator=operators.and_,
+            ),
+            coercions.expect(
+                roles.BinaryElementRole,
+                cright,
+                expr=expr,
+                operator=operators.and_,
+            ),
+            group=False,
+        ),
+        op,
+        negate=(
+            operators.not_between_op
+            if op is operators.between_op
+            else operators.between_op
+        ),
+        modifiers=kw,
+    )
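+
+
+# A short sketch (not part of the module): both bounds are coerced and
+# joined with AND inside a single BETWEEN expression::
+#
+#     from sqlalchemy import column
+#
+#     expr = column("x").between(1, 10)
+#     # str(expr) gives "x BETWEEN :x_1 AND :x_2"; ~expr gives the
+#     # NOT BETWEEN form via the negate operator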
+
+
+def _collate_impl(
+    expr: ColumnElement[str], op: OperatorType, collation: str, **kw: Any
+) -> ColumnElement[str]:
+    return CollationClause._create_collation_expression(expr, collation)
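+
+
+# Illustration (not part of the module)::
+#
+#     from sqlalchemy import column
+#
+#     expr = column("s").collate("utf8_bin")
+#     # str(expr) gives "s COLLATE utf8_bin"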
+
+
+def _regexp_match_impl(
+    expr: ColumnElement[str],
+    op: OperatorType,
+    pattern: Any,
+    flags: Optional[str],
+    **kw: Any,
+) -> ColumnElement[Any]:
+    return BinaryExpression(
+        expr,
+        coercions.expect(
+            roles.BinaryElementRole,
+            pattern,
+            expr=expr,
+            operator=operators.comma_op,
+        ),
+        op,
+        negate=operators.not_regexp_match_op,
+        modifiers={"flags": flags},
+    )
+
+
+def _regexp_replace_impl(
+    expr: ColumnElement[Any],
+    op: OperatorType,
+    pattern: Any,
+    replacement: Any,
+    flags: Optional[str],
+    **kw: Any,
+) -> ColumnElement[Any]:
+    return BinaryExpression(
+        expr,
+        ExpressionClauseList._construct_for_list(
+            operators.comma_op,
+            type_api.NULLTYPE,
+            coercions.expect(
+                roles.BinaryElementRole,
+                pattern,
+                expr=expr,
+                operator=operators.comma_op,
+            ),
+            coercions.expect(
+                roles.BinaryElementRole,
+                replacement,
+                expr=expr,
+                operator=operators.comma_op,
+            ),
+            group=False,
+        ),
+        op,
+        modifiers={"flags": flags},
+    )
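+
+
+# A short sketch (not part of the module): the regexp helpers above carry
+# the pattern (and replacement) as operands, with flags stored in the
+# expression's modifiers; rendering is dialect-specific::
+#
+#     from sqlalchemy import column
+#
+#     expr = column("s").regexp_match("^a", flags="i")
+#     # e.g. PostgreSQL renders a ~ -style operator, MySQL uses REGEXP;
+#     # dialects without regexp support raise at compile time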
+
+
+# a mapping of operators with the method they use, along with
+# additional keyword arguments to be passed
+operator_lookup: Dict[
+    str,
+    Tuple[
+        Callable[..., ColumnElement[Any]],
+        util.immutabledict[
+            str, Union[OperatorType, Callable[..., ColumnElement[Any]]]
+        ],
+    ],
+] = {
+    "and_": (_conjunction_operate, util.EMPTY_DICT),
+    "or_": (_conjunction_operate, util.EMPTY_DICT),
+    "inv": (_inv_impl, util.EMPTY_DICT),
+    "add": (_binary_operate, util.EMPTY_DICT),
+    "mul": (_binary_operate, util.EMPTY_DICT),
+    "sub": (_binary_operate, util.EMPTY_DICT),
+    "div": (_binary_operate, util.EMPTY_DICT),
+    "mod": (_binary_operate, util.EMPTY_DICT),
+    "bitwise_xor_op": (_binary_operate, util.EMPTY_DICT),
+    "bitwise_or_op": (_binary_operate, util.EMPTY_DICT),
+    "bitwise_and_op": (_binary_operate, util.EMPTY_DICT),
+    "bitwise_not_op": (_bitwise_not_impl, util.EMPTY_DICT),
+    "bitwise_lshift_op": (_binary_operate, util.EMPTY_DICT),
+    "bitwise_rshift_op": (_binary_operate, util.EMPTY_DICT),
+    "truediv": (_binary_operate, util.EMPTY_DICT),
+    "floordiv": (_binary_operate, util.EMPTY_DICT),
+    "custom_op": (_custom_op_operate, util.EMPTY_DICT),
+    "json_path_getitem_op": (_binary_operate, util.EMPTY_DICT),
+    "json_getitem_op": (_binary_operate, util.EMPTY_DICT),
+    "concat_op": (_binary_operate, util.EMPTY_DICT),
+    "any_op": (
+        _scalar,
+        util.immutabledict({"fn": CollectionAggregate._create_any}),
+    ),
+    "all_op": (
+        _scalar,
+        util.immutabledict({"fn": CollectionAggregate._create_all}),
+    ),
+    "lt": (_boolean_compare, util.immutabledict({"negate_op": operators.ge})),
+    "le": (_boolean_compare, util.immutabledict({"negate_op": operators.gt})),
+    "ne": (_boolean_compare, util.immutabledict({"negate_op": operators.eq})),
+    "gt": (_boolean_compare, util.immutabledict({"negate_op": operators.le})),
+    "ge": (_boolean_compare, util.immutabledict({"negate_op": operators.lt})),
+    "eq": (_boolean_compare, util.immutabledict({"negate_op": operators.ne})),
+    "is_distinct_from": (
+        _boolean_compare,
+        util.immutabledict({"negate_op": operators.is_not_distinct_from}),
+    ),
+    "is_not_distinct_from": (
+        _boolean_compare,
+        util.immutabledict({"negate_op": operators.is_distinct_from}),
+    ),
+    "like_op": (
+        _boolean_compare,
+        util.immutabledict({"negate_op": operators.not_like_op}),
+    ),
+    "ilike_op": (
+        _boolean_compare,
+        util.immutabledict({"negate_op": operators.not_ilike_op}),
+    ),
+    "not_like_op": (
+        _boolean_compare,
+        util.immutabledict({"negate_op": operators.like_op}),
+    ),
+    "not_ilike_op": (
+        _boolean_compare,
+        util.immutabledict({"negate_op": operators.ilike_op}),
+    ),
+    "contains_op": (
+        _boolean_compare,
+        util.immutabledict({"negate_op": operators.not_contains_op}),
+    ),
+    "icontains_op": (
+        _boolean_compare,
+        util.immutabledict({"negate_op": operators.not_icontains_op}),
+    ),
+    "startswith_op": (
+        _boolean_compare,
+        util.immutabledict({"negate_op": operators.not_startswith_op}),
+    ),
+    "istartswith_op": (
+        _boolean_compare,
+        util.immutabledict({"negate_op": operators.not_istartswith_op}),
+    ),
+    "endswith_op": (
+        _boolean_compare,
+        util.immutabledict({"negate_op": operators.not_endswith_op}),
+    ),
+    "iendswith_op": (
+        _boolean_compare,
+        util.immutabledict({"negate_op": operators.not_iendswith_op}),
+    ),
+    "desc_op": (
+        _scalar,
+        util.immutabledict({"fn": UnaryExpression._create_desc}),
+    ),
+    "asc_op": (
+        _scalar,
+        util.immutabledict({"fn": UnaryExpression._create_asc}),
+    ),
+    "nulls_first_op": (
+        _scalar,
+        util.immutabledict({"fn": UnaryExpression._create_nulls_first}),
+    ),
+    "nulls_last_op": (
+        _scalar,
+        util.immutabledict({"fn": UnaryExpression._create_nulls_last}),
+    ),
+    "in_op": (
+        _in_impl,
+        util.immutabledict({"negate_op": operators.not_in_op}),
+    ),
+    "not_in_op": (
+        _in_impl,
+        util.immutabledict({"negate_op": operators.in_op}),
+    ),
+    "is_": (
+        _boolean_compare,
+        util.immutabledict({"negate_op": operators.is_}),
+    ),
+    "is_not": (
+        _boolean_compare,
+        util.immutabledict({"negate_op": operators.is_not}),
+    ),
+    "collate": (_collate_impl, util.EMPTY_DICT),
+    "match_op": (_match_impl, util.EMPTY_DICT),
+    "not_match_op": (_match_impl, util.EMPTY_DICT),
+    "distinct_op": (_distinct_impl, util.EMPTY_DICT),
+    "between_op": (_between_impl, util.EMPTY_DICT),
+    "not_between_op": (_between_impl, util.EMPTY_DICT),
+    "neg": (_neg_impl, util.EMPTY_DICT),
+    "getitem": (_getitem_impl, util.EMPTY_DICT),
+    "lshift": (_unsupported_impl, util.EMPTY_DICT),
+    "rshift": (_unsupported_impl, util.EMPTY_DICT),
+    "contains": (_unsupported_impl, util.EMPTY_DICT),
+    "regexp_match_op": (_regexp_match_impl, util.EMPTY_DICT),
+    "not_regexp_match_op": (_regexp_match_impl, util.EMPTY_DICT),
+    "regexp_replace_op": (_regexp_replace_impl, util.EMPTY_DICT),
+}
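+
+
+# A rough sketch (not part of the module) of how the default Comparator
+# in type_api dispatches through this table by operator name::
+#
+#     o = operator_lookup[op.__name__]
+#     return o[0](expr, op, *other, **o[1])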
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/dml.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/dml.py
new file mode 100644
index 00000000..f5071146
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/dml.py
@@ -0,0 +1,1837 @@
+# sql/dml.py
+# Copyright (C) 2009-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+"""
+Provide :class:`_expression.Insert`, :class:`_expression.Update` and
+:class:`_expression.Delete`.
+
+"""
+from __future__ import annotations
+
+import collections.abc as collections_abc
+import operator
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import Iterable
+from typing import List
+from typing import MutableMapping
+from typing import NoReturn
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import coercions
+from . import roles
+from . import util as sql_util
+from ._typing import _TP
+from ._typing import _unexpected_kw
+from ._typing import is_column_element
+from ._typing import is_named_from_clause
+from .base import _entity_namespace_key
+from .base import _exclusive_against
+from .base import _from_objects
+from .base import _generative
+from .base import _select_iterables
+from .base import ColumnCollection
+from .base import ColumnSet
+from .base import CompileState
+from .base import DialectKWArgs
+from .base import Executable
+from .base import Generative
+from .base import HasCompileState
+from .elements import BooleanClauseList
+from .elements import ClauseElement
+from .elements import ColumnClause
+from .elements import ColumnElement
+from .elements import Null
+from .selectable import Alias
+from .selectable import ExecutableReturnsRows
+from .selectable import FromClause
+from .selectable import HasCTE
+from .selectable import HasPrefixes
+from .selectable import Join
+from .selectable import SelectLabelStyle
+from .selectable import TableClause
+from .selectable import TypedReturnsRows
+from .sqltypes import NullType
+from .visitors import InternalTraversal
+from .. import exc
+from .. import util
+from ..util.typing import Self
+from ..util.typing import TypeGuard
+
+if TYPE_CHECKING:
+    from ._typing import _ColumnExpressionArgument
+    from ._typing import _ColumnsClauseArgument
+    from ._typing import _DMLColumnArgument
+    from ._typing import _DMLColumnKeyMapping
+    from ._typing import _DMLTableArgument
+    from ._typing import _T0  # noqa
+    from ._typing import _T1  # noqa
+    from ._typing import _T2  # noqa
+    from ._typing import _T3  # noqa
+    from ._typing import _T4  # noqa
+    from ._typing import _T5  # noqa
+    from ._typing import _T6  # noqa
+    from ._typing import _T7  # noqa
+    from ._typing import _TypedColumnClauseArgument as _TCCA  # noqa
+    from .base import ReadOnlyColumnCollection
+    from .compiler import SQLCompiler
+    from .elements import KeyedColumnElement
+    from .selectable import _ColumnsClauseElement
+    from .selectable import _SelectIterable
+    from .selectable import Select
+    from .selectable import Selectable
+
+    def isupdate(dml: DMLState) -> TypeGuard[UpdateDMLState]: ...
+
+    def isdelete(dml: DMLState) -> TypeGuard[DeleteDMLState]: ...
+
+    def isinsert(dml: DMLState) -> TypeGuard[InsertDMLState]: ...
+
+else:
+    isupdate = operator.attrgetter("isupdate")
+    isdelete = operator.attrgetter("isdelete")
+    isinsert = operator.attrgetter("isinsert")
+
+
+_T = TypeVar("_T", bound=Any)
+
+_DMLColumnElement = Union[str, ColumnClause[Any]]
+_DMLTableElement = Union[TableClause, Alias, Join]
+
+
+class DMLState(CompileState):
+    _no_parameters = True
+    _dict_parameters: Optional[MutableMapping[_DMLColumnElement, Any]] = None
+    _multi_parameters: Optional[
+        List[MutableMapping[_DMLColumnElement, Any]]
+    ] = None
+    _ordered_values: Optional[List[Tuple[_DMLColumnElement, Any]]] = None
+    _parameter_ordering: Optional[List[_DMLColumnElement]] = None
+    _primary_table: FromClause
+    _supports_implicit_returning = True
+
+    isupdate = False
+    isdelete = False
+    isinsert = False
+
+    statement: UpdateBase
+
+    def __init__(
+        self, statement: UpdateBase, compiler: SQLCompiler, **kw: Any
+    ):
+        raise NotImplementedError()
+
+    @classmethod
+    def get_entity_description(cls, statement: UpdateBase) -> Dict[str, Any]:
+        return {
+            "name": (
+                statement.table.name
+                if is_named_from_clause(statement.table)
+                else None
+            ),
+            "table": statement.table,
+        }
+
+    @classmethod
+    def get_returning_column_descriptions(
+        cls, statement: UpdateBase
+    ) -> List[Dict[str, Any]]:
+        return [
+            {
+                "name": c.key,
+                "type": c.type,
+                "expr": c,
+            }
+            for c in statement._all_selected_columns
+        ]
+
+    @property
+    def dml_table(self) -> _DMLTableElement:
+        return self.statement.table
+
+    if TYPE_CHECKING:
+
+        @classmethod
+        def get_plugin_class(cls, statement: Executable) -> Type[DMLState]: ...
+
+    @classmethod
+    def _get_multi_crud_kv_pairs(
+        cls,
+        statement: UpdateBase,
+        multi_kv_iterator: Iterable[Dict[_DMLColumnArgument, Any]],
+    ) -> List[Dict[_DMLColumnElement, Any]]:
+        return [
+            {
+                coercions.expect(roles.DMLColumnRole, k): v
+                for k, v in mapping.items()
+            }
+            for mapping in multi_kv_iterator
+        ]
+
+    @classmethod
+    def _get_crud_kv_pairs(
+        cls,
+        statement: UpdateBase,
+        kv_iterator: Iterable[Tuple[_DMLColumnArgument, Any]],
+        needs_to_be_cacheable: bool,
+    ) -> List[Tuple[_DMLColumnElement, Any]]:
+        return [
+            (
+                coercions.expect(roles.DMLColumnRole, k),
+                (
+                    v
+                    if not needs_to_be_cacheable
+                    else coercions.expect(
+                        roles.ExpressionElementRole,
+                        v,
+                        type_=NullType(),
+                        is_crud=True,
+                    )
+                ),
+            )
+            for k, v in kv_iterator
+        ]
+
+    def _make_extra_froms(
+        self, statement: DMLWhereBase
+    ) -> Tuple[FromClause, List[FromClause]]:
+        froms: List[FromClause] = []
+
+        all_tables = list(sql_util.tables_from_leftmost(statement.table))
+        primary_table = all_tables[0]
+        seen = {primary_table}
+
+        consider = statement._where_criteria
+        if self._dict_parameters:
+            consider += tuple(self._dict_parameters.values())
+
+        for crit in consider:
+            for item in _from_objects(crit):
+                if not seen.intersection(item._cloned_set):
+                    froms.append(item)
+                seen.update(item._cloned_set)
+
+        froms.extend(all_tables[1:])
+        return primary_table, froms
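+
+    # A short sketch (not part of the class): tables referenced in the
+    # WHERE clause or SET values beyond the statement's own table become
+    # "extra froms", enabling multi-table UPDATE/DELETE on supporting
+    # backends::
+    #
+    #     from sqlalchemy import column, table, update
+    #
+    #     t1 = table("t1", column("id"), column("x"))
+    #     t2 = table("t2", column("t1_id"), column("y"))
+    #     stmt = update(t1).where(t1.c.id == t2.c.t1_id).values(x=t2.c.y)
+    #     # t1 is the primary table; t2 is collected as an extra from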
+
+    def _process_values(self, statement: ValuesBase) -> None:
+        if self._no_parameters:
+            self._dict_parameters = statement._values
+            self._no_parameters = False
+
+    def _process_select_values(self, statement: ValuesBase) -> None:
+        assert statement._select_names is not None
+        parameters: MutableMapping[_DMLColumnElement, Any] = {
+            name: Null() for name in statement._select_names
+        }
+
+        if self._no_parameters:
+            self._no_parameters = False
+            self._dict_parameters = parameters
+        else:
+            # this condition is normally not reachable, as the Insert
+            # construct does not allow this combination to occur
+            assert False, "This statement already has parameters"
+
+    def _no_multi_values_supported(self, statement: ValuesBase) -> NoReturn:
+        raise exc.InvalidRequestError(
+            "%s construct does not support "
+            "multiple parameter sets." % statement.__visit_name__.upper()
+        )
+
+    def _cant_mix_formats_error(self) -> NoReturn:
+        raise exc.InvalidRequestError(
+            "Can't mix single and multiple VALUES "
+            "formats in one INSERT statement; one style appends to a "
+            "list while the other replaces values, so the intent is "
+            "ambiguous."
+        )
+
+
+@CompileState.plugin_for("default", "insert")
+class InsertDMLState(DMLState):
+    isinsert = True
+
+    include_table_with_column_exprs = False
+
+    _has_multi_parameters = False
+
+    def __init__(
+        self,
+        statement: Insert,
+        compiler: SQLCompiler,
+        disable_implicit_returning: bool = False,
+        **kw: Any,
+    ):
+        self.statement = statement
+        self._primary_table = statement.table
+
+        if disable_implicit_returning:
+            self._supports_implicit_returning = False
+
+        self.isinsert = True
+        if statement._select_names:
+            self._process_select_values(statement)
+        if statement._values is not None:
+            self._process_values(statement)
+        if statement._multi_values:
+            self._process_multi_values(statement)
+
+    @util.memoized_property
+    def _insert_col_keys(self) -> List[str]:
+        # this is also done in crud.py -> _key_getters_for_crud_column
+        return [
+            coercions.expect(roles.DMLColumnRole, col, as_key=True)
+            for col in self._dict_parameters or ()
+        ]
+
+    def _process_values(self, statement: ValuesBase) -> None:
+        if self._no_parameters:
+            self._has_multi_parameters = False
+            self._dict_parameters = statement._values
+            self._no_parameters = False
+        elif self._has_multi_parameters:
+            self._cant_mix_formats_error()
+
+    def _process_multi_values(self, statement: ValuesBase) -> None:
+        for parameters in statement._multi_values:
+            multi_parameters: List[MutableMapping[_DMLColumnElement, Any]] = [
+                (
+                    {
+                        c.key: value
+                        for c, value in zip(statement.table.c, parameter_set)
+                    }
+                    if isinstance(parameter_set, collections_abc.Sequence)
+                    else parameter_set
+                )
+                for parameter_set in parameters
+            ]
+
+            if self._no_parameters:
+                self._no_parameters = False
+                self._has_multi_parameters = True
+                self._multi_parameters = multi_parameters
+                self._dict_parameters = self._multi_parameters[0]
+            elif not self._has_multi_parameters:
+                self._cant_mix_formats_error()
+            else:
+                assert self._multi_parameters
+                self._multi_parameters.extend(multi_parameters)
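+
+
+# Illustration (not part of the module): the "multiple values" form
+# handled above comes from passing a list to Insert.values()::
+#
+#     from sqlalchemy import column, insert, table
+#
+#     users = table("users", column("id"), column("name"))
+#     stmt = insert(users).values([{"name": "a"}, {"name": "b"}])
+#     # compiles to a single INSERT ... VALUES (:name_m0), (:name_m1)
+#     # on backends that support the multi-VALUES syntax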
+
+
+@CompileState.plugin_for("default", "update")
+class UpdateDMLState(DMLState):
+    isupdate = True
+
+    include_table_with_column_exprs = False
+
+    def __init__(self, statement: Update, compiler: SQLCompiler, **kw: Any):
+        self.statement = statement
+
+        self.isupdate = True
+        if statement._ordered_values is not None:
+            self._process_ordered_values(statement)
+        elif statement._values is not None:
+            self._process_values(statement)
+        elif statement._multi_values:
+            self._no_multi_values_supported(statement)
+        t, ef = self._make_extra_froms(statement)
+        self._primary_table = t
+        self._extra_froms = ef
+
+        self.is_multitable = mt = ef
+        self.include_table_with_column_exprs = bool(
+            mt and compiler.render_table_with_column_in_update_from
+        )
+
+    def _process_ordered_values(self, statement: ValuesBase) -> None:
+        parameters = statement._ordered_values
+
+        if self._no_parameters:
+            self._no_parameters = False
+            assert parameters is not None
+            self._dict_parameters = dict(parameters)
+            self._ordered_values = parameters
+            self._parameter_ordering = [key for key, value in parameters]
+        else:
+            raise exc.InvalidRequestError(
+                "Can only invoke ordered_values() once, and not mixed "
+                "with any other values() call"
+            )
+
+
+@CompileState.plugin_for("default", "delete")
+class DeleteDMLState(DMLState):
+    isdelete = True
+
+    def __init__(self, statement: Delete, compiler: SQLCompiler, **kw: Any):
+        self.statement = statement
+
+        self.isdelete = True
+        t, ef = self._make_extra_froms(statement)
+        self._primary_table = t
+        self._extra_froms = ef
+        self.is_multitable = ef
+
+
+class UpdateBase(
+    roles.DMLRole,
+    HasCTE,
+    HasCompileState,
+    DialectKWArgs,
+    HasPrefixes,
+    Generative,
+    ExecutableReturnsRows,
+    ClauseElement,
+):
+    """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements."""
+
+    __visit_name__ = "update_base"
+
+    _hints: util.immutabledict[Tuple[_DMLTableElement, str], str] = (
+        util.EMPTY_DICT
+    )
+    named_with_column = False
+
+    _label_style: SelectLabelStyle = (
+        SelectLabelStyle.LABEL_STYLE_DISAMBIGUATE_ONLY
+    )
+    table: _DMLTableElement
+
+    _return_defaults = False
+    _return_defaults_columns: Optional[Tuple[_ColumnsClauseElement, ...]] = (
+        None
+    )
+    _supplemental_returning: Optional[Tuple[_ColumnsClauseElement, ...]] = None
+    _returning: Tuple[_ColumnsClauseElement, ...] = ()
+
+    is_dml = True
+
+    def _generate_fromclause_column_proxies(
+        self,
+        fromclause: FromClause,
+        columns: ColumnCollection[str, KeyedColumnElement[Any]],
+        primary_key: ColumnSet,
+        foreign_keys: Set[KeyedColumnElement[Any]],
+    ) -> None:
+        columns._populate_separate_keys(
+            col._make_proxy(
+                fromclause, primary_key=primary_key, foreign_keys=foreign_keys
+            )
+            for col in self._all_selected_columns
+            if is_column_element(col)
+        )
+
+    def params(self, *arg: Any, **kw: Any) -> NoReturn:
+        """Set the parameters for the statement.
+
+        This method raises ``NotImplementedError`` on the base class,
+        and is overridden by :class:`.ValuesBase` to provide the
+        SET/VALUES clause of UPDATE and INSERT.
+
+        """
+        raise NotImplementedError(
+            "params() is not supported for INSERT/UPDATE/DELETE statements."
+            " To set the values for an INSERT or UPDATE statement, use"
+            " stmt.values(**parameters)."
+        )
+
+    @_generative
+    def with_dialect_options(self, **opt: Any) -> Self:
+        """Add dialect options to this INSERT/UPDATE/DELETE object.
+
+        e.g.::
+
+            upd = table.update().with_dialect_options(mysql_limit=10)
+
+        .. versionadded:: 1.4 - this method supersedes the dialect options
+           associated with the constructor.
+
+
+        """
+        self._validate_dialect_kwargs(opt)
+        return self
+
+    @_generative
+    def return_defaults(
+        self,
+        *cols: _DMLColumnArgument,
+        supplemental_cols: Optional[Iterable[_DMLColumnArgument]] = None,
+        sort_by_parameter_order: bool = False,
+    ) -> Self:
+        """Make use of a :term:`RETURNING` clause for the purpose
+        of fetching server-side expressions and defaults, for supporting
+        backends only.
+
+        .. deepalchemy::
+
+            The :meth:`.UpdateBase.return_defaults` method is used by the ORM
+            for its internal work in fetching newly generated primary key
+            and server default values, in particular to provide the underlying
+            implementation of the :paramref:`_orm.Mapper.eager_defaults`
+            ORM feature as well as to allow RETURNING support with bulk
+            ORM inserts.  Its behavior is fairly idiosyncratic
+            and is not really intended for general use.  End users should
+            stick with using :meth:`.UpdateBase.returning` in order to
+            add RETURNING clauses to their INSERT, UPDATE and DELETE
+            statements.
+
+        Normally, a single row INSERT statement will automatically populate the
+        :attr:`.CursorResult.inserted_primary_key` attribute when executed,
+        which stores the primary key of the row that was just inserted in the
+        form of a :class:`.Row` object with column names as named tuple keys
+        (and the :attr:`.Row._mapping` view fully populated as well). The
+        dialect in use chooses the strategy to use in order to populate this
+        data; if it was generated using server-side defaults and / or SQL
+        expressions, dialect-specific approaches such as ``cursor.lastrowid``
+        or ``RETURNING`` are typically used to acquire the new primary key
+        value.
+
+        However, when the statement is modified by calling
+        :meth:`.UpdateBase.return_defaults` before executing the statement,
+        additional behaviors take place **only** for backends that support
+        RETURNING and for :class:`.Table` objects that maintain the
+        :paramref:`.Table.implicit_returning` parameter at its default value of
+        ``True``. In these cases, when the :class:`.CursorResult` is returned
+        from the statement's execution, not only will
+        :attr:`.CursorResult.inserted_primary_key` be populated as always, the
+        :attr:`.CursorResult.returned_defaults` attribute will also be
+        populated with a :class:`.Row` named-tuple representing the full range
+        of server generated
+        values from that single row, including values for any columns that
+        specify :paramref:`_schema.Column.server_default` or which make use of
+        :paramref:`_schema.Column.default` using a SQL expression.
+
+        When invoking INSERT statements with multiple rows using
+        :ref:`insertmanyvalues <engine_insertmanyvalues>`, the
+        :meth:`.UpdateBase.return_defaults` modifier will have the effect of
+        the :attr:`_engine.CursorResult.inserted_primary_key_rows` and
+        :attr:`_engine.CursorResult.returned_defaults_rows` attributes being
+        fully populated with lists of :class:`.Row` objects representing newly
+        inserted primary key values as well as newly inserted server generated
+        values for each row inserted. The
+        :attr:`.CursorResult.inserted_primary_key` and
+        :attr:`.CursorResult.returned_defaults` attributes will also continue
+        to be populated with the first row of these two collections.
+
+        If the backend does not support RETURNING or the :class:`.Table` in use
+        has disabled :paramref:`.Table.implicit_returning`, then no RETURNING
+        clause is added and no additional data is fetched; however, the
+        INSERT, UPDATE or DELETE statement proceeds normally.
+
+        E.g.::
+
+            stmt = table.insert().values(data="newdata").return_defaults()
+
+            result = connection.execute(stmt)
+
+            server_created_at = result.returned_defaults["created_at"]
+
+        When used against an UPDATE statement
+        :meth:`.UpdateBase.return_defaults` instead looks for columns that
+        include :paramref:`_schema.Column.onupdate` or
+        :paramref:`_schema.Column.server_onupdate` parameters assigned, when
+        constructing the columns that will be included in the RETURNING clause
+        by default if explicit columns were not specified. When used against a
+        DELETE statement, no columns are included in RETURNING by default, they
+        instead must be specified explicitly as there are no columns that
+        normally change values when a DELETE statement proceeds.
+
+        .. versionadded:: 2.0  :meth:`.UpdateBase.return_defaults` is supported
+           for DELETE statements also and has been moved from
+           :class:`.ValuesBase` to :class:`.UpdateBase`.
+
+        The :meth:`.UpdateBase.return_defaults` method is mutually exclusive
+        against the :meth:`.UpdateBase.returning` method and errors will be
+        raised during the SQL compilation process if both are used at the same
+        time on one statement. The RETURNING clause of the INSERT, UPDATE or
+        DELETE statement is therefore controlled by only one of these methods
+        at a time.
+
+        The :meth:`.UpdateBase.return_defaults` method differs from
+        :meth:`.UpdateBase.returning` in these ways:
+
+        1. The :meth:`.UpdateBase.return_defaults` method causes the
+           :attr:`.CursorResult.returned_defaults` collection to be populated
+           with the first row from the RETURNING result. This attribute is not
+           populated when using :meth:`.UpdateBase.returning`.
+
+        2. :meth:`.UpdateBase.return_defaults` is compatible with existing
+           logic used to fetch auto-generated primary key values that are then
+           populated into the :attr:`.CursorResult.inserted_primary_key`
+           attribute. By contrast, using :meth:`.UpdateBase.returning` will
+           have the effect of the :attr:`.CursorResult.inserted_primary_key`
+           attribute being left unpopulated.
+
+        3. :meth:`.UpdateBase.return_defaults` can be called against any
+           backend. Backends that don't support RETURNING will skip the usage
+           of the feature, rather than raising an exception, *unless*
+           ``supplemental_cols`` is passed. The return value
+           of :attr:`_engine.CursorResult.returned_defaults` will be ``None``
+           for backends that don't support RETURNING or for which the target
+           :class:`.Table` sets :paramref:`.Table.implicit_returning` to
+           ``False``.
+
+        4. An INSERT statement invoked with executemany() is supported if the
+           backend database driver supports the
+           :ref:`insertmanyvalues <engine_insertmanyvalues>`
+           feature which is now supported by most SQLAlchemy-included backends.
+           When executemany is used, the
+           :attr:`_engine.CursorResult.returned_defaults_rows` and
+           :attr:`_engine.CursorResult.inserted_primary_key_rows` accessors
+           will return the inserted defaults and primary keys.
+
+           .. versionadded:: 1.4 Added
+              :attr:`_engine.CursorResult.returned_defaults_rows` and
+              :attr:`_engine.CursorResult.inserted_primary_key_rows` accessors.
+              In version 2.0, the underlying implementation which fetches and
+              populates the data for these attributes was generalized to be
+              supported by most backends, whereas in 1.4 they were only
+              supported by the ``psycopg2`` driver.
+
+
+        :param cols: optional list of column key names or
+         :class:`_schema.Column` that acts as a filter for those columns that
+         will be fetched.
+        :param supplemental_cols: optional list of RETURNING expressions,
+          in the same form as one would pass to the
+          :meth:`.UpdateBase.returning` method. When present, the additional
+          columns will be included in the RETURNING clause, and the
+          :class:`.CursorResult` object will be "rewound" when returned, so
+          that methods like :meth:`.CursorResult.all` will return new rows
+          mostly as though the statement used :meth:`.UpdateBase.returning`
+          directly. However, unlike when using :meth:`.UpdateBase.returning`
+          directly, the **order of the columns is undefined**, so they can
+          only be targeted using names or :attr:`.Row._mapping` keys; they
+          cannot reliably be targeted positionally.
+
+          .. versionadded:: 2.0
+
+        :param sort_by_parameter_order: for a batch INSERT that is being
+         executed against multiple parameter sets, organize the results of
+         RETURNING so that the returned rows correspond to the order of
+         parameter sets passed in.  This applies only to an :term:`executemany`
+         execution for supporting dialects and typically makes use of the
+         :term:`insertmanyvalues` feature.
+
+         .. versionadded:: 2.0.10
+
+         .. seealso::
+
+            :ref:`engine_insertmanyvalues_returning_order` - background on
+            sorting of RETURNING rows for bulk INSERT
+
+        .. seealso::
+
+            :meth:`.UpdateBase.returning`
+
+            :attr:`_engine.CursorResult.returned_defaults`
+
+            :attr:`_engine.CursorResult.returned_defaults_rows`
+
+            :attr:`_engine.CursorResult.inserted_primary_key`
+
+            :attr:`_engine.CursorResult.inserted_primary_key_rows`
+
+        """
+
+        if self._return_defaults:
+            # note _return_defaults_columns = () means return all columns,
+            # so if we have been here before, only update collection if there
+            # are columns in the collection
+            if self._return_defaults_columns and cols:
+                self._return_defaults_columns = tuple(
+                    util.OrderedSet(self._return_defaults_columns).union(
+                        coercions.expect(roles.ColumnsClauseRole, c)
+                        for c in cols
+                    )
+                )
+            else:
+                # set for all columns
+                self._return_defaults_columns = ()
+        else:
+            self._return_defaults_columns = tuple(
+                coercions.expect(roles.ColumnsClauseRole, c) for c in cols
+            )
+        self._return_defaults = True
+        if sort_by_parameter_order:
+            if not self.is_insert:
+                raise exc.ArgumentError(
+                    "The 'sort_by_parameter_order' argument to "
+                    "return_defaults() only applies to INSERT statements"
+                )
+            self._sort_by_parameter_order = True
+        if supplemental_cols:
+            # uniquifying while also maintaining order (maintaining order
+            # matters for test suites but also for vertical splicing)
+            supplemental_col_tup = (
+                coercions.expect(roles.ColumnsClauseRole, c)
+                for c in supplemental_cols
+            )
+
+            if self._supplemental_returning is None:
+                self._supplemental_returning = tuple(
+                    util.unique_list(supplemental_col_tup)
+                )
+            else:
+                self._supplemental_returning = tuple(
+                    util.unique_list(
+                        self._supplemental_returning
+                        + tuple(supplemental_col_tup)
+                    )
+                )
+
+        return self
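+
+    # A usage sketch (assuming a hypothetical Table ``my_table`` with an
+    # ``id`` primary key and a server-generated ``created_at`` column)::
+    #
+    #     stmt = my_table.insert().return_defaults(
+    #         my_table.c.id, supplemental_cols=[my_table.c.created_at]
+    #     )
+    #     # RETURNING includes created_at alongside the filtered default
+    #     # columns; rows remain accessible via the result as described
+    #     # for supplemental_cols above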
+
+    def is_derived_from(self, fromclause: Optional[FromClause]) -> bool:
+        """Return ``True`` if this :class:`.ReturnsRows` is
+        'derived' from the given :class:`.FromClause`.
+
+        Since these are DML statements, we don't want them ever being
+        adapted, so this always returns ``False``.
+
+        """
+        return False
+
+    @_generative
+    def returning(
+        self,
+        *cols: _ColumnsClauseArgument[Any],
+        sort_by_parameter_order: bool = False,
+        **__kw: Any,
+    ) -> UpdateBase:
+        r"""Add a :term:`RETURNING` or equivalent clause to this statement.
+
+        e.g.:
+
+        .. sourcecode:: pycon+sql
+
+            >>> stmt = (
+            ...     table.update()
+            ...     .where(table.c.data == "value")
+            ...     .values(status="X")
+            ...     .returning(table.c.server_flag, table.c.updated_timestamp)
+            ... )
+            >>> print(stmt)
+            {printsql}UPDATE some_table SET status=:status
+            WHERE some_table.data = :data_1
+            RETURNING some_table.server_flag, some_table.updated_timestamp
+
+        The method may be invoked multiple times to add new entries to the
+        list of expressions to be returned.
+
+        .. versionadded:: 1.4.0b2 The method may be invoked multiple times to
+         add new entries to the list of expressions to be returned.
+
+        The given collection of column expressions should be derived from the
+        table that is the target of the INSERT, UPDATE, or DELETE.  While
+        :class:`_schema.Column` objects are typical, the elements can also be
+        expressions:
+
+        .. sourcecode:: pycon+sql
+
+            >>> stmt = table.insert().returning(
+            ...     (table.c.first_name + " " + table.c.last_name).label("fullname")
+            ... )
+            >>> print(stmt)
+            {printsql}INSERT INTO some_table (first_name, last_name)
+            VALUES (:first_name, :last_name)
+            RETURNING some_table.first_name || :first_name_1 || some_table.last_name AS fullname
+
+        Upon compilation, a RETURNING clause, or database equivalent,
+        will be rendered within the statement.   For INSERT and UPDATE,
+        the values are the newly inserted/updated values.  For DELETE,
+        the values are those of the rows which were deleted.
+
+        Upon execution, the values of the columns to be returned are made
+        available via the result set and can be iterated using
+        :meth:`_engine.CursorResult.fetchone` and similar.
+        For DBAPIs which do not
+        natively support returning values (e.g. cx_oracle), SQLAlchemy will
+        approximate this behavior at the result level so that a reasonable
+        amount of behavioral neutrality is provided.
+
+        Note that not all databases/DBAPIs
+        support RETURNING.   For those backends with no support,
+        an exception is raised upon compilation and/or execution.
+        For those who do support it, the functionality across backends
+        varies greatly, including restrictions on executemany()
+        and other statements which return multiple rows. Please
+        read the documentation notes for the database in use in
+        order to determine the availability of RETURNING.
+
+        :param \*cols: series of columns, SQL expressions, or whole tables
+         entities to be returned.
+        :param sort_by_parameter_order: for a batch INSERT that is being
+         executed against multiple parameter sets, organize the results of
+         RETURNING so that the returned rows correspond to the order of
+         parameter sets passed in.  This applies only to an :term:`executemany`
+         execution for supporting dialects and typically makes use of the
+         :term:`insertmanyvalues` feature.
+
+         .. versionadded:: 2.0.10
+
+         .. seealso::
+
+            :ref:`engine_insertmanyvalues_returning_order` - background on
+            sorting of RETURNING rows for bulk INSERT (Core level discussion)
+
+            :ref:`orm_queryguide_bulk_insert_returning_ordered` - example of
+            use with :ref:`orm_queryguide_bulk_insert` (ORM level discussion)
+
+        .. seealso::
+
+          :meth:`.UpdateBase.return_defaults` - an alternative method tailored
+          towards efficient fetching of server-side defaults and triggers
+          for single-row INSERTs or UPDATEs.
+
+          :ref:`tutorial_insert_returning` - in the :ref:`unified_tutorial`
+
+        """  # noqa: E501
+        if __kw:
+            raise _unexpected_kw("UpdateBase.returning()", __kw)
+        if self._return_defaults:
+            raise exc.InvalidRequestError(
+                "return_defaults() is already configured on this statement"
+            )
+        self._returning += tuple(
+            coercions.expect(roles.ColumnsClauseRole, c) for c in cols
+        )
+        if sort_by_parameter_order:
+            if not self.is_insert:
+                raise exc.ArgumentError(
+                    "The 'sort_by_parameter_order' argument to returning() "
+                    "only applies to INSERT statements"
+                )
+            self._sort_by_parameter_order = True
+        return self
+
+    def corresponding_column(
+        self, column: KeyedColumnElement[Any], require_embedded: bool = False
+    ) -> Optional[ColumnElement[Any]]:
+        return self.exported_columns.corresponding_column(
+            column, require_embedded=require_embedded
+        )
+
+    @util.ro_memoized_property
+    def _all_selected_columns(self) -> _SelectIterable:
+        return [c for c in _select_iterables(self._returning)]
+
+    @util.ro_memoized_property
+    def exported_columns(
+        self,
+    ) -> ReadOnlyColumnCollection[Optional[str], ColumnElement[Any]]:
+        """Return the RETURNING columns as a column collection for this
+        statement.
+
+        .. versionadded:: 1.4
+
+        """
+        return ColumnCollection(
+            (c.key, c)
+            for c in self._all_selected_columns
+            if is_column_element(c)
+        ).as_readonly()
+
+    @_generative
+    def with_hint(
+        self,
+        text: str,
+        selectable: Optional[_DMLTableArgument] = None,
+        dialect_name: str = "*",
+    ) -> Self:
+        """Add a table hint for a single table to this
+        INSERT/UPDATE/DELETE statement.
+
+        .. note::
+
+         :meth:`.UpdateBase.with_hint` currently applies only to
+         Microsoft SQL Server.  For MySQL INSERT/UPDATE/DELETE hints, use
+         :meth:`.UpdateBase.prefix_with`.
+
+        The text of the hint is rendered in the appropriate
+        location for the database backend in use, relative
+        to the :class:`_schema.Table` that is the subject of this
+        statement, or optionally to that of the given
+        :class:`_schema.Table` passed as the ``selectable`` argument.
+
+        The ``dialect_name`` option will limit the rendering of a particular
+        hint to a particular backend. Such as, to add a hint
+        that only takes effect for SQL Server::
+
+            mytable.insert().with_hint("WITH (PAGLOCK)", dialect_name="mssql")
+
+        :param text: Text of the hint.
+        :param selectable: optional :class:`_schema.Table` that specifies
+         an element of the FROM clause within an UPDATE or DELETE
+         to be the subject of the hint - applies only to certain backends.
+        :param dialect_name: defaults to ``*``, if specified as the name
+         of a particular dialect, will apply these hints only when
+         that dialect is in use.
+        """
+        if selectable is None:
+            selectable = self.table
+        else:
+            selectable = coercions.expect(roles.DMLTableRole, selectable)
+        self._hints = self._hints.union({(selectable, dialect_name): text})
+        return self
+
+    @property
+    def entity_description(self) -> Dict[str, Any]:
+        """Return a :term:`plugin-enabled` description of the table and/or
+        entity which this DML construct is operating against.
+
+        This attribute is generally useful when using the ORM, as an
+        extended structure which includes information about mapped
+        entities is returned.  The section :ref:`queryguide_inspection`
+        contains more background.
+
+        For a Core statement, the structure returned by this accessor
+        is derived from the :attr:`.UpdateBase.table` attribute, and
+        refers to the :class:`.Table` being inserted, updated, or deleted::
+
+            >>> stmt = insert(user_table)
+            >>> stmt.entity_description
+            {
+                "name": "user_table",
+                "table": Table("user_table", ...)
+            }
+
+        .. versionadded:: 1.4.33
+
+        .. seealso::
+
+            :attr:`.UpdateBase.returning_column_descriptions`
+
+            :attr:`.Select.column_descriptions` - entity information for
+            a :func:`.select` construct
+
+            :ref:`queryguide_inspection` - ORM background
+
+        """
+        meth = DMLState.get_plugin_class(self).get_entity_description
+        return meth(self)
+
+    @property
+    def returning_column_descriptions(self) -> List[Dict[str, Any]]:
+        """Return a :term:`plugin-enabled` description of the columns
+        which this DML construct is RETURNING against, in other words
+        the expressions established as part of :meth:`.UpdateBase.returning`.
+
+        This attribute is generally useful when using the ORM, as an
+        extended structure which includes information about mapped
+        entities is returned.  The section :ref:`queryguide_inspection`
+        contains more background.
+
+        For a Core statement, the structure returned by this accessor is
+        derived from the same objects that are returned by the
+        :attr:`.UpdateBase.exported_columns` accessor::
+
+            >>> stmt = insert(user_table).returning(user_table.c.id, user_table.c.name)
+            >>> stmt.returning_column_descriptions
+            [
+                {
+                    "name": "id",
+                    "type": Integer,
+                    "expr": Column("id", Integer(), table=<user>, ...)
+                },
+                {
+                    "name": "name",
+                    "type": String(),
+                    "expr": Column("name", String(), table=<user>, ...)
+                },
+            ]
+
+        .. versionadded:: 1.4.33
+
+        .. seealso::
+
+            :attr:`.UpdateBase.entity_description`
+
+            :attr:`.Select.column_descriptions` - entity information for
+            a :func:`.select` construct
+
+            :ref:`queryguide_inspection` - ORM background
+
+        """  # noqa: E501
+        meth = DMLState.get_plugin_class(
+            self
+        ).get_returning_column_descriptions
+        return meth(self)
+
+
+class ValuesBase(UpdateBase):
+    """Supplies support for :meth:`.ValuesBase.values` to
+    INSERT and UPDATE constructs."""
+
+    __visit_name__ = "values_base"
+
+    _supports_multi_parameters = False
+
+    select: Optional[Select[Any]] = None
+    """SELECT statement for INSERT .. FROM SELECT"""
+
+    _post_values_clause: Optional[ClauseElement] = None
+    """used by extensions to Insert etc. to add additional syntacitcal
+    constructs, e.g. ON CONFLICT etc."""
+
+    _values: Optional[util.immutabledict[_DMLColumnElement, Any]] = None
+    _multi_values: Tuple[
+        Union[
+            Sequence[Dict[_DMLColumnElement, Any]],
+            Sequence[Sequence[Any]],
+        ],
+        ...,
+    ] = ()
+
+    _ordered_values: Optional[List[Tuple[_DMLColumnElement, Any]]] = None
+
+    _select_names: Optional[List[str]] = None
+    _inline: bool = False
+
+    def __init__(self, table: _DMLTableArgument):
+        self.table = coercions.expect(
+            roles.DMLTableRole, table, apply_propagate_attrs=self
+        )
+
+    @_generative
+    @_exclusive_against(
+        "_select_names",
+        "_ordered_values",
+        msgs={
+            "_select_names": "This construct already inserts from a SELECT",
+            "_ordered_values": "This statement already has ordered "
+            "values present",
+        },
+    )
+    def values(
+        self,
+        *args: Union[
+            _DMLColumnKeyMapping[Any],
+            Sequence[Any],
+        ],
+        **kwargs: Any,
+    ) -> Self:
+        r"""Specify a fixed VALUES clause for an INSERT statement, or the SET
+        clause for an UPDATE.
+
+        Note that the :class:`_expression.Insert` and
+        :class:`_expression.Update`
+        constructs support
+        per-execution time formatting of the VALUES and/or SET clauses,
+        based on the arguments passed to :meth:`_engine.Connection.execute`.
+        However, the :meth:`.ValuesBase.values` method can be used to "fix" a
+        particular set of parameters into the statement.
+
+        Multiple calls to :meth:`.ValuesBase.values` will produce a new
+        construct, each one with the parameter list modified to include
+        the new parameters sent.  In the typical case of a single
+        dictionary of parameters, the newly passed keys will replace
+        the same keys in the previous construct.  In the case of a list-based
+        "multiple values" construct, each new list of values is extended
+        onto the existing list of values.
+
+        :param \**kwargs: key value pairs representing the string key
+          of a :class:`_schema.Column`
+          mapped to the value to be rendered into the
+          VALUES or SET clause::
+
+                users.insert().values(name="some name")
+
+                users.update().where(users.c.id == 5).values(name="some name")
+
+        :param \*args: As an alternative to passing key/value parameters,
+         a dictionary, tuple, or list of dictionaries or tuples can be passed
+         as a single positional argument in order to form the VALUES or
+         SET clause of the statement.  The forms that are accepted vary
+         based on whether this is an :class:`_expression.Insert` or an
+         :class:`_expression.Update` construct.
+
+         For either an :class:`_expression.Insert` or
+         :class:`_expression.Update`
+         construct, a single dictionary can be passed, which works the same as
+         that of the kwargs form::
+
+            users.insert().values({"name": "some name"})
+
+            users.update().values({"name": "some new name"})
+
+         Also for either form but more typically for the
+         :class:`_expression.Insert` construct, a tuple that contains an
+         entry for every column in the table is also accepted::
+
+            users.insert().values((5, "some name"))
+
+         The :class:`_expression.Insert` construct also supports being
+         passed a list of dictionaries or full-table-tuples, which on the
+         server will render the less common SQL syntax of "multiple values" -
+         this syntax is supported on backends such as SQLite, PostgreSQL,
+         MySQL, but not necessarily others::
+
+            users.insert().values(
+                [
+                    {"name": "some name"},
+                    {"name": "some other name"},
+                    {"name": "yet another name"},
+                ]
+            )
+
+         The above form would render a multiple VALUES statement similar to:
+
+         .. sourcecode:: sql
+
+                INSERT INTO users (name) VALUES
+                                (:name_1),
+                                (:name_2),
+                                (:name_3)
+
+         It is essential to note that **passing multiple values is
+         NOT the same as using traditional executemany() form**.  The above
+         syntax is a **special** syntax not typically used.  To emit an
+         INSERT statement against multiple rows, the normal method is
+         to pass a multiple values list to the
+         :meth:`_engine.Connection.execute`
+         method, which is supported by all database backends and is generally
+         more efficient for a very large number of parameters.
+
+           .. seealso::
+
+               :ref:`tutorial_multiple_parameters` - an introduction to
+               the traditional Core method of multiple parameter set
+               invocation for INSERTs and other statements.
+
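+         As an illustrative sketch of that conventional form, assuming
+         ``conn`` is a :class:`_engine.Connection`::
+
+            conn.execute(
+                users.insert(),
+                [{"name": "some name"}, {"name": "some other name"}],
+            )
+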
+         The UPDATE construct also supports rendering the SET parameters
+         in a specific order.  For this feature refer to the
+         :meth:`_expression.Update.ordered_values` method.
+
+           .. seealso::
+
+              :meth:`_expression.Update.ordered_values`
+
+
+        """
+        if args:
+            # positional case.  this is currently expensive.   we don't
+            # yet have positional-only args so we have to check the length.
+            # then we need to check multiparams vs. single dictionary.
+            # since the parameter format is needed in order to determine
+            # a cache key, we need to determine this up front.
+            arg = args[0]
+
+            if kwargs:
+                raise exc.ArgumentError(
+                    "Can't pass positional and kwargs to values() "
+                    "simultaneously"
+                )
+            elif len(args) > 1:
+                raise exc.ArgumentError(
+                    "Only a single dictionary/tuple or list of "
+                    "dictionaries/tuples is accepted positionally."
+                )
+
+            elif isinstance(arg, collections_abc.Sequence):
+                if arg and isinstance(arg[0], dict):
+                    multi_kv_generator = DMLState.get_plugin_class(
+                        self
+                    )._get_multi_crud_kv_pairs
+                    self._multi_values += (multi_kv_generator(self, arg),)
+                    return self
+
+                if arg and isinstance(arg[0], (list, tuple)):
+                    self._multi_values += (arg,)
+                    return self
+
+                if TYPE_CHECKING:
+                    # crud.py raises during compilation if this is not the
+                    # case
+                    assert isinstance(self, Insert)
+
+                # tuple values
+                arg = {c.key: value for c, value in zip(self.table.c, arg)}
+
+        else:
+            # kwarg path.  this is the most common path for non-multi-params
+            # so this is fairly quick.  note that ``args`` is known to be
+            # empty in this branch, so no positional/keyword conflict is
+            # possible here.
+            arg = cast("Dict[_DMLColumnArgument, Any]", kwargs)
+
+        # for top level values(), convert literals to anonymous bound
+        # parameters at statement construction time, so that these values can
+        # participate in the cache key process like any other ClauseElement.
+        # crud.py now intercepts bound parameters with unique=True from here
+        # and ensures they get the "crud"-style name when rendered.
+
+        kv_generator = DMLState.get_plugin_class(self)._get_crud_kv_pairs
+        coerced_arg = dict(kv_generator(self, arg.items(), True))
+        if self._values:
+            self._values = self._values.union(coerced_arg)
+        else:
+            self._values = util.immutabledict(coerced_arg)
+        return self
+
+
+class Insert(ValuesBase):
+    """Represent an INSERT construct.
+
+    The :class:`_expression.Insert` object is created using the
+    :func:`_expression.insert()` function.
+
+    """
+
+    __visit_name__ = "insert"
+
+    _supports_multi_parameters = True
+
+    select = None
+    include_insert_from_select_defaults = False
+
+    _sort_by_parameter_order: bool = False
+
+    is_insert = True
+
+    table: TableClause
+
+    _traverse_internals = (
+        [
+            ("table", InternalTraversal.dp_clauseelement),
+            ("_inline", InternalTraversal.dp_boolean),
+            ("_select_names", InternalTraversal.dp_string_list),
+            ("_values", InternalTraversal.dp_dml_values),
+            ("_multi_values", InternalTraversal.dp_dml_multi_values),
+            ("select", InternalTraversal.dp_clauseelement),
+            ("_post_values_clause", InternalTraversal.dp_clauseelement),
+            ("_returning", InternalTraversal.dp_clauseelement_tuple),
+            ("_hints", InternalTraversal.dp_table_hint_list),
+            ("_return_defaults", InternalTraversal.dp_boolean),
+            (
+                "_return_defaults_columns",
+                InternalTraversal.dp_clauseelement_tuple,
+            ),
+            ("_sort_by_parameter_order", InternalTraversal.dp_boolean),
+        ]
+        + HasPrefixes._has_prefixes_traverse_internals
+        + DialectKWArgs._dialect_kwargs_traverse_internals
+        + Executable._executable_traverse_internals
+        + HasCTE._has_ctes_traverse_internals
+    )
+
+    def __init__(self, table: _DMLTableArgument):
+        super().__init__(table)
+
+    @_generative
+    def inline(self) -> Self:
+        """Make this :class:`_expression.Insert` construct "inline" .
+
+        When set, no attempt will be made to retrieve the
+        SQL-generated default values to be provided within the statement;
+        in particular,
+        this allows SQL expressions to be rendered 'inline' within the
+        statement without the need to pre-execute them beforehand; for
+        backends that support "returning", this turns off the "implicit
+        returning" feature for the statement.
+
+
+        .. versionchanged:: 1.4 the :paramref:`_expression.Insert.inline`
+           parameter
+           is now superseded by the :meth:`_expression.Insert.inline` method.
+
+        """
+        self._inline = True
+        return self
+
+    @_generative
+    def from_select(
+        self,
+        names: Sequence[_DMLColumnArgument],
+        select: Selectable,
+        include_defaults: bool = True,
+    ) -> Self:
+        """Return a new :class:`_expression.Insert` construct which represents
+        an ``INSERT...FROM SELECT`` statement.
+
+        e.g.::
+
+            sel = select(table1.c.a, table1.c.b).where(table1.c.c > 5)
+            ins = table2.insert().from_select(["a", "b"], sel)
+
+        :param names: a sequence of string column names or
+         :class:`_schema.Column`
+         objects representing the target columns.
+        :param select: a :func:`_expression.select` construct,
+         :class:`_expression.FromClause`
+         or other construct which resolves into a
+         :class:`_expression.FromClause`,
+         such as an ORM :class:`_query.Query` object, etc.  The order of
+         columns returned from this FROM clause should correspond to the
+         order of columns sent as the ``names`` parameter;  while this
+         is not checked before passing along to the database, the database
+         would normally raise an exception if these column lists don't
+         correspond.
+        :param include_defaults: if True, non-server default values and
+         SQL expressions as specified on :class:`_schema.Column` objects
+         (as documented in :ref:`metadata_defaults_toplevel`) not
+         otherwise specified in the list of names will be rendered
+         into the INSERT and SELECT statements, so that these values are also
+         included in the data to be inserted.
+
+         .. note:: A Python-side default that uses a Python callable function
+            will only be invoked **once** for the whole statement, and **not
+            per row**.
+
+        """
+
+        if self._values:
+            raise exc.InvalidRequestError(
+                "This construct already inserts value expressions"
+            )
+
+        self._select_names = [
+            coercions.expect(roles.DMLColumnRole, name, as_key=True)
+            for name in names
+        ]
+        self._inline = True
+        self.include_insert_from_select_defaults = include_defaults
+        self.select = coercions.expect(roles.DMLSelectRole, select)
+        return self
+
+    if TYPE_CHECKING:
+        # START OVERLOADED FUNCTIONS self.returning ReturningInsert 1-8 ", *, sort_by_parameter_order: bool = False"  # noqa: E501
+
+        # code within this block is **programmatically,
+        # statically generated** by tools/generate_tuple_map_overloads.py
+
+        @overload
+        def returning(
+            self, __ent0: _TCCA[_T0], *, sort_by_parameter_order: bool = False
+        ) -> ReturningInsert[Tuple[_T0]]: ...
+
+        @overload
+        def returning(
+            self,
+            __ent0: _TCCA[_T0],
+            __ent1: _TCCA[_T1],
+            *,
+            sort_by_parameter_order: bool = False,
+        ) -> ReturningInsert[Tuple[_T0, _T1]]: ...
+
+        @overload
+        def returning(
+            self,
+            __ent0: _TCCA[_T0],
+            __ent1: _TCCA[_T1],
+            __ent2: _TCCA[_T2],
+            *,
+            sort_by_parameter_order: bool = False,
+        ) -> ReturningInsert[Tuple[_T0, _T1, _T2]]: ...
+
+        @overload
+        def returning(
+            self,
+            __ent0: _TCCA[_T0],
+            __ent1: _TCCA[_T1],
+            __ent2: _TCCA[_T2],
+            __ent3: _TCCA[_T3],
+            *,
+            sort_by_parameter_order: bool = False,
+        ) -> ReturningInsert[Tuple[_T0, _T1, _T2, _T3]]: ...
+
+        @overload
+        def returning(
+            self,
+            __ent0: _TCCA[_T0],
+            __ent1: _TCCA[_T1],
+            __ent2: _TCCA[_T2],
+            __ent3: _TCCA[_T3],
+            __ent4: _TCCA[_T4],
+            *,
+            sort_by_parameter_order: bool = False,
+        ) -> ReturningInsert[Tuple[_T0, _T1, _T2, _T3, _T4]]: ...
+
+        @overload
+        def returning(
+            self,
+            __ent0: _TCCA[_T0],
+            __ent1: _TCCA[_T1],
+            __ent2: _TCCA[_T2],
+            __ent3: _TCCA[_T3],
+            __ent4: _TCCA[_T4],
+            __ent5: _TCCA[_T5],
+            *,
+            sort_by_parameter_order: bool = False,
+        ) -> ReturningInsert[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ...
+
+        @overload
+        def returning(
+            self,
+            __ent0: _TCCA[_T0],
+            __ent1: _TCCA[_T1],
+            __ent2: _TCCA[_T2],
+            __ent3: _TCCA[_T3],
+            __ent4: _TCCA[_T4],
+            __ent5: _TCCA[_T5],
+            __ent6: _TCCA[_T6],
+            *,
+            sort_by_parameter_order: bool = False,
+        ) -> ReturningInsert[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ...
+
+        @overload
+        def returning(
+            self,
+            __ent0: _TCCA[_T0],
+            __ent1: _TCCA[_T1],
+            __ent2: _TCCA[_T2],
+            __ent3: _TCCA[_T3],
+            __ent4: _TCCA[_T4],
+            __ent5: _TCCA[_T5],
+            __ent6: _TCCA[_T6],
+            __ent7: _TCCA[_T7],
+            *,
+            sort_by_parameter_order: bool = False,
+        ) -> ReturningInsert[
+            Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]
+        ]: ...
+
+        # END OVERLOADED FUNCTIONS self.returning
+
+        @overload
+        def returning(
+            self,
+            *cols: _ColumnsClauseArgument[Any],
+            sort_by_parameter_order: bool = False,
+            **__kw: Any,
+        ) -> ReturningInsert[Any]: ...
+
+        def returning(
+            self,
+            *cols: _ColumnsClauseArgument[Any],
+            sort_by_parameter_order: bool = False,
+            **__kw: Any,
+        ) -> ReturningInsert[Any]: ...
+
+
+class ReturningInsert(Insert, TypedReturnsRows[_TP]):
+    """Typing-only class that establishes a generic type form of
+    :class:`.Insert` which tracks returned column types.
+
+    This datatype is delivered when calling the
+    :meth:`.Insert.returning` method.
+
+    .. versionadded:: 2.0
+
+    """
+
+
+class DMLWhereBase:
+    table: _DMLTableElement
+    _where_criteria: Tuple[ColumnElement[Any], ...] = ()
+
+    @_generative
+    def where(self, *whereclause: _ColumnExpressionArgument[bool]) -> Self:
+        """Return a new construct with the given expression(s) added to
+        its WHERE clause, joined to the existing clause via AND, if any.
+
+        Both :meth:`_dml.Update.where` and :meth:`_dml.Delete.where`
+        support multiple-table forms, including database-specific
+        ``UPDATE...FROM`` as well as ``DELETE..USING``.  For backends that
+        don't have multiple-table support, a backend agnostic approach
+        to using multiple tables is to make use of correlated subqueries.
+        See the linked tutorial sections below for examples.
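+
+        As a minimal multiple-table sketch (``users`` and ``addresses`` are
+        illustrative tables)::
+
+            stmt = (
+                users.update()
+                .where(users.c.id == addresses.c.user_id)
+                .where(addresses.c.email_address == "ed@example.com")
+                .values(fullname="Ed")
+            )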
+
+        .. seealso::
+
+            :ref:`tutorial_correlated_updates`
+
+            :ref:`tutorial_update_from`
+
+            :ref:`tutorial_multi_table_deletes`
+
+        """
+
+        for criterion in whereclause:
+            where_criteria: ColumnElement[Any] = coercions.expect(
+                roles.WhereHavingRole, criterion, apply_propagate_attrs=self
+            )
+            self._where_criteria += (where_criteria,)
+        return self
+
+    def filter(self, *criteria: roles.ExpressionElementRole[Any]) -> Self:
+        """A synonym for the :meth:`_dml.DMLWhereBase.where` method.
+
+        .. versionadded:: 1.4
+
+        """
+
+        return self.where(*criteria)
+
+    def _filter_by_zero(self) -> _DMLTableElement:
+        return self.table
+
+    def filter_by(self, **kwargs: Any) -> Self:
+        r"""apply the given filtering criterion as a WHERE clause
+        to this select.
+
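+        E.g., a minimal sketch against a plain :class:`_schema.Table`
+        (``users`` is illustrative)::
+
+            stmt = users.delete().filter_by(name="some name")
+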
+        """
+        from_entity = self._filter_by_zero()
+
+        clauses = [
+            _entity_namespace_key(from_entity, key) == value
+            for key, value in kwargs.items()
+        ]
+        return self.filter(*clauses)
+
+    @property
+    def whereclause(self) -> Optional[ColumnElement[Any]]:
+        """Return the completed WHERE clause for this :class:`.DMLWhereBase`
+        statement.
+
+        This assembles the current collection of WHERE criteria
+        into a single :class:`_expression.BooleanClauseList` construct.
+
+
+        .. versionadded:: 1.4
+
+        """
+
+        return BooleanClauseList._construct_for_whereclause(
+            self._where_criteria
+        )
+
+
+class Update(DMLWhereBase, ValuesBase):
+    """Represent an Update construct.
+
+    The :class:`_expression.Update` object is created using the
+    :func:`_expression.update()` function.
+
+    """
+
+    __visit_name__ = "update"
+
+    is_update = True
+
+    _traverse_internals = (
+        [
+            ("table", InternalTraversal.dp_clauseelement),
+            ("_where_criteria", InternalTraversal.dp_clauseelement_tuple),
+            ("_inline", InternalTraversal.dp_boolean),
+            ("_ordered_values", InternalTraversal.dp_dml_ordered_values),
+            ("_values", InternalTraversal.dp_dml_values),
+            ("_returning", InternalTraversal.dp_clauseelement_tuple),
+            ("_hints", InternalTraversal.dp_table_hint_list),
+            ("_return_defaults", InternalTraversal.dp_boolean),
+            (
+                "_return_defaults_columns",
+                InternalTraversal.dp_clauseelement_tuple,
+            ),
+        ]
+        + HasPrefixes._has_prefixes_traverse_internals
+        + DialectKWArgs._dialect_kwargs_traverse_internals
+        + Executable._executable_traverse_internals
+        + HasCTE._has_ctes_traverse_internals
+    )
+
+    def __init__(self, table: _DMLTableArgument):
+        super().__init__(table)
+
+    @_generative
+    def ordered_values(self, *args: Tuple[_DMLColumnArgument, Any]) -> Self:
+        """Specify the VALUES clause of this UPDATE statement with an explicit
+        parameter ordering that will be maintained in the SET clause of the
+        resulting UPDATE statement.
+
+        E.g.::
+
+            stmt = table.update().ordered_values(("name", "ed"), ("ident", "foo"))
+
+        .. seealso::
+
+           :ref:`tutorial_parameter_ordered_updates` - full example of the
+           :meth:`_expression.Update.ordered_values` method.
+
+        .. versionchanged:: 1.4 The :meth:`_expression.Update.ordered_values`
+           method
+           supersedes the
+           :paramref:`_expression.update.preserve_parameter_order`
+           parameter, which was removed in SQLAlchemy 2.0.
+
+        """  # noqa: E501
+        if self._values:
+            raise exc.ArgumentError(
+                "This statement already has values present"
+            )
+        elif self._ordered_values:
+            raise exc.ArgumentError(
+                "This statement already has ordered values present"
+            )
+
+        kv_generator = DMLState.get_plugin_class(self)._get_crud_kv_pairs
+        self._ordered_values = kv_generator(self, args, True)
+        return self
+
+    @_generative
+    def inline(self) -> Self:
+        """Make this :class:`_expression.Update` construct "inline" .
+
+        When set, SQL defaults present on :class:`_schema.Column`
+        objects via the
+        ``default`` keyword will be compiled 'inline' into the statement and
+        not pre-executed.  This means that their values will not be available
+        in the dictionary returned from
+        :meth:`_engine.CursorResult.last_updated_params`.
+
+        .. versionchanged:: 1.4 the :paramref:`_expression.update.inline`
+           parameter
+           is now superseded by the :meth:`_expression.Update.inline` method.
+
+        """
+        self._inline = True
+        return self
+
+    if TYPE_CHECKING:
+        # START OVERLOADED FUNCTIONS self.returning ReturningUpdate 1-8
+
+        # code within this block is **programmatically,
+        # statically generated** by tools/generate_tuple_map_overloads.py
+
+        @overload
+        def returning(
+            self, __ent0: _TCCA[_T0]
+        ) -> ReturningUpdate[Tuple[_T0]]: ...
+
+        @overload
+        def returning(
+            self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1]
+        ) -> ReturningUpdate[Tuple[_T0, _T1]]: ...
+
+        @overload
+        def returning(
+            self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2]
+        ) -> ReturningUpdate[Tuple[_T0, _T1, _T2]]: ...
+
+        @overload
+        def returning(
+            self,
+            __ent0: _TCCA[_T0],
+            __ent1: _TCCA[_T1],
+            __ent2: _TCCA[_T2],
+            __ent3: _TCCA[_T3],
+        ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3]]: ...
+
+        @overload
+        def returning(
+            self,
+            __ent0: _TCCA[_T0],
+            __ent1: _TCCA[_T1],
+            __ent2: _TCCA[_T2],
+            __ent3: _TCCA[_T3],
+            __ent4: _TCCA[_T4],
+        ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3, _T4]]: ...
+
+        @overload
+        def returning(
+            self,
+            __ent0: _TCCA[_T0],
+            __ent1: _TCCA[_T1],
+            __ent2: _TCCA[_T2],
+            __ent3: _TCCA[_T3],
+            __ent4: _TCCA[_T4],
+            __ent5: _TCCA[_T5],
+        ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ...
+
+        @overload
+        def returning(
+            self,
+            __ent0: _TCCA[_T0],
+            __ent1: _TCCA[_T1],
+            __ent2: _TCCA[_T2],
+            __ent3: _TCCA[_T3],
+            __ent4: _TCCA[_T4],
+            __ent5: _TCCA[_T5],
+            __ent6: _TCCA[_T6],
+        ) -> ReturningUpdate[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ...
+
+        @overload
+        def returning(
+            self,
+            __ent0: _TCCA[_T0],
+            __ent1: _TCCA[_T1],
+            __ent2: _TCCA[_T2],
+            __ent3: _TCCA[_T3],
+            __ent4: _TCCA[_T4],
+            __ent5: _TCCA[_T5],
+            __ent6: _TCCA[_T6],
+            __ent7: _TCCA[_T7],
+        ) -> ReturningUpdate[
+            Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]
+        ]: ...
+
+        # END OVERLOADED FUNCTIONS self.returning
+
+        @overload
+        def returning(
+            self, *cols: _ColumnsClauseArgument[Any], **__kw: Any
+        ) -> ReturningUpdate[Any]: ...
+
+        def returning(
+            self, *cols: _ColumnsClauseArgument[Any], **__kw: Any
+        ) -> ReturningUpdate[Any]: ...
+
+
+class ReturningUpdate(Update, TypedReturnsRows[_TP]):
+    """Typing-only class that establishes a generic type form of
+    :class:`.Update` which tracks returned column types.
+
+    This datatype is delivered when calling the
+    :meth:`.Update.returning` method.
+
+    .. versionadded:: 2.0
+
+    """
+
+
+class Delete(DMLWhereBase, UpdateBase):
+    """Represent a DELETE construct.
+
+    The :class:`_expression.Delete` object is created using the
+    :func:`_expression.delete()` function.
+
+    """
+
+    __visit_name__ = "delete"
+
+    is_delete = True
+
+    _traverse_internals = (
+        [
+            ("table", InternalTraversal.dp_clauseelement),
+            ("_where_criteria", InternalTraversal.dp_clauseelement_tuple),
+            ("_returning", InternalTraversal.dp_clauseelement_tuple),
+            ("_hints", InternalTraversal.dp_table_hint_list),
+        ]
+        + HasPrefixes._has_prefixes_traverse_internals
+        + DialectKWArgs._dialect_kwargs_traverse_internals
+        + Executable._executable_traverse_internals
+        + HasCTE._has_ctes_traverse_internals
+    )
+
+    def __init__(self, table: _DMLTableArgument):
+        self.table = coercions.expect(
+            roles.DMLTableRole, table, apply_propagate_attrs=self
+        )
+
+    if TYPE_CHECKING:
+        # START OVERLOADED FUNCTIONS self.returning ReturningDelete 1-8
+
+        # code within this block is **programmatically,
+        # statically generated** by tools/generate_tuple_map_overloads.py
+
+        @overload
+        def returning(
+            self, __ent0: _TCCA[_T0]
+        ) -> ReturningDelete[Tuple[_T0]]: ...
+
+        @overload
+        def returning(
+            self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1]
+        ) -> ReturningDelete[Tuple[_T0, _T1]]: ...
+
+        @overload
+        def returning(
+            self, __ent0: _TCCA[_T0], __ent1: _TCCA[_T1], __ent2: _TCCA[_T2]
+        ) -> ReturningDelete[Tuple[_T0, _T1, _T2]]: ...
+
+        @overload
+        def returning(
+            self,
+            __ent0: _TCCA[_T0],
+            __ent1: _TCCA[_T1],
+            __ent2: _TCCA[_T2],
+            __ent3: _TCCA[_T3],
+        ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3]]: ...
+
+        @overload
+        def returning(
+            self,
+            __ent0: _TCCA[_T0],
+            __ent1: _TCCA[_T1],
+            __ent2: _TCCA[_T2],
+            __ent3: _TCCA[_T3],
+            __ent4: _TCCA[_T4],
+        ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3, _T4]]: ...
+
+        @overload
+        def returning(
+            self,
+            __ent0: _TCCA[_T0],
+            __ent1: _TCCA[_T1],
+            __ent2: _TCCA[_T2],
+            __ent3: _TCCA[_T3],
+            __ent4: _TCCA[_T4],
+            __ent5: _TCCA[_T5],
+        ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ...
+
+        @overload
+        def returning(
+            self,
+            __ent0: _TCCA[_T0],
+            __ent1: _TCCA[_T1],
+            __ent2: _TCCA[_T2],
+            __ent3: _TCCA[_T3],
+            __ent4: _TCCA[_T4],
+            __ent5: _TCCA[_T5],
+            __ent6: _TCCA[_T6],
+        ) -> ReturningDelete[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ...
+
+        @overload
+        def returning(
+            self,
+            __ent0: _TCCA[_T0],
+            __ent1: _TCCA[_T1],
+            __ent2: _TCCA[_T2],
+            __ent3: _TCCA[_T3],
+            __ent4: _TCCA[_T4],
+            __ent5: _TCCA[_T5],
+            __ent6: _TCCA[_T6],
+            __ent7: _TCCA[_T7],
+        ) -> ReturningDelete[
+            Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]
+        ]: ...
+
+        # END OVERLOADED FUNCTIONS self.returning
+
+        @overload
+        def returning(
+            self, *cols: _ColumnsClauseArgument[Any], **__kw: Any
+        ) -> ReturningDelete[Any]: ...
+
+        def returning(
+            self, *cols: _ColumnsClauseArgument[Any], **__kw: Any
+        ) -> ReturningDelete[Any]: ...
+
+
+class ReturningDelete(Delete, TypedReturnsRows[_TP]):
+    """Typing-only class that establishes a generic type form of
+    :class:`.Delete` which tracks returned column types.
+
+    This datatype is delivered when calling the
+    :meth:`.Delete.returning` method.
+
+    .. versionadded:: 2.0
+
+    """
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/elements.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/elements.py
new file mode 100644
index 00000000..cd1dc34e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/elements.py
@@ -0,0 +1,5537 @@
+# sql/elements.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+"""Core SQL expression elements, including :class:`_expression.ClauseElement`,
+:class:`_expression.ColumnElement`, and derived classes.
+
+"""
+
+from __future__ import annotations
+
+from decimal import Decimal
+from enum import Enum
+import itertools
+import operator
+import re
+import typing
+from typing import AbstractSet
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import FrozenSet
+from typing import Generic
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Set
+from typing import Tuple as typing_Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import coercions
+from . import operators
+from . import roles
+from . import traversals
+from . import type_api
+from ._typing import has_schema_attr
+from ._typing import is_named_from_clause
+from ._typing import is_quoted_name
+from ._typing import is_tuple_type
+from .annotation import Annotated
+from .annotation import SupportsWrappingAnnotations
+from .base import _clone
+from .base import _expand_cloned
+from .base import _generative
+from .base import _NoArg
+from .base import Executable
+from .base import Generative
+from .base import HasMemoized
+from .base import Immutable
+from .base import NO_ARG
+from .base import SingletonConstant
+from .cache_key import MemoizedHasCacheKey
+from .cache_key import NO_CACHE
+from .coercions import _document_text_coercion  # noqa
+from .operators import ColumnOperators
+from .traversals import HasCopyInternals
+from .visitors import cloned_traverse
+from .visitors import ExternallyTraversible
+from .visitors import InternalTraversal
+from .visitors import traverse
+from .visitors import Visitable
+from .. import exc
+from .. import inspection
+from .. import util
+from ..util import HasMemoized_ro_memoized_attribute
+from ..util import TypingOnly
+from ..util.typing import Literal
+from ..util.typing import ParamSpec
+from ..util.typing import Self
+
+if typing.TYPE_CHECKING:
+    from ._typing import _ByArgument
+    from ._typing import _ColumnExpressionArgument
+    from ._typing import _ColumnExpressionOrStrLabelArgument
+    from ._typing import _HasDialect
+    from ._typing import _InfoType
+    from ._typing import _PropagateAttrsType
+    from ._typing import _TypeEngineArgument
+    from .base import ColumnSet
+    from .cache_key import _CacheKeyTraversalType
+    from .cache_key import CacheKey
+    from .compiler import Compiled
+    from .compiler import SQLCompiler
+    from .functions import FunctionElement
+    from .operators import OperatorType
+    from .schema import Column
+    from .schema import DefaultGenerator
+    from .schema import FetchedValue
+    from .schema import ForeignKey
+    from .selectable import _SelectIterable
+    from .selectable import FromClause
+    from .selectable import NamedFromClause
+    from .selectable import TextualSelect
+    from .sqltypes import TupleType
+    from .type_api import TypeEngine
+    from .visitors import _CloneCallableType
+    from .visitors import _TraverseInternalsType
+    from .visitors import anon_map
+    from ..engine import Connection
+    from ..engine import Dialect
+    from ..engine.interfaces import _CoreMultiExecuteParams
+    from ..engine.interfaces import CacheStats
+    from ..engine.interfaces import CompiledCacheType
+    from ..engine.interfaces import CoreExecuteOptionsParameter
+    from ..engine.interfaces import SchemaTranslateMapType
+    from ..engine.result import Result
+
+_NUMERIC = Union[float, Decimal]
+_NUMBER = Union[float, int, Decimal]
+
+_T = TypeVar("_T", bound="Any")
+_T_co = TypeVar("_T_co", bound=Any, covariant=True)
+_OPT = TypeVar("_OPT", bound="Any")
+_NT = TypeVar("_NT", bound="_NUMERIC")
+
+_NMT = TypeVar("_NMT", bound="_NUMBER")
+
+
+@overload
+def literal(
+    value: Any,
+    type_: _TypeEngineArgument[_T],
+    literal_execute: bool = False,
+) -> BindParameter[_T]: ...
+
+
+@overload
+def literal(
+    value: _T,
+    type_: None = None,
+    literal_execute: bool = False,
+) -> BindParameter[_T]: ...
+
+
+@overload
+def literal(
+    value: Any,
+    type_: Optional[_TypeEngineArgument[Any]] = None,
+    literal_execute: bool = False,
+) -> BindParameter[Any]: ...
+
+
+def literal(
+    value: Any,
+    type_: Optional[_TypeEngineArgument[Any]] = None,
+    literal_execute: bool = False,
+) -> BindParameter[Any]:
+    r"""Return a literal clause, bound to a bind parameter.
+
+    Literal clauses are created automatically when non-
+    :class:`_expression.ClauseElement` objects (such as strings, ints, dates,
+    etc.) are
+    used in a comparison operation with a :class:`_expression.ColumnElement`
+    subclass,
+    such as a :class:`~sqlalchemy.schema.Column` object.  Use this function
+    to force the generation of a literal clause, which will be created as a
+    :class:`BindParameter` with a bound value.
+
+    :param value: the value to be bound. Can be any Python object supported by
+     the underlying DB-API, or is translatable via the given type argument.
+
+    :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` which will
+     provide bind-parameter translation for this literal.
+
+    :param literal_execute: optional bool, when True, the SQL engine will
+     attempt to render the bound value directly in the SQL statement at
+     execution time rather than providing it as a parameter value.
+
+     .. versionadded:: 2.0
+
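+    As a short sketch, forcing a typed bound parameter::
+
+        from sqlalchemy import Integer, literal
+
+        expr = literal(5, Integer)  # BindParameter carrying the value 5
+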
+    """
+    return coercions.expect(
+        roles.LiteralValueRole,
+        value,
+        type_=type_,
+        literal_execute=literal_execute,
+    )
+
+
+def literal_column(
+    text: str, type_: Optional[_TypeEngineArgument[_T]] = None
+) -> ColumnClause[_T]:
+    r"""Produce a :class:`.ColumnClause` object that has the
+    :paramref:`_expression.column.is_literal` flag set to True.
+
+    :func:`_expression.literal_column` is similar to
+    :func:`_expression.column`, except that
+    it is more often used as a "standalone" column expression that renders
+    exactly as stated; while :func:`_expression.column`
+    stores a string name that
+    will be assumed to be part of a table and may be quoted as such,
+    :func:`_expression.literal_column` can be that,
+    or any other arbitrary column-oriented
+    expression.
+
+    :param text: the text of the expression; can be any SQL expression.
+      Quoting rules will not be applied. To specify a column-name expression
+      which should be subject to quoting rules, use the :func:`column`
+      function.
+
+    :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine`
+      object which will
+      provide result-set translation and additional expression semantics for
+      this column. If left as ``None`` the type will be :class:`.NullType`.
+
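+    E.g., a brief sketch rendering an arbitrary textual fragment as a
+    column expression::
+
+        from sqlalchemy import literal_column, select
+
+        stmt = select(literal_column("x + y").label("total"))
+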
+    .. seealso::
+
+        :func:`_expression.column`
+
+        :func:`_expression.text`
+
+        :ref:`tutorial_select_arbitrary_text`
+
+    """
+    return ColumnClause(text, type_=type_, is_literal=True)
+
+
+class CompilerElement(Visitable):
+    """base class for SQL elements that can be compiled to produce a
+    SQL string.
+
+    .. versionadded:: 2.0
+
+    """
+
+    __slots__ = ()
+    __visit_name__ = "compiler_element"
+
+    supports_execution = False
+
+    stringify_dialect = "default"
+
+    @util.preload_module("sqlalchemy.engine.default")
+    @util.preload_module("sqlalchemy.engine.url")
+    def compile(
+        self,
+        bind: Optional[_HasDialect] = None,
+        dialect: Optional[Dialect] = None,
+        **kw: Any,
+    ) -> Compiled:
+        """Compile this SQL expression.
+
+        The return value is a :class:`~.Compiled` object.
+        Calling ``str()`` on the returned value will yield a
+        string representation of the result. The
+        :class:`~.Compiled` object also can return a
+        dictionary of bind parameter names and values
+        using the ``params`` accessor.
+
+        :param bind: An :class:`.Connection` or :class:`.Engine` which
+           can provide a :class:`.Dialect` in order to generate a
+           :class:`.Compiled` object.  If the ``bind`` and
+           ``dialect`` parameters are both omitted, a default SQL compiler
+           is used.
+
+        :param column_keys: Used for INSERT and UPDATE statements, a list of
+            column names which should be present in the VALUES clause of the
+            compiled statement. If ``None``, all columns from the target table
+            object are rendered.
+
+        :param dialect: A :class:`.Dialect` instance which can generate
+            a :class:`.Compiled` object.  This argument takes precedence over
+            the ``bind`` argument.
+
+        :param compile_kwargs: optional dictionary of additional parameters
+            that will be passed through to the compiler within all "visit"
+            methods.  This allows any custom flag to be passed through to
+            a custom compilation construct, for example.  It is also used
+            for the case of passing the ``literal_binds`` flag through::
+
+                from sqlalchemy.sql import table, column, select
+
+                t = table("t", column("x"))
+
+                s = select(t).where(t.c.x == 5)
+
+                print(s.compile(compile_kwargs={"literal_binds": True}))
+
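+        A common sketch for dialect-specific stringification, using the
+        ``postgresql`` dialect as an arbitrary example::
+
+            from sqlalchemy.dialects import postgresql
+
+            print(s.compile(dialect=postgresql.dialect()))
+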
+        .. seealso::
+
+            :ref:`faq_sql_expression_string`
+
+        """
+
+        if dialect is None:
+            if bind:
+                dialect = bind.dialect
+            elif self.stringify_dialect == "default":
+                dialect = self._default_dialect()
+            else:
+                url = util.preloaded.engine_url
+                dialect = url.URL.create(
+                    self.stringify_dialect
+                ).get_dialect()()
+
+        return self._compiler(dialect, **kw)
+
+    def _default_dialect(self):
+        default = util.preloaded.engine_default
+        return default.StrCompileDialect()
+
+    def _compiler(self, dialect: Dialect, **kw: Any) -> Compiled:
+        """Return a compiler appropriate for this ClauseElement, given a
+        Dialect."""
+
+        if TYPE_CHECKING:
+            assert isinstance(self, ClauseElement)
+        return dialect.statement_compiler(dialect, self, **kw)
+
+    def __str__(self) -> str:
+        return str(self.compile())
+
+
+@inspection._self_inspects
+class ClauseElement(
+    SupportsWrappingAnnotations,
+    MemoizedHasCacheKey,
+    HasCopyInternals,
+    ExternallyTraversible,
+    CompilerElement,
+):
+    """Base class for elements of a programmatically constructed SQL
+    expression.
+
+    """
+
+    __visit_name__ = "clause"
+
+    if TYPE_CHECKING:
+
+        @util.memoized_property
+        def _propagate_attrs(self) -> _PropagateAttrsType:
+            """like annotations, however these propagate outwards liberally
+            as SQL constructs are built, and are set up at construction time.
+
+            """
+            ...
+
+    else:
+        _propagate_attrs = util.EMPTY_DICT
+
+    @util.ro_memoized_property
+    def description(self) -> Optional[str]:
+        return None
+
+    _is_clone_of: Optional[Self] = None
+
+    is_clause_element = True
+    is_selectable = False
+    is_dml = False
+    _is_column_element = False
+    _is_keyed_column_element = False
+    _is_table = False
+    _gen_static_annotations_cache_key = False
+    _is_textual = False
+    _is_from_clause = False
+    _is_returns_rows = False
+    _is_text_clause = False
+    _is_from_container = False
+    _is_select_container = False
+    _is_select_base = False
+    _is_select_statement = False
+    _is_bind_parameter = False
+    _is_clause_list = False
+    _is_lambda_element = False
+    _is_singleton_constant = False
+    _is_immutable = False
+    _is_star = False
+
+    @property
+    def _order_by_label_element(self) -> Optional[Label[Any]]:
+        return None
+
+    _cache_key_traversal: _CacheKeyTraversalType = None
+
+    negation_clause: ColumnElement[bool]
+
+    if typing.TYPE_CHECKING:
+
+        def get_children(
+            self, *, omit_attrs: typing_Tuple[str, ...] = ..., **kw: Any
+        ) -> Iterable[ClauseElement]: ...
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return []
+
+    def _set_propagate_attrs(self, values: Mapping[str, Any]) -> Self:
+        # usually, self._propagate_attrs is empty here.  one case where it's
+        # not is a subquery against an ORM select that is then pulled as a
+        # property of an aliased class.  should all be good
+
+        # assert not self._propagate_attrs
+
+        self._propagate_attrs = util.immutabledict(values)
+        return self
+
+    def _default_compiler(self) -> SQLCompiler:
+        dialect = self._default_dialect()
+        return dialect.statement_compiler(dialect, self)  # type: ignore
+
+    def _clone(self, **kw: Any) -> Self:
+        """Create a shallow copy of this ClauseElement.
+
+        This method may be used by a generative API.  It's also used as
+        part of the "deep" copy afforded by a traversal that combines it
+        with the _copy_internals() method.
+
+        """
+
+        skip = self._memoized_keys
+        c = self.__class__.__new__(self.__class__)
+
+        if skip:
+            # ensure this iteration remains atomic
+            c.__dict__ = {
+                k: v for k, v in self.__dict__.copy().items() if k not in skip
+            }
+        else:
+            c.__dict__ = self.__dict__.copy()
+
+        # this is a marker that helps to "equate" clauses to each other
+        # when a Select returns its list of FROM clauses.  the cloning
+        # process leaves around a lot of remnants of the previous clause
+        # typically in the form of column expressions still attached to the
+        # old table.
+        cc = self._is_clone_of
+        c._is_clone_of = cc if cc is not None else self
+        return c
+
+    def _negate_in_binary(self, negated_op, original_op):
+        """a hook to allow the right side of a binary expression to respond
+        to a negation of the binary expression.
+
+        Used for the special case of an expanding bind parameter with IN.
+
+        """
+        return self
+
+    def _with_binary_element_type(self, type_):
+        """in the context of binary expression, convert the type of this
+        object to the one given.
+
+        applies only to :class:`_expression.ColumnElement` classes.
+
+        """
+        return self
+
+    @property
+    def _constructor(self):
+        """return the 'constructor' for this ClauseElement.
+
+        This is for the purpose of creating a new object of
+        this type.  Usually, it's just the element's __class__.
+        However, the "Annotated" version of the object overrides
+        to return the class of its proxied element.
+
+        """
+        return self.__class__
+
+    @HasMemoized.memoized_attribute
+    def _cloned_set(self):
+        """Return the set consisting all cloned ancestors of this
+        ClauseElement.
+
+        Includes this ClauseElement.  This accessor tends to be used for
+        FromClause objects to identify 'equivalent' FROM clauses, regardless
+        of transformative operations.
+
+        """
+        s = util.column_set()
+        f: Optional[ClauseElement] = self
+
+        # note this creates a cycle, asserted in test_memusage. however,
+        # turning this into a plain @property adds tens of thousands of method
+        # calls to Core / ORM performance tests, so the small overhead
+        # introduced by the relatively small number of short-term cycles
+        # produced here is preferable
+        while f is not None:
+            s.add(f)
+            f = f._is_clone_of
+        return s
+
+    def _de_clone(self):
+        while self._is_clone_of is not None:
+            self = self._is_clone_of
+        return self
+
+    @property
+    def entity_namespace(self):
+        raise AttributeError(
+            "This SQL expression has no entity namespace "
+            "with which to filter from."
+        )
+
+    def __getstate__(self):
+        d = self.__dict__.copy()
+        d.pop("_is_clone_of", None)
+        d.pop("_generate_cache_key", None)
+        return d
+
+    def _execute_on_connection(
+        self,
+        connection: Connection,
+        distilled_params: _CoreMultiExecuteParams,
+        execution_options: CoreExecuteOptionsParameter,
+    ) -> Result[Any]:
+        if self.supports_execution:
+            if TYPE_CHECKING:
+                assert isinstance(self, Executable)
+            return connection._execute_clauseelement(
+                self, distilled_params, execution_options
+            )
+        else:
+            raise exc.ObjectNotExecutableError(self)
+
+    def _execute_on_scalar(
+        self,
+        connection: Connection,
+        distilled_params: _CoreMultiExecuteParams,
+        execution_options: CoreExecuteOptionsParameter,
+    ) -> Any:
+        """an additional hook for subclasses to provide a different
+        implementation for connection.scalar() vs. connection.execute().
+
+        .. versionadded:: 2.0
+
+        """
+        return self._execute_on_connection(
+            connection, distilled_params, execution_options
+        ).scalar()
+
+    def _get_embedded_bindparams(self) -> Sequence[BindParameter[Any]]:
+        """Return the list of :class:`.BindParameter` objects embedded in the
+        object.
+
+        This accomplishes the same purpose as ``visitors.traverse()`` or
+        similar would provide, however by making use of the cache key
+        it takes advantage of memoization of the key to result in fewer
+        net method calls, assuming the statement is also going to be
+        executed.
+
+        """
+
+        key = self._generate_cache_key()
+        if key is None:
+            bindparams: List[BindParameter[Any]] = []
+
+            traverse(self, {}, {"bindparam": bindparams.append})
+            return bindparams
+
+        else:
+            return key.bindparams
+
+    def unique_params(
+        self,
+        __optionaldict: Optional[Dict[str, Any]] = None,
+        **kwargs: Any,
+    ) -> Self:
+        """Return a copy with :func:`_expression.bindparam` elements
+        replaced.
+
+        Same functionality as :meth:`_expression.ClauseElement.params`,
+        except that it adds ``unique=True``
+        to affected bind parameters so that multiple statements can be
+        used.
+
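+        E.g., a minimal sketch, reusing the example expression shown for
+        :meth:`_expression.ClauseElement.params` below::
+
+            clause = column("x") + bindparam("foo")
+            c1 = clause.unique_params({"foo": 7})
+            c2 = clause.unique_params({"foo": 8})
+            # c1 and c2 each carry a distinct, uniquely-named parameter
+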
+        """
+        return self._replace_params(True, __optionaldict, kwargs)
+
+    def params(
+        self,
+        __optionaldict: Optional[Mapping[str, Any]] = None,
+        **kwargs: Any,
+    ) -> Self:
+        """Return a copy with :func:`_expression.bindparam` elements
+        replaced.
+
+        Returns a copy of this ClauseElement with
+        :func:`_expression.bindparam`
+        elements replaced with values taken from the given dictionary::
+
+          >>> clause = column("x") + bindparam("foo")
+          >>> print(clause.compile().params)
+          {'foo':None}
+          >>> print(clause.params({"foo": 7}).compile().params)
+          {'foo':7}
+
+        """
+        return self._replace_params(False, __optionaldict, kwargs)
+
+    def _replace_params(
+        self,
+        unique: bool,
+        optionaldict: Optional[Mapping[str, Any]],
+        kwargs: Dict[str, Any],
+    ) -> Self:
+        if optionaldict:
+            kwargs.update(optionaldict)
+
+        def visit_bindparam(bind: BindParameter[Any]) -> None:
+            if bind.key in kwargs:
+                bind.value = kwargs[bind.key]
+                bind.required = False
+            if unique:
+                bind._convert_to_unique()
+
+        return cloned_traverse(
+            self,
+            {"maintain_key": True, "detect_subquery_cols": True},
+            {"bindparam": visit_bindparam},
+        )
+
+    def compare(self, other: ClauseElement, **kw: Any) -> bool:
+        r"""Compare this :class:`_expression.ClauseElement` to
+        the given :class:`_expression.ClauseElement`.
+
+        Subclasses should override the default behavior, which is a
+        straight identity comparison.
+
+        \**kw are arguments consumed by subclass ``compare()`` methods and
+        may be used to modify the criteria for comparison
+        (see :class:`_expression.ColumnElement`).
+
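+        E.g., a quick sketch::
+
+            >>> from sqlalchemy import column
+            >>> column("a").compare(column("a"))
+            True
+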
+        """
+        return traversals.compare(self, other, **kw)
+
+    def self_group(
+        self, against: Optional[OperatorType] = None
+    ) -> ClauseElement:
+        """Apply a 'grouping' to this :class:`_expression.ClauseElement`.
+
+        This method is overridden by subclasses to return a "grouping"
+        construct, i.e. parentheses.   In particular it's used by "binary"
+        expressions to provide a grouping around themselves when placed into a
+        larger expression, as well as by :func:`_expression.select`
+        constructs when placed into the FROM clause of another
+        :func:`_expression.select`.  (Note that subqueries should be
+        normally created using the :meth:`_expression.Select.alias` method,
+        as many
+        platforms require nested SELECT statements to be named).
+
+        As expressions are composed together, the application of
+        :meth:`self_group` is automatic - end-user code should never
+        need to use this method directly.  Note that SQLAlchemy's
+        clause constructs take operator precedence into account -
+        so parentheses might not be needed, for example, in
+        an expression like ``x OR (y AND z)`` - AND takes precedence
+        over OR.
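+
+        As a small sketch of that precedence handling (``x``, ``y`` and
+        ``z`` are illustrative columns)::
+
+            from sqlalchemy import and_, column, or_
+
+            expr = or_(column("x"), and_(column("y"), column("z")))
+            # str(expr) renders "x OR y AND z"; no parentheses are added
+            # because AND binds more tightly than OR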
+
+        The base :meth:`self_group` method of
+        :class:`_expression.ClauseElement`
+        just returns self.
+        """
+        return self
+
+    def _ungroup(self) -> ClauseElement:
+        """Return this :class:`_expression.ClauseElement`
+        without any groupings.
+        """
+
+        return self
+
+    def _compile_w_cache(
+        self,
+        dialect: Dialect,
+        *,
+        compiled_cache: Optional[CompiledCacheType],
+        column_keys: List[str],
+        for_executemany: bool = False,
+        schema_translate_map: Optional[SchemaTranslateMapType] = None,
+        **kw: Any,
+    ) -> typing_Tuple[
+        Compiled, Optional[Sequence[BindParameter[Any]]], CacheStats
+    ]:
+        elem_cache_key: Optional[CacheKey]
+
+        if compiled_cache is not None and dialect._supports_statement_cache:
+            elem_cache_key = self._generate_cache_key()
+        else:
+            elem_cache_key = None
+
+        if elem_cache_key is not None:
+            if TYPE_CHECKING:
+                assert compiled_cache is not None
+
+            cache_key, extracted_params = elem_cache_key
+            key = (
+                dialect,
+                cache_key,
+                tuple(column_keys),
+                bool(schema_translate_map),
+                for_executemany,
+            )
+            compiled_sql = compiled_cache.get(key)
+
+            if compiled_sql is None:
+                cache_hit = dialect.CACHE_MISS
+                compiled_sql = self._compiler(
+                    dialect,
+                    cache_key=elem_cache_key,
+                    column_keys=column_keys,
+                    for_executemany=for_executemany,
+                    schema_translate_map=schema_translate_map,
+                    **kw,
+                )
+                compiled_cache[key] = compiled_sql
+            else:
+                cache_hit = dialect.CACHE_HIT
+        else:
+            extracted_params = None
+            compiled_sql = self._compiler(
+                dialect,
+                cache_key=elem_cache_key,
+                column_keys=column_keys,
+                for_executemany=for_executemany,
+                schema_translate_map=schema_translate_map,
+                **kw,
+            )
+
+            if not dialect._supports_statement_cache:
+                cache_hit = dialect.NO_DIALECT_SUPPORT
+            elif compiled_cache is None:
+                cache_hit = dialect.CACHING_DISABLED
+            else:
+                cache_hit = dialect.NO_CACHE_KEY
+
+        return compiled_sql, extracted_params, cache_hit
+
+    def __invert__(self):
+        # undocumented element currently used by the ORM for
+        # relationship.contains()
+        if hasattr(self, "negation_clause"):
+            return self.negation_clause
+        else:
+            return self._negate()
+
+    def _negate(self) -> ClauseElement:
+        grouped = self.self_group(against=operators.inv)
+        assert isinstance(grouped, ColumnElement)
+        return UnaryExpression(grouped, operator=operators.inv)
+
+    def __bool__(self):
+        raise TypeError("Boolean value of this clause is not defined")
+
+    def __repr__(self):
+        friendly = self.description
+        if friendly is None:
+            return object.__repr__(self)
+        else:
+            return "<%s.%s at 0x%x; %s>" % (
+                self.__module__,
+                self.__class__.__name__,
+                id(self),
+                friendly,
+            )
+
+
+class DQLDMLClauseElement(ClauseElement):
+    """represents a :class:`.ClauseElement` that compiles to a DQL or DML
+    expression, not DDL.
+
+    .. versionadded:: 2.0
+
+    """
+
+    if typing.TYPE_CHECKING:
+
+        def _compiler(self, dialect: Dialect, **kw: Any) -> SQLCompiler:
+            """Return a compiler appropriate for this ClauseElement, given a
+            Dialect."""
+            ...
+
+        def compile(  # noqa: A001
+            self,
+            bind: Optional[_HasDialect] = None,
+            dialect: Optional[Dialect] = None,
+            **kw: Any,
+        ) -> SQLCompiler: ...
+
+
+class CompilerColumnElement(
+    roles.DMLColumnRole,
+    roles.DDLConstraintColumnRole,
+    roles.ColumnsClauseRole,
+    CompilerElement,
+):
+    """A compiler-only column element used for ad-hoc string compilations.
+
+    .. versionadded:: 2.0
+
+    """
+
+    __slots__ = ()
+
+    _propagate_attrs = util.EMPTY_DICT
+    _is_collection_aggregate = False
+
+
+# SQLCoreOperations should ideally satisfy the ExpressionElementRole
+# and ColumnsClauseRole; however, the MRO issues become too elaborate
+# at the moment.
+class SQLCoreOperations(Generic[_T_co], ColumnOperators, TypingOnly):
+    __slots__ = ()
+
+    # annotations for comparison methods
+    # these are from operators->Operators / ColumnOperators,
+    # redefined with the specific types returned by ColumnElement hierarchies
+    if typing.TYPE_CHECKING:
+
+        @util.non_memoized_property
+        def _propagate_attrs(self) -> _PropagateAttrsType: ...
+
+        def operate(
+            self, op: OperatorType, *other: Any, **kwargs: Any
+        ) -> ColumnElement[Any]: ...
+
+        def reverse_operate(
+            self, op: OperatorType, other: Any, **kwargs: Any
+        ) -> ColumnElement[Any]: ...
+
+        @overload
+        def op(
+            self,
+            opstring: str,
+            precedence: int = ...,
+            is_comparison: bool = ...,
+            *,
+            return_type: _TypeEngineArgument[_OPT],
+            python_impl: Optional[Callable[..., Any]] = None,
+        ) -> Callable[[Any], BinaryExpression[_OPT]]: ...
+
+        @overload
+        def op(
+            self,
+            opstring: str,
+            precedence: int = ...,
+            is_comparison: bool = ...,
+            return_type: Optional[_TypeEngineArgument[Any]] = ...,
+            python_impl: Optional[Callable[..., Any]] = ...,
+        ) -> Callable[[Any], BinaryExpression[Any]]: ...
+
+        def op(
+            self,
+            opstring: str,
+            precedence: int = 0,
+            is_comparison: bool = False,
+            return_type: Optional[_TypeEngineArgument[Any]] = None,
+            python_impl: Optional[Callable[..., Any]] = None,
+        ) -> Callable[[Any], BinaryExpression[Any]]: ...
+
+        def bool_op(
+            self,
+            opstring: str,
+            precedence: int = 0,
+            python_impl: Optional[Callable[..., Any]] = None,
+        ) -> Callable[[Any], BinaryExpression[bool]]: ...
+
+        def __and__(self, other: Any) -> BooleanClauseList: ...
+
+        def __or__(self, other: Any) -> BooleanClauseList: ...
+
+        def __invert__(self) -> ColumnElement[_T_co]: ...
+
+        def __lt__(self, other: Any) -> ColumnElement[bool]: ...
+
+        def __le__(self, other: Any) -> ColumnElement[bool]: ...
+
+        # also declare that this class has a hash method; otherwise it
+        # may be assumed to be None by type checkers, since the object
+        # defines __eq__ and Python sets __hash__ to None in that case:
+        # https://docs.python.org/3/reference/datamodel.html#object.__hash__
+        def __hash__(self) -> int: ...
+
+        def __eq__(self, other: Any) -> ColumnElement[bool]:  # type: ignore[override]  # noqa: E501
+            ...
+
+        def __ne__(self, other: Any) -> ColumnElement[bool]:  # type: ignore[override]  # noqa: E501
+            ...
+
+        def is_distinct_from(self, other: Any) -> ColumnElement[bool]: ...
+
+        def is_not_distinct_from(self, other: Any) -> ColumnElement[bool]: ...
+
+        def __gt__(self, other: Any) -> ColumnElement[bool]: ...
+
+        def __ge__(self, other: Any) -> ColumnElement[bool]: ...
+
+        def __neg__(self) -> UnaryExpression[_T_co]: ...
+
+        def __contains__(self, other: Any) -> ColumnElement[bool]: ...
+
+        def __getitem__(self, index: Any) -> ColumnElement[Any]: ...
+
+        @overload
+        def __lshift__(self: _SQO[int], other: Any) -> ColumnElement[int]: ...
+
+        @overload
+        def __lshift__(self, other: Any) -> ColumnElement[Any]: ...
+
+        def __lshift__(self, other: Any) -> ColumnElement[Any]: ...
+
+        @overload
+        def __rshift__(self: _SQO[int], other: Any) -> ColumnElement[int]: ...
+
+        @overload
+        def __rshift__(self, other: Any) -> ColumnElement[Any]: ...
+
+        def __rshift__(self, other: Any) -> ColumnElement[Any]: ...
+
+        @overload
+        def concat(self: _SQO[str], other: Any) -> ColumnElement[str]: ...
+
+        @overload
+        def concat(self, other: Any) -> ColumnElement[Any]: ...
+
+        def concat(self, other: Any) -> ColumnElement[Any]: ...
+
+        def like(
+            self, other: Any, escape: Optional[str] = None
+        ) -> BinaryExpression[bool]: ...
+
+        def ilike(
+            self, other: Any, escape: Optional[str] = None
+        ) -> BinaryExpression[bool]: ...
+
+        def bitwise_xor(self, other: Any) -> BinaryExpression[Any]: ...
+
+        def bitwise_or(self, other: Any) -> BinaryExpression[Any]: ...
+
+        def bitwise_and(self, other: Any) -> BinaryExpression[Any]: ...
+
+        def bitwise_not(self) -> UnaryExpression[_T_co]: ...
+
+        def bitwise_lshift(self, other: Any) -> BinaryExpression[Any]: ...
+
+        def bitwise_rshift(self, other: Any) -> BinaryExpression[Any]: ...
+
+        def in_(
+            self,
+            other: Union[
+                Iterable[Any], BindParameter[Any], roles.InElementRole
+            ],
+        ) -> BinaryExpression[bool]: ...
+
+        def not_in(
+            self,
+            other: Union[
+                Iterable[Any], BindParameter[Any], roles.InElementRole
+            ],
+        ) -> BinaryExpression[bool]: ...
+
+        def notin_(
+            self,
+            other: Union[
+                Iterable[Any], BindParameter[Any], roles.InElementRole
+            ],
+        ) -> BinaryExpression[bool]: ...
+
+        def not_like(
+            self, other: Any, escape: Optional[str] = None
+        ) -> BinaryExpression[bool]: ...
+
+        def notlike(
+            self, other: Any, escape: Optional[str] = None
+        ) -> BinaryExpression[bool]: ...
+
+        def not_ilike(
+            self, other: Any, escape: Optional[str] = None
+        ) -> BinaryExpression[bool]: ...
+
+        def notilike(
+            self, other: Any, escape: Optional[str] = None
+        ) -> BinaryExpression[bool]: ...
+
+        def is_(self, other: Any) -> BinaryExpression[bool]: ...
+
+        def is_not(self, other: Any) -> BinaryExpression[bool]: ...
+
+        def isnot(self, other: Any) -> BinaryExpression[bool]: ...
+
+        def startswith(
+            self,
+            other: Any,
+            escape: Optional[str] = None,
+            autoescape: bool = False,
+        ) -> ColumnElement[bool]: ...
+
+        def istartswith(
+            self,
+            other: Any,
+            escape: Optional[str] = None,
+            autoescape: bool = False,
+        ) -> ColumnElement[bool]: ...
+
+        def endswith(
+            self,
+            other: Any,
+            escape: Optional[str] = None,
+            autoescape: bool = False,
+        ) -> ColumnElement[bool]: ...
+
+        def iendswith(
+            self,
+            other: Any,
+            escape: Optional[str] = None,
+            autoescape: bool = False,
+        ) -> ColumnElement[bool]: ...
+
+        def contains(self, other: Any, **kw: Any) -> ColumnElement[bool]: ...
+
+        def icontains(self, other: Any, **kw: Any) -> ColumnElement[bool]: ...
+
+        def match(self, other: Any, **kwargs: Any) -> ColumnElement[bool]: ...
+
+        def regexp_match(
+            self, pattern: Any, flags: Optional[str] = None
+        ) -> ColumnElement[bool]: ...
+
+        def regexp_replace(
+            self, pattern: Any, replacement: Any, flags: Optional[str] = None
+        ) -> ColumnElement[str]: ...
+
+        def desc(self) -> UnaryExpression[_T_co]: ...
+
+        def asc(self) -> UnaryExpression[_T_co]: ...
+
+        def nulls_first(self) -> UnaryExpression[_T_co]: ...
+
+        def nullsfirst(self) -> UnaryExpression[_T_co]: ...
+
+        def nulls_last(self) -> UnaryExpression[_T_co]: ...
+
+        def nullslast(self) -> UnaryExpression[_T_co]: ...
+
+        def collate(self, collation: str) -> CollationClause: ...
+
+        def between(
+            self, cleft: Any, cright: Any, symmetric: bool = False
+        ) -> BinaryExpression[bool]: ...
+
+        def distinct(self: _SQO[_T_co]) -> UnaryExpression[_T_co]: ...
+
+        def any_(self) -> CollectionAggregate[Any]: ...
+
+        def all_(self) -> CollectionAggregate[Any]: ...
+
+        # numeric overloads.  These need more tweaking;
+        # in particular, they all need to have a variant for Optional[_T],
+        # because Optional only applies to the data side, not the
+        # expression side
+
+        @overload
+        def __add__(
+            self: _SQO[_NMT],
+            other: Any,
+        ) -> ColumnElement[_NMT]: ...
+
+        @overload
+        def __add__(
+            self: _SQO[str],
+            other: Any,
+        ) -> ColumnElement[str]: ...
+
+        @overload
+        def __add__(self, other: Any) -> ColumnElement[Any]: ...
+
+        def __add__(self, other: Any) -> ColumnElement[Any]: ...
+
+        @overload
+        def __radd__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: ...
+
+        @overload
+        def __radd__(self: _SQO[str], other: Any) -> ColumnElement[str]: ...
+
+        def __radd__(self, other: Any) -> ColumnElement[Any]: ...
+
+        @overload
+        def __sub__(
+            self: _SQO[_NMT],
+            other: Any,
+        ) -> ColumnElement[_NMT]: ...
+
+        @overload
+        def __sub__(self, other: Any) -> ColumnElement[Any]: ...
+
+        def __sub__(self, other: Any) -> ColumnElement[Any]: ...
+
+        @overload
+        def __rsub__(
+            self: _SQO[_NMT],
+            other: Any,
+        ) -> ColumnElement[_NMT]: ...
+
+        @overload
+        def __rsub__(self, other: Any) -> ColumnElement[Any]: ...
+
+        def __rsub__(self, other: Any) -> ColumnElement[Any]: ...
+
+        @overload
+        def __mul__(
+            self: _SQO[_NMT],
+            other: Any,
+        ) -> ColumnElement[_NMT]: ...
+
+        @overload
+        def __mul__(self, other: Any) -> ColumnElement[Any]: ...
+
+        def __mul__(self, other: Any) -> ColumnElement[Any]: ...
+
+        @overload
+        def __rmul__(
+            self: _SQO[_NMT],
+            other: Any,
+        ) -> ColumnElement[_NMT]: ...
+
+        @overload
+        def __rmul__(self, other: Any) -> ColumnElement[Any]: ...
+
+        def __rmul__(self, other: Any) -> ColumnElement[Any]: ...
+
+        @overload
+        def __mod__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: ...
+
+        @overload
+        def __mod__(self, other: Any) -> ColumnElement[Any]: ...
+
+        def __mod__(self, other: Any) -> ColumnElement[Any]: ...
+
+        @overload
+        def __rmod__(self: _SQO[_NMT], other: Any) -> ColumnElement[_NMT]: ...
+
+        @overload
+        def __rmod__(self, other: Any) -> ColumnElement[Any]: ...
+
+        def __rmod__(self, other: Any) -> ColumnElement[Any]: ...
+
+        @overload
+        def __truediv__(
+            self: _SQO[int], other: Any
+        ) -> ColumnElement[_NUMERIC]: ...
+
+        @overload
+        def __truediv__(self: _SQO[_NT], other: Any) -> ColumnElement[_NT]: ...
+
+        @overload
+        def __truediv__(self, other: Any) -> ColumnElement[Any]: ...
+
+        def __truediv__(self, other: Any) -> ColumnElement[Any]: ...
+
+        @overload
+        def __rtruediv__(
+            self: _SQO[_NMT], other: Any
+        ) -> ColumnElement[_NUMERIC]: ...
+
+        @overload
+        def __rtruediv__(self, other: Any) -> ColumnElement[Any]: ...
+
+        def __rtruediv__(self, other: Any) -> ColumnElement[Any]: ...
+
+        @overload
+        def __floordiv__(
+            self: _SQO[_NMT], other: Any
+        ) -> ColumnElement[_NMT]: ...
+
+        @overload
+        def __floordiv__(self, other: Any) -> ColumnElement[Any]: ...
+
+        def __floordiv__(self, other: Any) -> ColumnElement[Any]: ...
+
+        @overload
+        def __rfloordiv__(
+            self: _SQO[_NMT], other: Any
+        ) -> ColumnElement[_NMT]: ...
+
+        @overload
+        def __rfloordiv__(self, other: Any) -> ColumnElement[Any]: ...
+
+        def __rfloordiv__(self, other: Any) -> ColumnElement[Any]: ...
+
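+
+# Illustrative sketch (editorial example, not part of the module): the
+# ``op()`` and ``bool_op()`` stubs above correspond to generated operator
+# functions, e.g.:
+#
+#     from sqlalchemy import column
+#
+#     expr = column("a").op("&&")(column("b"))    # renders "a && b"
+#     cond = column("a").bool_op("~")("pattern")  # boolean-typed result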
+
+class SQLColumnExpression(
+    SQLCoreOperations[_T_co], roles.ExpressionElementRole[_T_co], TypingOnly
+):
+    """A type that may be used to indicate any SQL column element or object
+    that acts in place of one.
+
+    :class:`.SQLColumnExpression` is a base of
+    :class:`.ColumnElement`, as well as within the bases of ORM elements
+    such as :class:`.InstrumentedAttribute`, and may be used in :pep:`484`
+    typing to indicate arguments or return values that should behave
+    as column expressions.
+
+    .. versionadded:: 2.0.0b4
+
+    """
+
+    __slots__ = ()
+
+
+_SQO = SQLCoreOperations
+
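+
+# Illustrative sketch (editorial example): SQLColumnExpression may be used
+# in PEP 484 annotations to accept Core and ORM column expressions alike,
+# e.g.:
+#
+#     def as_predicate(
+#         expr: SQLColumnExpression[int],
+#     ) -> SQLColumnExpression[bool]:
+#         return expr > 0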
+
+class ColumnElement(
+    roles.ColumnArgumentOrKeyRole,
+    roles.StatementOptionRole,
+    roles.WhereHavingRole,
+    roles.BinaryElementRole[_T],
+    roles.OrderByRole,
+    roles.ColumnsClauseRole,
+    roles.LimitOffsetRole,
+    roles.DMLColumnRole,
+    roles.DDLConstraintColumnRole,
+    roles.DDLExpressionRole,
+    SQLColumnExpression[_T],
+    DQLDMLClauseElement,
+):
+    """Represent a column-oriented SQL expression suitable for usage in the
+    "columns" clause, WHERE clause etc. of a statement.
+
+    While the most familiar kind of :class:`_expression.ColumnElement` is the
+    :class:`_schema.Column` object, :class:`_expression.ColumnElement`
+    serves as the basis
+    for any unit that may be present in a SQL expression, including
+    the expressions themselves, SQL functions, bound parameters,
+    literal expressions, keywords such as ``NULL``, etc.
+    :class:`_expression.ColumnElement`
+    is the ultimate base class for all such elements.
+
+    A wide variety of SQLAlchemy Core functions work at the SQL expression
+    level, and are intended to accept instances of
+    :class:`_expression.ColumnElement` as
+    arguments.  These functions will typically document that they accept a
+    "SQL expression" as an argument.  What this means in terms of SQLAlchemy
+    usually refers to an input which is either already in the form of a
+    :class:`_expression.ColumnElement` object,
+    or a value which can be **coerced** into
+    one.  The coercion rules followed by most, but not all, SQLAlchemy Core
+    functions with regards to SQL expressions are as follows:
+
+        * a literal Python value, such as a string, integer or floating
+          point value, boolean, datetime, ``Decimal`` object, or virtually
+          any other Python object, will be coerced into a "literal bound
+          value".  This generally means that a :func:`.bindparam` will be
+          produced featuring the given value embedded into the construct; the
+          resulting :class:`.BindParameter` object is an instance of
+          :class:`_expression.ColumnElement`.
+          The Python value will ultimately be sent
+          to the DBAPI at execution time as a parameterized argument to the
+          ``execute()`` or ``executemany()`` methods, after SQLAlchemy
+          type-specific converters (e.g. those provided by any associated
+          :class:`.TypeEngine` objects) are applied to the value.
+
+        * any special object value, typically ORM-level constructs, which
+          feature an accessor called ``__clause_element__()``.  The Core
+          expression system looks for this method when an object of otherwise
+          unknown type is passed to a function that is looking to coerce the
+          argument into a :class:`_expression.ColumnElement` and sometimes a
+          :class:`_expression.SelectBase` expression.
+          It is used within the ORM to
+          convert from ORM-specific objects like mapped classes and
+          mapped attributes into Core expression objects.
+
+        * The Python ``None`` value is typically interpreted as ``NULL``,
+          which in SQLAlchemy Core produces an instance of :func:`.null`.
+
+    A :class:`_expression.ColumnElement` provides the ability to generate new
+    :class:`_expression.ColumnElement`
+    objects using Python expressions.  This means that Python operators
+    such as ``==``, ``!=`` and ``<`` are overloaded to mimic SQL operations,
+    and allow the instantiation of further :class:`_expression.ColumnElement`
+    instances
+    which are composed from other, more fundamental
+    :class:`_expression.ColumnElement`
+    objects.  For example, two :class:`.ColumnClause` objects can be added
+    together with the addition operator ``+`` to produce
+    a :class:`.BinaryExpression`.
+    Both :class:`.ColumnClause` and :class:`.BinaryExpression` are subclasses
+    of :class:`_expression.ColumnElement`:
+
+    .. sourcecode:: pycon+sql
+
+        >>> from sqlalchemy.sql import column
+        >>> column("a") + column("b")
+        <sqlalchemy.sql.expression.BinaryExpression object at 0x101029dd0>
+        >>> print(column("a") + column("b"))
+        {printsql}a + b
+
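+    The coercion of the Python ``None`` value into SQL ``NULL``, described
+    above, may be illustrated in the same way (an illustrative sketch):
+
+    .. sourcecode:: pycon+sql
+
+        >>> print(column("a") == None)
+        {printsql}a IS NULL
+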
+    .. seealso::
+
+        :class:`_schema.Column`
+
+        :func:`_expression.column`
+
+    """
+
+    __visit_name__ = "column_element"
+
+    primary_key: bool = False
+    _is_clone_of: Optional[ColumnElement[_T]]
+    _is_column_element = True
+    _insert_sentinel: bool = False
+    _omit_from_statements = False
+    _is_collection_aggregate = False
+
+    foreign_keys: AbstractSet[ForeignKey] = frozenset()
+
+    @util.memoized_property
+    def _proxies(self) -> List[ColumnElement[Any]]:
+        return []
+
+    @util.non_memoized_property
+    def _tq_label(self) -> Optional[str]:
+        """The named label that can be used to target
+        this column in a result set in a "table qualified" context.
+
+        This label is almost always the label used when
+        rendering <expr> AS <label> in a SELECT statement when using
+        the LABEL_STYLE_TABLENAME_PLUS_COL label style, which is what the
+        legacy ORM ``Query`` object uses as well.
+
+        For a regular Column bound to a Table, this is typically the label
+        <tablename>_<columnname>.  For other constructs, different rules
+        may apply, such as anonymized labels and others.
+
+        .. versionchanged:: 1.4.21 renamed from ``._label``
+
+        """
+        return None
+
+    key: Optional[str] = None
+    """The 'key' that in some circumstances refers to this object in a
+    Python namespace.
+
+    This typically refers to the "key" of the column as present in the
+    ``.c`` collection of a selectable, e.g. ``sometable.c["somekey"]`` would
+    return a :class:`_schema.Column` with a ``.key`` of "somekey".
+
+    """
+
+    @HasMemoized.memoized_attribute
+    def _tq_key_label(self) -> Optional[str]:
+        """A label-based version of 'key' that in some circumstances refers
+        to this object in a Python namespace.
+
+
+        _tq_key_label comes into play when a select() statement is constructed
+        with apply_labels(); in this case, all Column objects in the ``.c``
+        collection are rendered as <tablename>_<columnname> in SQL; this is
+        essentially the value of ._tq_label.  But to locate those columns in
+        the ``.c`` collection, the name is along the lines of
+        <tablename>_<key>; that's the typical value of ._tq_key_label.
+
+        .. versionchanged:: 1.4.21 renamed from ``._key_label``
+
+        """
+        return self._proxy_key
+
+    @property
+    def _key_label(self) -> Optional[str]:
+        """legacy; renamed to _tq_key_label"""
+        return self._tq_key_label
+
+    @property
+    def _label(self) -> Optional[str]:
+        """legacy; renamed to _tq_label"""
+        return self._tq_label
+
+    @property
+    def _non_anon_label(self) -> Optional[str]:
+        """the 'name' that naturally applies this element when rendered in
+        SQL.
+
+        Concretely, this is the "name" of a column or a label in a
+        SELECT statement; ``<columnname>`` and ``<labelname>`` below:
+
+        .. sourcecode:: sql
+
+            SELECT <columnname> FROM table
+
+            SELECT column AS <labelname> FROM table
+
+        Above, the two names noted will be what's present in the DBAPI
+        ``cursor.description`` as the names.
+
+        If this attribute returns ``None``, it means that the SQL element as
+        written does not have a fully predictable "name" that would appear
+        in the ``cursor.description``. Examples include SQL functions, CAST
+        functions, etc. While such things do return names in
+        ``cursor.description``, they are only predictable on a
+        database-specific basis; e.g. an expression like ``MAX(table.col)`` may
+        appear as the string ``max`` on one database (like PostgreSQL) or may
+        appear as the whole expression ``max(table.col)`` on SQLite.
+
+        The default implementation looks for a ``.name`` attribute on the
+        object, as has been the precedent established in SQLAlchemy for many
+        years.  An exception is made on the ``FunctionElement`` subclass
+        so that the return value is always ``None``.
+
+        .. versionadded:: 1.4.21
+
+        """
+        return getattr(self, "name", None)
+
+    _render_label_in_columns_clause = True
+    """A flag used by select._columns_plus_names that helps to determine
+    we are actually going to render in terms of "SELECT <col> AS <label>".
+    This flag can be returned as False for some Column objects that want
+    to be rendered as simple "SELECT <col>"; typically columns that don't have
+    any parent table and are named the same as what the label would be
+    in any case.
+
+    """
+
+    _allow_label_resolve = True
+    """A flag that can be flipped to prevent a column from being resolvable
+    by string label name.
+
+    The joined eager loader strategy in the ORM uses this, for example.
+
+    """
+
+    _is_implicitly_boolean = False
+
+    _alt_names: Sequence[str] = ()
+
+    @overload
+    def self_group(self, against: None = None) -> ColumnElement[_T]: ...
+
+    @overload
+    def self_group(
+        self, against: Optional[OperatorType] = None
+    ) -> ColumnElement[Any]: ...
+
+    def self_group(
+        self, against: Optional[OperatorType] = None
+    ) -> ColumnElement[Any]:
+        if (
+            against in (operators.and_, operators.or_, operators._asbool)
+            and self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity
+        ):
+            return AsBoolean(self, operators.is_true, operators.is_false)
+        elif against in (operators.any_op, operators.all_op):
+            return Grouping(self)
+        else:
+            return self
+
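+    # e.g. (an illustrative sketch): per the rules above, a Boolean-typed
+    # element grouped against AND/OR is wrapped in AsBoolean, while most
+    # other contexts leave the element unchanged:
+    #
+    #     col = column("flag", Boolean())
+    #     col.self_group(against=operators.and_)  # -> AsBoolean wrapper
+    #     col.self_group(against=operators.add)   # -> col, unchanged
+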
+    @overload
+    def _negate(self: ColumnElement[bool]) -> ColumnElement[bool]: ...
+
+    @overload
+    def _negate(self: ColumnElement[_T]) -> ColumnElement[_T]: ...
+
+    def _negate(self) -> ColumnElement[Any]:
+        if self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity:
+            return AsBoolean(self, operators.is_false, operators.is_true)
+        else:
+            grouped = self.self_group(against=operators.inv)
+            assert isinstance(grouped, ColumnElement)
+            return UnaryExpression(
+                grouped, operator=operators.inv, wraps_column_expression=True
+            )
+
+    type: TypeEngine[_T]
+
+    if not TYPE_CHECKING:
+
+        @util.memoized_property
+        def type(self) -> TypeEngine[_T]:  # noqa: A001
+            # used for delayed setup of
+            # type_api
+            return type_api.NULLTYPE
+
+    @HasMemoized.memoized_attribute
+    def comparator(self) -> TypeEngine.Comparator[_T]:
+        try:
+            comparator_factory = self.type.comparator_factory
+        except AttributeError as err:
+            raise TypeError(
+                "Object %r associated with '.type' attribute "
+                "is not a TypeEngine class or object" % self.type
+            ) from err
+        else:
+            return comparator_factory(self)
+
+    def __setstate__(self, state):
+        self.__dict__.update(state)
+
+    def __getattr__(self, key: str) -> Any:
+        try:
+            return getattr(self.comparator, key)
+        except AttributeError as err:
+            raise AttributeError(
+                "Neither %r object nor %r object has an attribute %r"
+                % (
+                    type(self).__name__,
+                    type(self.comparator).__name__,
+                    key,
+                )
+            ) from err
+
+    def operate(
+        self,
+        op: operators.OperatorType,
+        *other: Any,
+        **kwargs: Any,
+    ) -> ColumnElement[Any]:
+        return op(self.comparator, *other, **kwargs)  # type: ignore[no-any-return]  # noqa: E501
+
+    def reverse_operate(
+        self, op: operators.OperatorType, other: Any, **kwargs: Any
+    ) -> ColumnElement[Any]:
+        return op(other, self.comparator, **kwargs)  # type: ignore[no-any-return]  # noqa: E501
+
+    def _bind_param(
+        self,
+        operator: operators.OperatorType,
+        obj: Any,
+        type_: Optional[TypeEngine[_T]] = None,
+        expanding: bool = False,
+    ) -> BindParameter[_T]:
+        return BindParameter(
+            None,
+            obj,
+            _compared_to_operator=operator,
+            type_=type_,
+            _compared_to_type=self.type,
+            unique=True,
+            expanding=expanding,
+        )
+
+    @property
+    def expression(self) -> ColumnElement[Any]:
+        """Return a column expression.
+
+        Part of the inspection interface; returns self.
+
+        """
+        return self
+
+    @property
+    def _select_iterable(self) -> _SelectIterable:
+        return (self,)
+
+    @util.memoized_property
+    def base_columns(self) -> FrozenSet[ColumnElement[Any]]:
+        return frozenset(c for c in self.proxy_set if not c._proxies)
+
+    @util.memoized_property
+    def proxy_set(self) -> FrozenSet[ColumnElement[Any]]:
+        """set of all columns we are proxying
+
+        as of 2.0 this is explicitly deannotated columns.  previously it was
+        effectively deannotated columns but wasn't enforced.  annotated
+        columns should basically not go into sets if at all possible because
+        their hashing behavior is very non-performant.
+
+        """
+        return frozenset([self._deannotate()]).union(
+            itertools.chain(*[c.proxy_set for c in self._proxies])
+        )
+
+    @util.memoized_property
+    def _expanded_proxy_set(self) -> FrozenSet[ColumnElement[Any]]:
+        return frozenset(_expand_cloned(self.proxy_set))
+
+    def _uncached_proxy_list(self) -> List[ColumnElement[Any]]:
+        """An 'uncached' version of proxy set.
+
+        This list includes annotated columns which perform very poorly in
+        set operations.
+
+        """
+
+        return [self] + list(
+            itertools.chain(*[c._uncached_proxy_list() for c in self._proxies])
+        )
+
+    def shares_lineage(self, othercolumn: ColumnElement[Any]) -> bool:
+        """Return True if the given :class:`_expression.ColumnElement`
+        has a common ancestor to this :class:`_expression.ColumnElement`."""
+
+        return bool(self.proxy_set.intersection(othercolumn.proxy_set))
+
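+    # e.g. (an illustrative sketch): if sq = select(some_table).subquery(),
+    # then sq.c.x.shares_lineage(some_table.c.x) is True, since the
+    # subquery column proxies the base column and the proxy sets intersect.
+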
+    def _compare_name_for_result(self, other: ColumnElement[Any]) -> bool:
+        """Return True if the given column element compares to this one
+        when targeting within a result row."""
+
+        return (
+            hasattr(other, "name")
+            and hasattr(self, "name")
+            and other.name == self.name
+        )
+
+    @HasMemoized.memoized_attribute
+    def _proxy_key(self) -> Optional[str]:
+        if self._annotations and "proxy_key" in self._annotations:
+            return cast(str, self._annotations["proxy_key"])
+
+        name = self.key
+        if not name:
+            # there is a seeming contradiction here, which is that the
+            # "_non_anon_label" of a column can in fact be an
+            # "_anonymous_label"; this is when it's on a column that is
+            # proxying for an anonymous expression in a subquery.
+            name = self._non_anon_label
+
+        if isinstance(name, _anonymous_label):
+            return None
+        else:
+            return name
+
+    @HasMemoized.memoized_attribute
+    def _expression_label(self) -> Optional[str]:
+        """a suggested label to use in the case that the column has no name,
+        which should be used if possible as the explicit 'AS <label>'
+        where this expression would normally have an anon label.
+
+        this is essentially what _proxy_key does, except it returns
+        None if the column has a normal name that can be used.
+
+        """
+
+        if getattr(self, "name", None) is not None:
+            return None
+        elif self._annotations and "proxy_key" in self._annotations:
+            return cast(str, self._annotations["proxy_key"])
+        else:
+            return None
+
+    def _make_proxy(
+        self,
+        selectable: FromClause,
+        *,
+        primary_key: ColumnSet,
+        foreign_keys: Set[KeyedColumnElement[Any]],
+        name: Optional[str] = None,
+        key: Optional[str] = None,
+        name_is_truncatable: bool = False,
+        compound_select_cols: Optional[Sequence[ColumnElement[Any]]] = None,
+        **kw: Any,
+    ) -> typing_Tuple[str, ColumnClause[_T]]:
+        """Create a new :class:`_expression.ColumnElement` representing this
+        :class:`_expression.ColumnElement` as it appears in the select list of
+        a descending selectable.
+
+        """
+        if name is None:
+            name = self._anon_name_label
+            if key is None:
+                key = self._proxy_key
+        else:
+            key = name
+
+        assert key is not None
+
+        co: ColumnClause[_T] = ColumnClause(
+            (
+                coercions.expect(roles.TruncatedLabelRole, name)
+                if name_is_truncatable
+                else name
+            ),
+            type_=getattr(self, "type", None),
+            _selectable=selectable,
+        )
+
+        co._propagate_attrs = selectable._propagate_attrs
+        if compound_select_cols:
+            co._proxies = list(compound_select_cols)
+        else:
+            co._proxies = [self]
+        if selectable._is_clone_of is not None:
+            co._is_clone_of = selectable._is_clone_of.columns.get(key)
+        return key, co
+
+    def cast(self, type_: _TypeEngineArgument[_OPT]) -> Cast[_OPT]:
+        """Produce a type cast, i.e. ``CAST(<expression> AS <type>)``.
+
+        This is a shortcut to the :func:`_expression.cast` function.
+
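+        E.g. (an illustrative sketch; ``mytable`` is assumed)::
+
+            from sqlalchemy import Numeric, select
+
+            stmt = select(mytable.c.amount.cast(Numeric(12, 2)))
+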
+        .. seealso::
+
+            :ref:`tutorial_casts`
+
+            :func:`_expression.cast`
+
+            :func:`_expression.type_coerce`
+
+        """
+        return Cast(self, type_)
+
+    def label(self, name: Optional[str]) -> Label[_T]:
+        """Produce a column label, i.e. ``<columnname> AS <name>``.
+
+        This is a shortcut to the :func:`_expression.label` function.
+
+        If 'name' is ``None``, an anonymous label name will be generated.
+
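+        E.g. (an illustrative sketch; ``mytable`` is assumed)::
+
+            stmt = select(mytable.c.name.label("user_name"))
+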
+        """
+        return Label(name, self, self.type)
+
+    def _anon_label(
+        self, seed: Optional[str], add_hash: Optional[int] = None
+    ) -> _anonymous_label:
+        while self._is_clone_of is not None:
+            self = self._is_clone_of
+
+        # as of 1.4 anonymous label for ColumnElement uses hash(), not id(),
+        # as the identifier, because a column and its annotated version are
+        # the same thing in a SQL statement
+        hash_value = hash(self)
+
+        if add_hash:
+            # this path is used for disambiguating anon labels that would
+            # otherwise be the same name for the same element repeated.
+            # an additional numeric value is factored in for each label.
+
+            # shift hash(self) (which is id(self), typically an 8-byte
+            # integer) 16 bits leftward, then fill the extra add_hash
+            # in on the right
+            assert add_hash < (2 << 15)
+            assert seed
+            hash_value = (hash_value << 16) | add_hash
+
+            # an extra underscore is added for labels with extra hash
+            # values, to isolate the "deduped anon" namespace from the
+            # regular namespace; this eliminates the chance of these
+            # manufactured hash values overlapping with regular ones on
+            # some hypothetical Python interpreter
+            seed = seed + "_"
+
+        if isinstance(seed, _anonymous_label):
+            return _anonymous_label.safe_construct(
+                hash_value, "", enclosing_label=seed
+            )
+
+        return _anonymous_label.safe_construct(hash_value, seed or "anon")
+
+    @util.memoized_property
+    def _anon_name_label(self) -> str:
+        """Provides a constant 'anonymous label' for this ColumnElement.
+
+        This is a label name which will be determined at compile time.
+        The same label is returned each time ``_anon_name_label`` is
+        accessed, so that expressions can reference it multiple times,
+        producing the same label name at compile time.
+
+        The compiler uses this function automatically at compile time
+        for expressions that are known to be 'unnamed' like binary
+        expressions and function calls.
+
+        .. versionchanged:: 1.4.9 - this attribute was not intended to be
+           public and is renamed to _anon_name_label.  anon_label exists
+           for backwards compat
+
+        """
+        name = getattr(self, "name", None)
+        return self._anon_label(name)
+
+    @util.memoized_property
+    def _anon_key_label(self) -> _anonymous_label:
+        """Provides a constant 'anonymous key label' for this ColumnElement.
+
+        Compare to ``_anon_name_label``, except that the "key" of the
+        column, if available, is used to generate the label.
+
+        This is used when a deduplicating key is placed into the columns
+        collection of a selectable.
+
+        .. versionchanged:: 1.4.9 - this attribute was not intended to be
+           public and is renamed to _anon_key_label.  anon_key_label exists
+           for backwards compat
+
+        """
+        return self._anon_label(self._proxy_key)
+
+    @property
+    @util.deprecated(
+        "1.4",
+        "The :attr:`_expression.ColumnElement.anon_label` attribute is now "
+        "private, and the public accessor is deprecated.",
+    )
+    def anon_label(self) -> str:
+        return self._anon_name_label
+
+    @property
+    @util.deprecated(
+        "1.4",
+        "The :attr:`_expression.ColumnElement.anon_key_label` attribute is "
+        "now private, and the public accessor is deprecated.",
+    )
+    def anon_key_label(self) -> str:
+        return self._anon_key_label
+
+    def _dedupe_anon_label_idx(self, idx: int) -> str:
+        """label to apply to a column that is anon labeled, but repeated
+        in the SELECT, so that we have to make an "extra anon" label that
+        disambiguates it from the previous appearance.
+
+        these labels come out like "foo_bar_id__1" and have double underscores
+        in them.
+
+        """
+        label = getattr(self, "name", None)
+
+        # current convention is that if the element doesn't have a
+        # ".name" (usually because it is not NamedColumn), we try to
+        # use a "table qualified" form for the "dedupe anon" label,
+        # based on the notion that a label like
+        # "CAST(casttest.v1 AS DECIMAL) AS casttest_v1__1" looks better than
+        # "CAST(casttest.v1 AS DECIMAL) AS anon__1"
+
+        if label is None:
+            return self._dedupe_anon_tq_label_idx(idx)
+        else:
+            return self._anon_label(label, add_hash=idx)
+
+    @util.memoized_property
+    def _anon_tq_label(self) -> _anonymous_label:
+        return self._anon_label(getattr(self, "_tq_label", None))
+
+    @util.memoized_property
+    def _anon_tq_key_label(self) -> _anonymous_label:
+        return self._anon_label(getattr(self, "_tq_key_label", None))
+
+    def _dedupe_anon_tq_label_idx(self, idx: int) -> _anonymous_label:
+        label = getattr(self, "_tq_label", None) or "anon"
+
+        return self._anon_label(label, add_hash=idx)
+
+
+class KeyedColumnElement(ColumnElement[_T]):
+    """ColumnElement where ``.key`` is non-None."""
+
+    _is_keyed_column_element = True
+
+    key: str
+
+
+class WrapsColumnExpression(ColumnElement[_T]):
+    """Mixin that defines a :class:`_expression.ColumnElement`
+    as a wrapper with special
+    labeling behavior for an expression that already has a name.
+
+    .. versionadded:: 1.4
+
+    .. seealso::
+
+        :ref:`change_4449`
+
+    """
+
+    @property
+    def wrapped_column_expression(self) -> ColumnElement[_T]:
+        raise NotImplementedError()
+
+    @util.non_memoized_property
+    def _tq_label(self) -> Optional[str]:
+        wce = self.wrapped_column_expression
+        if hasattr(wce, "_tq_label"):
+            return wce._tq_label
+        else:
+            return None
+
+    @property
+    def _label(self) -> Optional[str]:
+        return self._tq_label
+
+    @property
+    def _non_anon_label(self) -> Optional[str]:
+        return None
+
+    @util.non_memoized_property
+    def _anon_name_label(self) -> str:
+        wce = self.wrapped_column_expression
+
+        # this logic tries to get the WrapsColumnExpression to render
+        # with "<expr> AS <name>", where "<name>" is the natural name
+        # found within the expression itself, e.g.
+        # "CAST(table.foo AS INTEGER) AS foo".
+        if not wce._is_text_clause:
+            nal = wce._non_anon_label
+            if nal:
+                return nal
+            elif hasattr(wce, "_anon_name_label"):
+                return wce._anon_name_label
+        return super()._anon_name_label
+
+    def _dedupe_anon_label_idx(self, idx: int) -> str:
+        wce = self.wrapped_column_expression
+        nal = wce._non_anon_label
+        if nal:
+            return self._anon_label(nal + "_")
+        else:
+            return self._dedupe_anon_tq_label_idx(idx)
+
+    @property
+    def _proxy_key(self):
+        wce = self.wrapped_column_expression
+
+        if not wce._is_text_clause:
+            return wce._proxy_key
+        return super()._proxy_key
+
+
+class BindParameter(roles.InElementRole, KeyedColumnElement[_T]):
+    r"""Represent a "bound expression".
+
+    :class:`.BindParameter` is invoked explicitly using the
+    :func:`.bindparam` function, as in::
+
+        from sqlalchemy import bindparam
+
+        stmt = select(users_table).where(
+            users_table.c.name == bindparam("username")
+        )
+
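+    The value for the above parameter would then typically be supplied at
+    execution time (an illustrative sketch; ``connection`` is assumed)::
+
+        result = connection.execute(stmt, {"username": "wendy"})
+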
+    Detailed discussion of how :class:`.BindParameter` is used is
+    at :func:`.bindparam`.
+
+    .. seealso::
+
+        :func:`.bindparam`
+
+    """
+
+    __visit_name__ = "bindparam"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("key", InternalTraversal.dp_anon_name),
+        ("type", InternalTraversal.dp_type),
+        ("callable", InternalTraversal.dp_plain_dict),
+        ("value", InternalTraversal.dp_plain_obj),
+        ("literal_execute", InternalTraversal.dp_boolean),
+    ]
+
+    key: str
+    type: TypeEngine[_T]
+    value: Optional[_T]
+
+    _is_crud = False
+    _is_bind_parameter = True
+    _key_is_anon = False
+
+    # bindparam implements its own _gen_cache_key() method; however,
+    # we check subclasses for this flag, else no cache key is generated
+    inherit_cache = True
+
+    def __init__(
+        self,
+        key: Optional[str],
+        value: Any = _NoArg.NO_ARG,
+        type_: Optional[_TypeEngineArgument[_T]] = None,
+        unique: bool = False,
+        required: Union[bool, Literal[_NoArg.NO_ARG]] = _NoArg.NO_ARG,
+        quote: Optional[bool] = None,
+        callable_: Optional[Callable[[], Any]] = None,
+        expanding: bool = False,
+        isoutparam: bool = False,
+        literal_execute: bool = False,
+        _compared_to_operator: Optional[OperatorType] = None,
+        _compared_to_type: Optional[TypeEngine[Any]] = None,
+        _is_crud: bool = False,
+    ):
+        if required is _NoArg.NO_ARG:
+            required = value is _NoArg.NO_ARG and callable_ is None
+        if value is _NoArg.NO_ARG:
+            value = None
+
+        if quote is not None:
+            key = quoted_name.construct(key, quote)
+
+        if unique:
+            self.key = _anonymous_label.safe_construct(
+                id(self),
+                (
+                    key
+                    if key is not None
+                    and not isinstance(key, _anonymous_label)
+                    else "param"
+                ),
+                sanitize_key=True,
+            )
+            self._key_is_anon = True
+        elif key:
+            self.key = key
+        else:
+            self.key = _anonymous_label.safe_construct(id(self), "param")
+            self._key_is_anon = True
+
+        # identifying key that won't change across
+        # clones, used to identify the bind's logical
+        # identity
+        self._identifying_key = self.key
+
+        # key that was passed in the first place, used to
+        # generate new keys
+        self._orig_key = key or "param"
+
+        self.unique = unique
+        self.value = value
+        self.callable = callable_
+        self.isoutparam = isoutparam
+        self.required = required
+
+        # indicate an "expanding" parameter; the compiler sets this
+        # automatically in its _render_in_expr_w_bindparam method
+        # for an IN expression
+        self.expanding = expanding
+
+        # this is another hint to help with expanding, and is typically
+        # set in the compiler's _render_in_expr_w_bindparam method for an
+        # IN expression
+        self.expand_op = None
+
+        self.literal_execute = literal_execute
+        if _is_crud:
+            self._is_crud = True
+
+        if type_ is None:
+            if expanding:
+                if value:
+                    check_value = value[0]
+                else:
+                    check_value = type_api._NO_VALUE_IN_LIST
+            else:
+                check_value = value
+            if _compared_to_type is not None:
+                self.type = _compared_to_type.coerce_compared_value(
+                    _compared_to_operator, check_value
+                )
+            else:
+                self.type = type_api._resolve_value_to_type(check_value)
+        elif isinstance(type_, type):
+            self.type = type_()
+        elif is_tuple_type(type_):
+            if value:
+                if expanding:
+                    check_value = value[0]
+                else:
+                    check_value = value
+                cast("BindParameter[typing_Tuple[Any, ...]]", self).type = (
+                    type_._resolve_values_to_types(check_value)
+                )
+            else:
+                cast("BindParameter[typing_Tuple[Any, ...]]", self).type = (
+                    type_
+                )
+        else:
+            self.type = type_
+
+    def _with_value(self, value, maintain_key=False, required=NO_ARG):
+        """Return a copy of this :class:`.BindParameter` with the given value
+        set.
+        """
+        cloned = self._clone(maintain_key=maintain_key)
+        cloned.value = value
+        cloned.callable = None
+        cloned.required = required if required is not NO_ARG else self.required
+        if cloned.type is type_api.NULLTYPE:
+            cloned.type = type_api._resolve_value_to_type(value)
+        return cloned
+
+    @property
+    def effective_value(self) -> Optional[_T]:
+        """Return the value of this bound parameter,
+        taking into account if the ``callable`` parameter
+        was set.
+
+        The ``callable`` value will be evaluated
+        and returned if present, else ``value``.
+
+        """
+        if self.callable:
+            # TODO: set up protocol for bind parameter callable
+            return self.callable()  # type: ignore
+        else:
+            return self.value
+
+    def render_literal_execute(self) -> BindParameter[_T]:
+        """Produce a copy of this bound parameter that will enable the
+        :paramref:`_sql.BindParameter.literal_execute` flag.
+
+        The :paramref:`_sql.BindParameter.literal_execute` flag will
+        have the effect of the parameter rendered in the compiled SQL
+        string using ``[POSTCOMPILE]`` form, which is a special form that
+        is converted to be a rendering of the literal value of the parameter
+        at SQL execution time.    The rationale is to support caching
+        of SQL statement strings that can embed per-statement literal values,
+        such as LIMIT and OFFSET parameters, in the final SQL string that
+        is passed to the DBAPI.   Dialects in particular may want to use
+        this method within custom compilation schemes.
+
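+        E.g. (an illustrative sketch; ``some_table`` is assumed)::
+
+            from sqlalchemy import bindparam, select
+
+            stmt = select(some_table).limit(
+                bindparam("n", 10).render_literal_execute()
+            )
+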
+        .. versionadded:: 1.4.5
+
+        .. seealso::
+
+            :ref:`engine_thirdparty_caching`
+
+        """
+        c = ClauseElement._clone(self)
+        c.literal_execute = True
+        return c
+
+    def _negate_in_binary(self, negated_op, original_op):
+        if self.expand_op is original_op:
+            bind = self._clone()
+            bind.expand_op = negated_op
+            return bind
+        else:
+            return self
+
+    def _with_binary_element_type(self, type_):
+        c = ClauseElement._clone(self)
+        c.type = type_
+        return c
+
+    def _clone(self, maintain_key: bool = False, **kw: Any) -> Self:
+        c = ClauseElement._clone(self, **kw)
+        # ensure all the BindParameter objects stay in cloned set.
+        # in #7823, we changed "clone" so that a clone only keeps a reference
+        # to the "original" element, since for column correspondence, that's
+        # all we need.   However, for BindParam, _cloned_set is used by
+        # the "cache key bind match" lookup, which means if any of those
+        # interim BindParameter objects became part of a cache key in the
+        # cache, we need it.  So here, make sure all clones keep carrying
+        # forward.
+        c._cloned_set.update(self._cloned_set)
+        if not maintain_key and self.unique:
+            c.key = _anonymous_label.safe_construct(
+                id(c), c._orig_key or "param", sanitize_key=True
+            )
+        return c
+
+    def _gen_cache_key(self, anon_map, bindparams):
+        _gen_cache_ok = self.__class__.__dict__.get("inherit_cache", False)
+
+        if not _gen_cache_ok:
+            if anon_map is not None:
+                anon_map[NO_CACHE] = True
+            return None
+
+        id_, found = anon_map.get_anon(self)
+        if found:
+            return (id_, self.__class__)
+
+        if bindparams is not None:
+            bindparams.append(self)
+
+        return (
+            id_,
+            self.__class__,
+            self.type._static_cache_key,
+            self.key % anon_map if self._key_is_anon else self.key,
+            self.literal_execute,
+        )
+
+    def _convert_to_unique(self):
+        if not self.unique:
+            self.unique = True
+            self.key = _anonymous_label.safe_construct(
+                id(self), self._orig_key or "param", sanitize_key=True
+            )
+
+    def __getstate__(self):
+        """execute a deferred value for serialization purposes."""
+
+        d = self.__dict__.copy()
+        v = self.value
+        if self.callable:
+            v = self.callable()
+            d["callable"] = None
+        d["value"] = v
+        return d
+
+    def __setstate__(self, state):
+        if state.get("unique", False):
+            state["key"] = _anonymous_label.safe_construct(
+                id(self), state.get("_orig_key", "param"), sanitize_key=True
+            )
+        self.__dict__.update(state)
+
+    def __repr__(self):
+        return "%s(%r, %r, type_=%r)" % (
+            self.__class__.__name__,
+            self.key,
+            self.value,
+            self.type,
+        )
+
+
+class TypeClause(DQLDMLClauseElement):
+    """Handle a type keyword in a SQL statement.
+
+    Used by the ``Case`` construct.
+
+    """
+
+    __visit_name__ = "typeclause"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("type", InternalTraversal.dp_type)
+    ]
+
+    def __init__(self, type_):
+        self.type = type_
+
+
+class TextClause(
+    roles.DDLConstraintColumnRole,
+    roles.DDLExpressionRole,
+    roles.StatementOptionRole,
+    roles.WhereHavingRole,
+    roles.OrderByRole,
+    roles.FromClauseRole,
+    roles.SelectStatementRole,
+    roles.InElementRole,
+    Generative,
+    Executable,
+    DQLDMLClauseElement,
+    roles.BinaryElementRole[Any],
+    inspection.Inspectable["TextClause"],
+):
+    """Represent a literal SQL text fragment.
+
+    E.g.::
+
+        from sqlalchemy import text
+
+        t = text("SELECT * FROM users")
+        result = connection.execute(t)
+
+    The :class:`_expression.TextClause` construct is produced using the
+    :func:`_expression.text`
+    function; see that function for full documentation.
+
+    .. seealso::
+
+        :func:`_expression.text`
+
+    """
+
+    __visit_name__ = "textclause"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("_bindparams", InternalTraversal.dp_string_clauseelement_dict),
+        ("text", InternalTraversal.dp_string),
+    ]
+
+    _is_text_clause = True
+
+    _is_textual = True
+
+    _bind_params_regex = re.compile(r"(?<![:\w\x5c]):(\w+)(?!:)", re.UNICODE)
+    _is_implicitly_boolean = False
+
+    _render_label_in_columns_clause = False
+
+    _omit_from_statements = False
+
+    _is_collection_aggregate = False
+
+    @property
+    def _hide_froms(self) -> Iterable[FromClause]:
+        return ()
+
+    def __and__(self, other):
+        # support use in select.where(), query.filter()
+        return and_(self, other)
+
+    @property
+    def _select_iterable(self) -> _SelectIterable:
+        return (self,)
+
+    # help in those cases where text() is
+    # interpreted in a column expression situation
+    key: Optional[str] = None
+    _label: Optional[str] = None
+
+    _allow_label_resolve = False
+
+    @property
+    def _is_star(self):
+        return self.text == "*"
+
+    def __init__(self, text: str):
+        self._bindparams: Dict[str, BindParameter[Any]] = {}
+
+        def repl(m):
+            self._bindparams[m.group(1)] = BindParameter(m.group(1))
+            return ":%s" % m.group(1)
+
+        # scan the string and search for bind parameter names, add them
+        # to the list of bindparams
+        self.text = self._bind_params_regex.sub(repl, text)
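+        # e.g. (an illustrative sketch): text("x=:x AND y > :y") collects
+        # bound parameters "x" and "y" here; escaped colons ("\:q") and
+        # double-colon casts ("t::int") are deliberately not matched by
+        # the regex above.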
+
+    @_generative
+    def bindparams(
+        self,
+        *binds: BindParameter[Any],
+        **names_to_values: Any,
+    ) -> Self:
+        """Establish the values and/or types of bound parameters within
+        this :class:`_expression.TextClause` construct.
+
+        Given a text construct such as::
+
+            from sqlalchemy import text
+
+            stmt = text(
+                "SELECT id, name FROM user WHERE name=:name AND timestamp=:timestamp"
+            )
+
+        the :meth:`_expression.TextClause.bindparams`
+        method can be used to establish
+        the initial value of ``:name`` and ``:timestamp``,
+        using simple keyword arguments::
+
+            stmt = stmt.bindparams(
+                name="jack", timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5)
+            )
+
+        Where above, new :class:`.BindParameter` objects
+        will be generated with the names ``name`` and ``timestamp``, and
+        values of ``jack`` and ``datetime.datetime(2012, 10, 8, 15, 12, 5)``,
+        respectively.  The types will be
+        inferred from the values given, in this case :class:`.String` and
+        :class:`.DateTime`.
+
+        When specific typing behavior is needed, the positional ``*binds``
+        argument can be used in which to specify :func:`.bindparam` constructs
+        directly.  These constructs must include at least the ``key``
+        argument, then an optional value and type::
+
+            from sqlalchemy import bindparam
+
+            stmt = stmt.bindparams(
+                bindparam("name", value="jack", type_=String),
+                bindparam("timestamp", type_=DateTime),
+            )
+
+        Above, we specified the type of :class:`.DateTime` for the
+        ``timestamp`` bind, and the type of :class:`.String` for the ``name``
+        bind.  In the case of ``name`` we also set the default value of
+        ``"jack"``.
+
+        Additional bound parameters can be supplied at statement execution
+        time, e.g.::
+
+            result = connection.execute(
+                stmt, {"timestamp": datetime.datetime(2012, 10, 8, 15, 12, 5)}
+            )
+
+        The :meth:`_expression.TextClause.bindparams`
+        method can be called repeatedly,
+        where it will re-use existing :class:`.BindParameter` objects to add
+        new information.  For example, we can call
+        :meth:`_expression.TextClause.bindparams`
+        first with typing information, and a
+        second time with value information, and it will be combined::
+
+            stmt = text(
+                "SELECT id, name FROM user WHERE name=:name "
+                "AND timestamp=:timestamp"
+            )
+            stmt = stmt.bindparams(
+                bindparam("name", type_=String), bindparam("timestamp", type_=DateTime)
+            )
+            stmt = stmt.bindparams(
+                name="jack", timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5)
+            )
+
+        The :meth:`_expression.TextClause.bindparams`
+        method also supports the concept of
+        **unique** bound parameters.  These are parameters that are
+        "uniquified" on name at statement compilation time, so that  multiple
+        :func:`_expression.text`
+        constructs may be combined together without the names
+        conflicting.  To use this feature, specify the
+        :paramref:`.BindParameter.unique` flag on each :func:`.bindparam`
+        object::
+
+            stmt1 = text("select id from table where name=:name").bindparams(
+                bindparam("name", value="name1", unique=True)
+            )
+            stmt2 = text("select id from table where name=:name").bindparams(
+                bindparam("name", value="name2", unique=True)
+            )
+
+            union = union_all(stmt1.columns(column("id")), stmt2.columns(column("id")))
+
+        The above statement will render as:
+
+        .. sourcecode:: sql
+
+            select id from table where name=:name_1
+            UNION ALL select id from table where name=:name_2
+
+        .. versionadded:: 1.3.11  Added support for the
+           :paramref:`.BindParameter.unique` flag to work with
+           :func:`_expression.text`
+           constructs.
+
+        """  # noqa: E501
+        self._bindparams = new_params = self._bindparams.copy()
+
+        for bind in binds:
+            try:
+                # the regex used for text() currently will not match
+                # a unique/anonymous key in any case, so use the _orig_key
+                # so that a text() construct can support unique parameters
+                existing = new_params[bind._orig_key]
+            except KeyError as err:
+                raise exc.ArgumentError(
+                    "This text() construct doesn't define a "
+                    "bound parameter named %r" % bind._orig_key
+                ) from err
+            else:
+                new_params[existing._orig_key] = bind
+
+        for key, value in names_to_values.items():
+            try:
+                existing = new_params[key]
+            except KeyError as err:
+                raise exc.ArgumentError(
+                    "This text() construct doesn't define a "
+                    "bound parameter named %r" % key
+                ) from err
+            else:
+                new_params[key] = existing._with_value(value, required=False)
+        return self
+
+    @util.preload_module("sqlalchemy.sql.selectable")
+    def columns(
+        self,
+        *cols: _ColumnExpressionArgument[Any],
+        **types: _TypeEngineArgument[Any],
+    ) -> TextualSelect:
+        r"""Turn this :class:`_expression.TextClause` object into a
+        :class:`_expression.TextualSelect`
+        object that serves the same role as a SELECT
+        statement.
+
+        The :class:`_expression.TextualSelect` is part of the
+        :class:`_expression.SelectBase`
+        hierarchy and can be embedded into another statement by using the
+        :meth:`_expression.TextualSelect.subquery` method to produce a
+        :class:`.Subquery`
+        object, which can then be SELECTed from.
+
+        This function essentially bridges the gap between an entirely
+        textual SELECT statement and the SQL expression language concept
+        of a "selectable"::
+
+            from sqlalchemy.sql import column, text
+
+            stmt = text("SELECT id, name FROM some_table")
+            stmt = stmt.columns(column("id"), column("name")).subquery("st")
+
+            stmt = (
+                select(mytable)
+                .select_from(mytable.join(stmt, mytable.c.name == stmt.c.name))
+                .where(stmt.c.id > 5)
+            )
+
+        Above, we pass a series of :func:`_expression.column` elements to the
+        :meth:`_expression.TextClause.columns` method positionally.  These
+        :func:`_expression.column`
+        elements now become first class elements upon the
+        :attr:`_expression.TextualSelect.selected_columns` column collection,
+        which then
+        become part of the :attr:`.Subquery.c` collection after
+        :meth:`_expression.TextualSelect.subquery` is invoked.
+
+        The column expressions we pass to
+        :meth:`_expression.TextClause.columns` may
+        also be typed; when we do so, these :class:`.TypeEngine` objects become
+        the effective return type of the column, so that SQLAlchemy's
+        result-set-processing systems may be used on the return values.
+        This is often needed for types such as date or boolean types, as well
+        as for unicode processing on some dialect configurations::
+
+            stmt = text("SELECT id, name, timestamp FROM some_table")
+            stmt = stmt.columns(
+                column("id", Integer),
+                column("name", Unicode),
+                column("timestamp", DateTime),
+            )
+
+            for id, name, timestamp in connection.execute(stmt):
+                print(id, name, timestamp)
+
+        As a shortcut to the above syntax, keyword arguments referring to
+        types alone may be used, if only type conversion is needed::
+
+            stmt = text("SELECT id, name, timestamp FROM some_table")
+            stmt = stmt.columns(id=Integer, name=Unicode, timestamp=DateTime)
+
+            for id, name, timestamp in connection.execute(stmt):
+                print(id, name, timestamp)
+
+        The positional form of :meth:`_expression.TextClause.columns`
+        also provides the
+        unique feature of **positional column targeting**, which is
+        particularly useful when using the ORM with complex textual queries. If
+        we specify the columns from our model to
+        :meth:`_expression.TextClause.columns`,
+        the result set will match to those columns positionally, meaning the
+        name or origin of the column in the textual SQL doesn't matter::
+
+            stmt = text(
+                "SELECT users.id, addresses.id, users.id, "
+                "users.name, addresses.email_address AS email "
+                "FROM users JOIN addresses ON users.id=addresses.user_id "
+                "WHERE users.id = 1"
+            ).columns(
+                User.id,
+                Address.id,
+                Address.user_id,
+                User.name,
+                Address.email_address,
+            )
+
+            query = (
+                session.query(User)
+                .from_statement(stmt)
+                .options(contains_eager(User.addresses))
+            )
+
+        The :meth:`_expression.TextClause.columns` method provides a direct
+        route to calling :meth:`_expression.FromClause.subquery` as well as
+        :meth:`_expression.SelectBase.cte`
+        against a textual SELECT statement::
+
+            stmt = stmt.columns(id=Integer, name=String).cte("st")
+
+            stmt = select(sometable).where(sometable.c.id == stmt.c.id)
+
+        :param \*cols: A series of :class:`_expression.ColumnElement` objects,
+         typically
+         :class:`_schema.Column` objects from a :class:`_schema.Table`
+         or ORM level
+         column-mapped attributes, representing a set of columns that this
+         textual string will SELECT from.
+
+        :param \**types: A mapping of string names to :class:`.TypeEngine`
+         type objects indicating the datatypes to use for names that are
+         SELECTed from the textual string.  Prefer to use the ``*cols``
+         argument as it also indicates positional ordering.
+
+        """
+        selectable = util.preloaded.sql_selectable
+
+        input_cols: List[NamedColumn[Any]] = [
+            coercions.expect(roles.LabeledColumnExprRole, col) for col in cols
+        ]
+
+        positional_input_cols = [
+            (
+                ColumnClause(col.key, types.pop(col.key))
+                if col.key in types
+                else col
+            )
+            for col in input_cols
+        ]
+        keyed_input_cols: List[NamedColumn[Any]] = [
+            ColumnClause(key, type_) for key, type_ in types.items()
+        ]
+
+        elem = selectable.TextualSelect.__new__(selectable.TextualSelect)
+        elem._init(
+            self,
+            positional_input_cols + keyed_input_cols,
+            positional=bool(positional_input_cols) and not keyed_input_cols,
+        )
+        return elem
+
+    @property
+    def type(self) -> TypeEngine[Any]:
+        return type_api.NULLTYPE
+
+    @property
+    def comparator(self):
+        # TODO: this seems wrong, it seems like we might not
+        # be using this method.
+        return self.type.comparator_factory(self)  # type: ignore
+
+    def self_group(
+        self, against: Optional[OperatorType] = None
+    ) -> Union[Self, Grouping[Any]]:
+        if against is operators.in_op:
+            return Grouping(self)
+        else:
+            return self
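+
+
+# Illustrative sketch of TextClause.columns(): run a textual SELECT with
+# Python-side type information applied via keyword arguments.  The SQLite
+# URL and the "some_table" name are assumptions made for this example only.
+def _example_text_columns() -> None:
+    from sqlalchemy import Integer, String, create_engine, text
+
+    engine = create_engine("sqlite://")
+    with engine.begin() as conn:
+        conn.exec_driver_sql(
+            "CREATE TABLE some_table (id INTEGER, name VARCHAR)"
+        )
+        conn.exec_driver_sql("INSERT INTO some_table VALUES (1, 'spongebob')")
+        stmt = text("SELECT id, name FROM some_table").columns(
+            id=Integer, name=String
+        )
+        for id_, name in conn.execute(stmt):
+            print(id_, name)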
+
+
+class Null(SingletonConstant, roles.ConstExprRole[None], ColumnElement[None]):
+    """Represent the NULL keyword in a SQL statement.
+
+    :class:`.Null` is accessed as a constant via the
+    :func:`.null` function.
+
+    """
+
+    __visit_name__ = "null"
+
+    _traverse_internals: _TraverseInternalsType = []
+    _singleton: Null
+
+    if not TYPE_CHECKING:
+
+        @util.memoized_property
+        def type(self) -> TypeEngine[_T]:  # noqa: A001
+            return type_api.NULLTYPE
+
+    @classmethod
+    def _instance(cls) -> Null:
+        """Return a constant :class:`.Null` construct."""
+
+        return Null._singleton
+
+
+Null._create_singleton()
+
+
+class False_(
+    SingletonConstant, roles.ConstExprRole[bool], ColumnElement[bool]
+):
+    """Represent the ``false`` keyword, or equivalent, in a SQL statement.
+
+    :class:`.False_` is accessed as a constant via the
+    :func:`.false` function.
+
+    """
+
+    __visit_name__ = "false"
+    _traverse_internals: _TraverseInternalsType = []
+    _singleton: False_
+
+    if not TYPE_CHECKING:
+
+        @util.memoized_property
+        def type(self) -> TypeEngine[_T]:  # noqa: A001
+            return type_api.BOOLEANTYPE
+
+    def _negate(self) -> True_:
+        return True_._singleton
+
+    @classmethod
+    def _instance(cls) -> False_:
+        return False_._singleton
+
+
+False_._create_singleton()
+
+
+class True_(SingletonConstant, roles.ConstExprRole[bool], ColumnElement[bool]):
+    """Represent the ``true`` keyword, or equivalent, in a SQL statement.
+
+    :class:`.True_` is accessed as a constant via the
+    :func:`.true` function.
+
+    """
+
+    __visit_name__ = "true"
+
+    _traverse_internals: _TraverseInternalsType = []
+    _singleton: True_
+
+    if not TYPE_CHECKING:
+
+        @util.memoized_property
+        def type(self) -> TypeEngine[_T]:  # noqa: A001
+            return type_api.BOOLEANTYPE
+
+    def _negate(self) -> False_:
+        return False_._singleton
+
+    @classmethod
+    def _ifnone(
+        cls, other: Optional[ColumnElement[Any]]
+    ) -> ColumnElement[Any]:
+        if other is None:
+            return cls._instance()
+        else:
+            return other
+
+    @classmethod
+    def _instance(cls) -> True_:
+        return True_._singleton
+
+
+True_._create_singleton()
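+
+
+# Illustrative sketch: null(), true() and false() return interned
+# SingletonConstant instances, and negation swaps between the boolean
+# singletons via the _negate() hooks defined above.
+def _example_constant_singletons() -> None:
+    from sqlalchemy import column, false, null, select, true
+
+    # each factory returns the same singleton on every call
+    assert true() is true() and false() is false() and null() is null()
+    # ~true() resolves to the false() singleton through True_._negate()
+    assert (~true()) is false()
+    print(select(column("x")).where(true()))  # SELECT x WHERE true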
+
+
+class ClauseList(
+    roles.InElementRole,
+    roles.OrderByRole,
+    roles.ColumnsClauseRole,
+    roles.DMLColumnRole,
+    DQLDMLClauseElement,
+):
+    """Describe a list of clauses, separated by an operator.
+
+    By default, the list is comma-separated, as in a column listing.
+
+    """
+
+    __visit_name__ = "clauselist"
+
+    # this is used only by the ORM in a legacy use case for
+    # composite attributes
+    _is_clause_list = True
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("clauses", InternalTraversal.dp_clauseelement_list),
+        ("operator", InternalTraversal.dp_operator),
+    ]
+
+    clauses: List[ColumnElement[Any]]
+
+    def __init__(
+        self,
+        *clauses: _ColumnExpressionArgument[Any],
+        operator: OperatorType = operators.comma_op,
+        group: bool = True,
+        group_contents: bool = True,
+        _literal_as_text_role: Type[roles.SQLRole] = roles.WhereHavingRole,
+    ):
+        self.operator = operator
+        self.group = group
+        self.group_contents = group_contents
+        clauses_iterator: Iterable[_ColumnExpressionArgument[Any]] = clauses
+        text_converter_role: Type[roles.SQLRole] = _literal_as_text_role
+        self._text_converter_role = text_converter_role
+
+        if self.group_contents:
+            self.clauses = [
+                coercions.expect(
+                    text_converter_role, clause, apply_propagate_attrs=self
+                ).self_group(against=self.operator)
+                for clause in clauses_iterator
+            ]
+        else:
+            self.clauses = [
+                coercions.expect(
+                    text_converter_role, clause, apply_propagate_attrs=self
+                )
+                for clause in clauses_iterator
+            ]
+        self._is_implicitly_boolean = operators.is_boolean(self.operator)
+
+    @classmethod
+    def _construct_raw(
+        cls,
+        operator: OperatorType,
+        clauses: Optional[Sequence[ColumnElement[Any]]] = None,
+    ) -> ClauseList:
+        self = cls.__new__(cls)
+        self.clauses = list(clauses) if clauses else []
+        self.group = True
+        self.operator = operator
+        self.group_contents = True
+        self._is_implicitly_boolean = False
+        return self
+
+    def __iter__(self) -> Iterator[ColumnElement[Any]]:
+        return iter(self.clauses)
+
+    def __len__(self) -> int:
+        return len(self.clauses)
+
+    @property
+    def _select_iterable(self) -> _SelectIterable:
+        return itertools.chain.from_iterable(
+            [elem._select_iterable for elem in self.clauses]
+        )
+
+    def append(self, clause):
+        if self.group_contents:
+            self.clauses.append(
+                coercions.expect(self._text_converter_role, clause).self_group(
+                    against=self.operator
+                )
+            )
+        else:
+            self.clauses.append(
+                coercions.expect(self._text_converter_role, clause)
+            )
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return list(itertools.chain(*[c._from_objects for c in self.clauses]))
+
+    def self_group(
+        self, against: Optional[OperatorType] = None
+    ) -> Union[Self, Grouping[Any]]:
+        if self.group and operators.is_precedent(self.operator, against):
+            return Grouping(self)
+        else:
+            return self
+
+
+class OperatorExpression(ColumnElement[_T]):
+    """base for expressions that contain an operator and operands
+
+    .. versionadded:: 2.0
+
+    """
+
+    operator: OperatorType
+    type: TypeEngine[_T]
+
+    group: bool = True
+
+    @property
+    def is_comparison(self):
+        return operators.is_comparison(self.operator)
+
+    def self_group(
+        self, against: Optional[OperatorType] = None
+    ) -> Union[Self, Grouping[_T]]:
+        if (
+            self.group
+            and operators.is_precedent(self.operator, against)
+            or (
+                # a negate against a non-boolean operator
+                # doesn't make too much sense but we should
+                # group for that
+                against is operators.inv
+                and not operators.is_boolean(self.operator)
+            )
+        ):
+            return Grouping(self)
+        else:
+            return self
+
+    @property
+    def _flattened_operator_clauses(
+        self,
+    ) -> typing_Tuple[ColumnElement[Any], ...]:
+        raise NotImplementedError()
+
+    @classmethod
+    def _construct_for_op(
+        cls,
+        left: ColumnElement[Any],
+        right: ColumnElement[Any],
+        op: OperatorType,
+        *,
+        type_: TypeEngine[_T],
+        negate: Optional[OperatorType] = None,
+        modifiers: Optional[Mapping[str, Any]] = None,
+    ) -> OperatorExpression[_T]:
+        if operators.is_associative(op):
+            assert (
+                negate is None
+            ), f"negate not supported for associative operator {op}"
+
+            multi = False
+            if getattr(
+                left, "operator", None
+            ) is op and type_._compare_type_affinity(left.type):
+                multi = True
+                left_flattened = left._flattened_operator_clauses
+            else:
+                left_flattened = (left,)
+
+            if getattr(
+                right, "operator", None
+            ) is op and type_._compare_type_affinity(right.type):
+                multi = True
+                right_flattened = right._flattened_operator_clauses
+            else:
+                right_flattened = (right,)
+
+            if multi:
+                return ExpressionClauseList._construct_for_list(
+                    op,
+                    type_,
+                    *(left_flattened + right_flattened),
+                )
+
+        if right._is_collection_aggregate:
+            negate = None
+
+        return BinaryExpression(
+            left, right, op, type_=type_, negate=negate, modifiers=modifiers
+        )
+
+
+class ExpressionClauseList(OperatorExpression[_T]):
+    """Describe a list of clauses, separated by an operator,
+    in a column expression context.
+
+    :class:`.ExpressionClauseList` differs from :class:`.ClauseList` in that
+    it represents a column-oriented DQL expression only, not an open-ended
+    list of arbitrary comma-separated elements.
+
+    .. versionadded:: 2.0
+
+    """
+
+    __visit_name__ = "expression_clauselist"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("clauses", InternalTraversal.dp_clauseelement_tuple),
+        ("operator", InternalTraversal.dp_operator),
+    ]
+
+    clauses: typing_Tuple[ColumnElement[Any], ...]
+
+    group: bool
+
+    def __init__(
+        self,
+        operator: OperatorType,
+        *clauses: _ColumnExpressionArgument[Any],
+        type_: Optional[_TypeEngineArgument[_T]] = None,
+    ):
+        self.operator = operator
+
+        self.clauses = tuple(
+            coercions.expect(
+                roles.ExpressionElementRole, clause, apply_propagate_attrs=self
+            )
+            for clause in clauses
+        )
+        self._is_implicitly_boolean = operators.is_boolean(self.operator)
+        self.type = type_api.to_instance(type_)  # type: ignore
+
+    @property
+    def _flattened_operator_clauses(
+        self,
+    ) -> typing_Tuple[ColumnElement[Any], ...]:
+        return self.clauses
+
+    def __iter__(self) -> Iterator[ColumnElement[Any]]:
+        return iter(self.clauses)
+
+    def __len__(self) -> int:
+        return len(self.clauses)
+
+    @property
+    def _select_iterable(self) -> _SelectIterable:
+        return (self,)
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return list(itertools.chain(*[c._from_objects for c in self.clauses]))
+
+    def _append_inplace(self, clause: ColumnElement[Any]) -> None:
+        self.clauses += (clause,)
+
+    @classmethod
+    def _construct_for_list(
+        cls,
+        operator: OperatorType,
+        type_: TypeEngine[_T],
+        *clauses: ColumnElement[Any],
+        group: bool = True,
+    ) -> ExpressionClauseList[_T]:
+        self = cls.__new__(cls)
+        self.group = group
+        if group:
+            self.clauses = tuple(
+                c.self_group(against=operator) for c in clauses
+            )
+        else:
+            self.clauses = clauses
+        self.operator = operator
+        self.type = type_
+        for c in clauses:
+            if c._propagate_attrs:
+                self._propagate_attrs = c._propagate_attrs
+                break
+        return self
+
+    def _negate(self) -> Any:
+        grouped = self.self_group(against=operators.inv)
+        assert isinstance(grouped, ColumnElement)
+        return UnaryExpression(
+            grouped, operator=operators.inv, wraps_column_expression=True
+        )
+
+
+class BooleanClauseList(ExpressionClauseList[bool]):
+    __visit_name__ = "expression_clauselist"
+    inherit_cache = True
+
+    def __init__(self, *arg, **kw):
+        raise NotImplementedError(
+            "BooleanClauseList has a private constructor"
+        )
+
+    @classmethod
+    def _process_clauses_for_boolean(
+        cls,
+        operator: OperatorType,
+        continue_on: Any,
+        skip_on: Any,
+        clauses: Iterable[ColumnElement[Any]],
+    ) -> typing_Tuple[int, List[ColumnElement[Any]]]:
+        has_continue_on = None
+
+        convert_clauses = []
+
+        against = operators._asbool
+        lcc = 0
+
+        for clause in clauses:
+            if clause is continue_on:
+                # an instance of continue_on, e.g. and_(x, y, True, z);
+                # remember it, so that it can serve as the result if no
+                # other expressions are present.
+                has_continue_on = clause
+            elif clause is skip_on:
+                # instance of skip_on, e.g. and_(x, y, False, z), cancels
+                # the rest out
+                convert_clauses = [clause]
+                lcc = 1
+                break
+            else:
+                if not lcc:
+                    lcc = 1
+                else:
+                    against = operator
+                    # technically this would be len(convert_clauses) + 1;
+                    # however, it only needs to indicate "greater than one"
+                    lcc = 2
+                convert_clauses.append(clause)
+
+        if not convert_clauses and has_continue_on is not None:
+            convert_clauses = [has_continue_on]
+            lcc = 1
+
+        return lcc, [c.self_group(against=against) for c in convert_clauses]
+
+    @classmethod
+    def _construct(
+        cls,
+        operator: OperatorType,
+        continue_on: Any,
+        skip_on: Any,
+        initial_clause: Any = _NoArg.NO_ARG,
+        *clauses: Any,
+        **kw: Any,
+    ) -> ColumnElement[Any]:
+        if initial_clause is _NoArg.NO_ARG:
+            # no elements period.  deprecated use case.  return an empty
+            # ClauseList construct that generates nothing unless it has
+            # elements added to it.
+            name = operator.__name__
+
+            util.warn_deprecated(
+                f"Invoking {name}() without arguments is deprecated, and "
+                f"will be disallowed in a future release.   For an empty "
+                f"""{name}() construct, use '{name}({
+                    'true()' if continue_on is True_._singleton else 'false()'
+                }, *args)' """
+                f"""or '{name}({
+                    'True' if continue_on is True_._singleton else 'False'
+                }, *args)'.""",
+                version="1.4",
+            )
+            return cls._construct_raw(operator)
+
+        lcc, convert_clauses = cls._process_clauses_for_boolean(
+            operator,
+            continue_on,
+            skip_on,
+            [
+                coercions.expect(roles.WhereHavingRole, clause)
+                for clause in util.coerce_generator_arg(
+                    (initial_clause,) + clauses
+                )
+            ],
+        )
+
+        if lcc > 1:
+            # multiple elements.  Return regular BooleanClauseList
+            # which will link elements against the operator.
+
+            flattened_clauses = itertools.chain.from_iterable(
+                (
+                    (c for c in to_flat._flattened_operator_clauses)
+                    if getattr(to_flat, "operator", None) is operator
+                    else (to_flat,)
+                )
+                for to_flat in convert_clauses
+            )
+
+            return cls._construct_raw(operator, flattened_clauses)  # type: ignore # noqa: E501
+        else:
+            assert lcc
+            # just one element.  return it as a single boolean element,
+            # not a list and discard the operator.
+            return convert_clauses[0]
+
+    @classmethod
+    def _construct_for_whereclause(
+        cls, clauses: Iterable[ColumnElement[Any]]
+    ) -> Optional[ColumnElement[bool]]:
+        operator, continue_on, skip_on = (
+            operators.and_,
+            True_._singleton,
+            False_._singleton,
+        )
+
+        lcc, convert_clauses = cls._process_clauses_for_boolean(
+            operator,
+            continue_on,
+            skip_on,
+            clauses,  # these are assumed to be coerced already
+        )
+
+        if lcc > 1:
+            # multiple elements.  Return regular BooleanClauseList
+            # which will link elements against the operator.
+            return cls._construct_raw(operator, convert_clauses)
+        elif lcc == 1:
+            # just one element.  return it as a single boolean element,
+            # not a list and discard the operator.
+            return convert_clauses[0]
+        else:
+            return None
+
+    @classmethod
+    def _construct_raw(
+        cls,
+        operator: OperatorType,
+        clauses: Optional[Sequence[ColumnElement[Any]]] = None,
+    ) -> BooleanClauseList:
+        self = cls.__new__(cls)
+        self.clauses = tuple(clauses) if clauses else ()
+        self.group = True
+        self.operator = operator
+        self.type = type_api.BOOLEANTYPE
+        self._is_implicitly_boolean = True
+        return self
+
+    @classmethod
+    def and_(
+        cls,
+        initial_clause: Union[
+            Literal[True], _ColumnExpressionArgument[bool], _NoArg
+        ] = _NoArg.NO_ARG,
+        *clauses: _ColumnExpressionArgument[bool],
+    ) -> ColumnElement[bool]:
+        r"""Produce a conjunction of expressions joined by ``AND``.
+
+        See :func:`_sql.and_` for full documentation.
+        """
+        return cls._construct(
+            operators.and_,
+            True_._singleton,
+            False_._singleton,
+            initial_clause,
+            *clauses,
+        )
+
+    @classmethod
+    def or_(
+        cls,
+        initial_clause: Union[
+            Literal[False], _ColumnExpressionArgument[bool], _NoArg
+        ] = _NoArg.NO_ARG,
+        *clauses: _ColumnExpressionArgument[bool],
+    ) -> ColumnElement[bool]:
+        """Produce a conjunction of expressions joined by ``OR``.
+
+        See :func:`_sql.or_` for full documentation.
+        """
+        return cls._construct(
+            operators.or_,
+            False_._singleton,
+            True_._singleton,
+            initial_clause,
+            *clauses,
+        )
+
+    @property
+    def _select_iterable(self) -> _SelectIterable:
+        return (self,)
+
+    def self_group(
+        self, against: Optional[OperatorType] = None
+    ) -> Union[Self, Grouping[bool]]:
+        if not self.clauses:
+            return self
+        else:
+            return super().self_group(against=against)
+
+
+and_ = BooleanClauseList.and_
+or_ = BooleanClauseList.or_
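+
+
+# Illustrative sketch of the and_() / or_() behavior implemented above:
+# same-operator nesting is flattened, and the continue_on / skip_on
+# singletons either drop out or short-circuit the whole expression.
+def _example_and_or() -> None:
+    from sqlalchemy import and_, column, false, or_
+
+    a, b, c = column("a"), column("b"), column("c")
+    # nested and_() flattens: a > :a_1 AND b > :b_1 AND c > :c_1
+    print(and_(a > 1, and_(b > 2, c > 3)))
+    # false() is the "continue" value for OR, so it is simply dropped
+    print(or_(a > 1, false()))
+    # ...but it is the "skip" value for AND, cancelling the other clauses
+    print(and_(a > 1, false()))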
+
+
+class Tuple(ClauseList, ColumnElement[typing_Tuple[Any, ...]]):
+    """Represent a SQL tuple."""
+
+    __visit_name__ = "tuple"
+
+    _traverse_internals: _TraverseInternalsType = (
+        ClauseList._traverse_internals + []
+    )
+
+    type: TupleType
+
+    @util.preload_module("sqlalchemy.sql.sqltypes")
+    def __init__(
+        self,
+        *clauses: _ColumnExpressionArgument[Any],
+        types: Optional[Sequence[_TypeEngineArgument[Any]]] = None,
+    ):
+        sqltypes = util.preloaded.sql_sqltypes
+
+        if types is None:
+            init_clauses: List[ColumnElement[Any]] = [
+                coercions.expect(roles.ExpressionElementRole, c)
+                for c in clauses
+            ]
+        else:
+            if len(types) != len(clauses):
+                raise exc.ArgumentError(
+                    "Wrong number of elements for %d-tuple: %r "
+                    % (len(types), clauses)
+                )
+            init_clauses = [
+                coercions.expect(
+                    roles.ExpressionElementRole,
+                    c,
+                    type_=typ if not typ._isnull else None,
+                )
+                for typ, c in zip(types, clauses)
+            ]
+
+        self.type = sqltypes.TupleType(*[arg.type for arg in init_clauses])
+        super().__init__(*init_clauses)
+
+    @property
+    def _select_iterable(self) -> _SelectIterable:
+        return (self,)
+
+    def _bind_param(self, operator, obj, type_=None, expanding=False):
+        if expanding:
+            return BindParameter(
+                None,
+                value=obj,
+                _compared_to_operator=operator,
+                unique=True,
+                expanding=True,
+                type_=type_,
+                _compared_to_type=self.type,
+            )
+        else:
+            return Tuple(
+                *[
+                    BindParameter(
+                        None,
+                        o,
+                        _compared_to_operator=operator,
+                        _compared_to_type=compared_to_type,
+                        unique=True,
+                        type_=type_,
+                    )
+                    for o, compared_to_type in zip(obj, self.type.types)
+                ]
+            )
+
+    def self_group(self, against: Optional[OperatorType] = None) -> Self:
+        # Tuple is parenthesized by definition.
+        return self
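+
+
+# Illustrative sketch: tuple_() produces a Tuple, most commonly used for
+# composite IN comparisons; actually executing one requires a backend with
+# tuple support such as PostgreSQL.  The column names are placeholders.
+def _example_tuple_in() -> None:
+    from sqlalchemy import column, tuple_
+
+    t = tuple_(column("x"), column("y"))
+    # renders (x, y) IN (...) using a single expanding bound parameter
+    print(t.in_([(1, "a"), (2, "b")]))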
+
+
+class Case(ColumnElement[_T]):
+    """Represent a ``CASE`` expression.
+
+    :class:`.Case` is produced using the :func:`.case` factory function,
+    as in::
+
+        from sqlalchemy import case
+
+        stmt = select(users_table).where(
+            case(
+                (users_table.c.name == "wendy", "W"),
+                (users_table.c.name == "jack", "J"),
+                else_="E",
+            )
+        )
+
+    Details on :class:`.Case` usage are at :func:`.case`.
+
+    .. seealso::
+
+        :func:`.case`
+
+    """
+
+    __visit_name__ = "case"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("value", InternalTraversal.dp_clauseelement),
+        ("whens", InternalTraversal.dp_clauseelement_tuples),
+        ("else_", InternalTraversal.dp_clauseelement),
+    ]
+
+    # for case(), the type is derived from the whens.  so for the moment
+    # users would have to cast() the case to get a specific type
+
+    whens: List[typing_Tuple[ColumnElement[bool], ColumnElement[_T]]]
+    else_: Optional[ColumnElement[_T]]
+    value: Optional[ColumnElement[Any]]
+
+    def __init__(
+        self,
+        *whens: Union[
+            typing_Tuple[_ColumnExpressionArgument[bool], Any],
+            Mapping[Any, Any],
+        ],
+        value: Optional[Any] = None,
+        else_: Optional[Any] = None,
+    ):
+        new_whens: Iterable[Any] = coercions._expression_collection_was_a_list(
+            "whens", "case", whens
+        )
+        try:
+            new_whens = util.dictlike_iteritems(new_whens)
+        except TypeError:
+            pass
+
+        self.whens = [
+            (
+                coercions.expect(
+                    roles.ExpressionElementRole,
+                    c,
+                    apply_propagate_attrs=self,
+                ).self_group(),
+                coercions.expect(roles.ExpressionElementRole, r),
+            )
+            for (c, r) in new_whens
+        ]
+
+        if value is None:
+            self.value = None
+        else:
+            self.value = coercions.expect(roles.ExpressionElementRole, value)
+
+        if else_ is not None:
+            self.else_ = coercions.expect(roles.ExpressionElementRole, else_)
+        else:
+            self.else_ = None
+
+        type_ = next(
+            (
+                then.type
+                # Iterate `whens` in reverse to match previous behaviour
+                # where type of final element took priority
+                for *_, then in reversed(self.whens)
+                if not then.type._isnull
+            ),
+            self.else_.type if self.else_ is not None else type_api.NULLTYPE,
+        )
+        self.type = cast(_T, type_)
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return list(
+            itertools.chain(*[x._from_objects for x in self.get_children()])
+        )
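+
+
+# Illustrative sketch of the type derivation above: the Case type comes
+# from the last WHEN whose "then" value carries a concrete type, falling
+# back to the type of else_.  The column names here are placeholders.
+def _example_case_type() -> None:
+    from sqlalchemy import String, case, cast, column
+
+    expr = case(
+        (column("flag") == 1, cast(column("v"), String)),
+        else_=column("other"),
+    )
+    print(expr.type)  # String, taken from the cast() in the final WHEN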
+
+
+class Cast(WrapsColumnExpression[_T]):
+    """Represent a ``CAST`` expression.
+
+    :class:`.Cast` is produced using the :func:`.cast` factory function,
+    as in::
+
+        from sqlalchemy import cast, Numeric
+
+        stmt = select(cast(product_table.c.unit_price, Numeric(10, 4)))
+
+    Details on :class:`.Cast` usage are at :func:`.cast`.
+
+    .. seealso::
+
+        :ref:`tutorial_casts`
+
+        :func:`.cast`
+
+        :func:`.try_cast`
+
+        :func:`.type_coerce` - an alternative to CAST that coerces the type
+        on the Python side only, which is often sufficient to generate the
+        correct SQL and data coercion.
+
+    """
+
+    __visit_name__ = "cast"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("clause", InternalTraversal.dp_clauseelement),
+        ("type", InternalTraversal.dp_type),
+    ]
+
+    clause: ColumnElement[Any]
+    type: TypeEngine[_T]
+    typeclause: TypeClause
+
+    def __init__(
+        self,
+        expression: _ColumnExpressionArgument[Any],
+        type_: _TypeEngineArgument[_T],
+    ):
+        self.type = type_api.to_instance(type_)
+        self.clause = coercions.expect(
+            roles.ExpressionElementRole,
+            expression,
+            type_=self.type,
+            apply_propagate_attrs=self,
+        )
+        self.typeclause = TypeClause(self.type)
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return self.clause._from_objects
+
+    @property
+    def wrapped_column_expression(self):
+        return self.clause
+
+
+class TryCast(Cast[_T]):
+    """Represent a TRY_CAST expression.
+
+    Details on :class:`.TryCast` usage are at :func:`.try_cast`.
+
+    .. seealso::
+
+        :func:`.try_cast`
+
+        :ref:`tutorial_casts`
+    """
+
+    __visit_name__ = "try_cast"
+    inherit_cache = True
+
+
+class TypeCoerce(WrapsColumnExpression[_T]):
+    """Represent a Python-side type-coercion wrapper.
+
+    :class:`.TypeCoerce` supplies the :func:`_expression.type_coerce`
+    function; see that function for usage details.
+
+    .. seealso::
+
+        :func:`_expression.type_coerce`
+
+        :func:`.cast`
+
+    """
+
+    __visit_name__ = "type_coerce"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("clause", InternalTraversal.dp_clauseelement),
+        ("type", InternalTraversal.dp_type),
+    ]
+
+    clause: ColumnElement[Any]
+    type: TypeEngine[_T]
+
+    def __init__(
+        self,
+        expression: _ColumnExpressionArgument[Any],
+        type_: _TypeEngineArgument[_T],
+    ):
+        self.type = type_api.to_instance(type_)
+        self.clause = coercions.expect(
+            roles.ExpressionElementRole,
+            expression,
+            type_=self.type,
+            apply_propagate_attrs=self,
+        )
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return self.clause._from_objects
+
+    @HasMemoized.memoized_attribute
+    def typed_expression(self):
+        if isinstance(self.clause, BindParameter):
+            bp = self.clause._clone()
+            bp.type = self.type
+            return bp
+        else:
+            return self.clause
+
+    @property
+    def wrapped_column_expression(self):
+        return self.clause
+
+    def self_group(
+        self, against: Optional[OperatorType] = None
+    ) -> TypeCoerce[_T]:
+        grouped = self.clause.self_group(against=against)
+        if grouped is not self.clause:
+            return TypeCoerce(grouped, self.type)
+        else:
+            return self
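+
+
+# Illustrative sketch contrasting Cast and TypeCoerce: cast() emits a SQL
+# CAST, while type_coerce() changes only the Python-side type and leaves
+# the rendered SQL untouched.
+def _example_cast_vs_type_coerce() -> None:
+    from sqlalchemy import String, cast, column, type_coerce
+
+    print(cast(column("q"), String))  # CAST(q AS VARCHAR)
+    print(type_coerce(column("q"), String))  # q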
+
+
+class Extract(ColumnElement[int]):
+    """Represent a SQL EXTRACT clause, ``extract(field FROM expr)``."""
+
+    __visit_name__ = "extract"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("expr", InternalTraversal.dp_clauseelement),
+        ("field", InternalTraversal.dp_string),
+    ]
+
+    expr: ColumnElement[Any]
+    field: str
+
+    def __init__(self, field: str, expr: _ColumnExpressionArgument[Any]):
+        self.type = type_api.INTEGERTYPE
+        self.field = field
+        self.expr = coercions.expect(roles.ExpressionElementRole, expr)
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return self.expr._from_objects
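+
+
+# Illustrative sketch: extract() passes the field name through verbatim to
+# the dialect.  "created_at" is a placeholder column name.
+def _example_extract() -> None:
+    from sqlalchemy import column, extract
+
+    # renders EXTRACT(year FROM created_at)
+    print(extract("year", column("created_at")))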
+
+
+class _label_reference(ColumnElement[_T]):
+    """Wrap a column expression as it appears in a 'reference' context.
+
+    This expression is any that includes an _order_by_label_element,
+    which is a Label, or a DESC / ASC construct wrapping a Label.
+
+    The production of _label_reference() should occur when an expression
+    is added to this context; this includes the ORDER BY or GROUP BY of a
+    SELECT statement, as well as a few other places, such as the ORDER BY
+    within an OVER clause.
+
+    """
+
+    __visit_name__ = "label_reference"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("element", InternalTraversal.dp_clauseelement)
+    ]
+
+    element: ColumnElement[_T]
+
+    def __init__(self, element: ColumnElement[_T]):
+        self.element = element
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return []
+
+
+class _textual_label_reference(ColumnElement[Any]):
+    __visit_name__ = "textual_label_reference"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("element", InternalTraversal.dp_string)
+    ]
+
+    def __init__(self, element: str):
+        self.element = element
+
+    @util.memoized_property
+    def _text_clause(self) -> TextClause:
+        return TextClause(self.element)
+
+
+class UnaryExpression(ColumnElement[_T]):
+    """Define a 'unary' expression.
+
+    A unary expression has a single column expression
+    and an operator.  The operator can be placed on the left
+    (where it is called the 'operator') or right (where it is called the
+    'modifier') of the column expression.
+
+    :class:`.UnaryExpression` is the basis for several unary operators
+    including those used by :func:`.desc`, :func:`.asc`, :func:`.distinct`,
+    :func:`.nulls_first` and :func:`.nulls_last`.
+
+    """
+
+    __visit_name__ = "unary"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("element", InternalTraversal.dp_clauseelement),
+        ("operator", InternalTraversal.dp_operator),
+        ("modifier", InternalTraversal.dp_operator),
+    ]
+
+    element: ClauseElement
+
+    def __init__(
+        self,
+        element: ColumnElement[Any],
+        operator: Optional[OperatorType] = None,
+        modifier: Optional[OperatorType] = None,
+        type_: Optional[_TypeEngineArgument[_T]] = None,
+        wraps_column_expression: bool = False,
+    ):
+        self.operator = operator
+        self.modifier = modifier
+        self._propagate_attrs = element._propagate_attrs
+        self.element = element.self_group(
+            against=self.operator or self.modifier
+        )
+
+        # if type is None, we get NULLTYPE, which is our _T.  But I don't
+        # know how to get the overloads to express that correctly
+        self.type = type_api.to_instance(type_)  # type: ignore
+
+        self.wraps_column_expression = wraps_column_expression
+
+    @classmethod
+    def _create_nulls_first(
+        cls,
+        column: _ColumnExpressionArgument[_T],
+    ) -> UnaryExpression[_T]:
+        return UnaryExpression(
+            coercions.expect(roles.ByOfRole, column),
+            modifier=operators.nulls_first_op,
+            wraps_column_expression=False,
+        )
+
+    @classmethod
+    def _create_nulls_last(
+        cls,
+        column: _ColumnExpressionArgument[_T],
+    ) -> UnaryExpression[_T]:
+        return UnaryExpression(
+            coercions.expect(roles.ByOfRole, column),
+            modifier=operators.nulls_last_op,
+            wraps_column_expression=False,
+        )
+
+    @classmethod
+    def _create_desc(
+        cls, column: _ColumnExpressionOrStrLabelArgument[_T]
+    ) -> UnaryExpression[_T]:
+        return UnaryExpression(
+            coercions.expect(roles.ByOfRole, column),
+            modifier=operators.desc_op,
+            wraps_column_expression=False,
+        )
+
+    @classmethod
+    def _create_asc(
+        cls,
+        column: _ColumnExpressionOrStrLabelArgument[_T],
+    ) -> UnaryExpression[_T]:
+        return UnaryExpression(
+            coercions.expect(roles.ByOfRole, column),
+            modifier=operators.asc_op,
+            wraps_column_expression=False,
+        )
+
+    @classmethod
+    def _create_distinct(
+        cls,
+        expr: _ColumnExpressionArgument[_T],
+    ) -> UnaryExpression[_T]:
+        col_expr: ColumnElement[_T] = coercions.expect(
+            roles.ExpressionElementRole, expr
+        )
+        return UnaryExpression(
+            col_expr,
+            operator=operators.distinct_op,
+            type_=col_expr.type,
+            wraps_column_expression=False,
+        )
+
+    @classmethod
+    def _create_bitwise_not(
+        cls,
+        expr: _ColumnExpressionArgument[_T],
+    ) -> UnaryExpression[_T]:
+        col_expr: ColumnElement[_T] = coercions.expect(
+            roles.ExpressionElementRole, expr
+        )
+        return UnaryExpression(
+            col_expr,
+            operator=operators.bitwise_not_op,
+            type_=col_expr.type,
+            wraps_column_expression=False,
+        )
+
+    @property
+    def _order_by_label_element(self) -> Optional[Label[Any]]:
+        if operators.is_order_by_modifier(self.modifier):
+            return self.element._order_by_label_element
+        else:
+            return None
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return self.element._from_objects
+
+    def _negate(self):
+        if self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity:
+            return UnaryExpression(
+                self.self_group(against=operators.inv),
+                operator=operators.inv,
+                type_=type_api.BOOLEANTYPE,
+                wraps_column_expression=self.wraps_column_expression,
+            )
+        else:
+            return ClauseElement._negate(self)
+
+    def self_group(
+        self, against: Optional[OperatorType] = None
+    ) -> Union[Self, Grouping[_T]]:
+        if self.operator and operators.is_precedent(self.operator, against):
+            return Grouping(self)
+        else:
+            return self
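+
+
+# Illustrative sketch: desc(), asc(), nulls_first() and nulls_last() all
+# build UnaryExpression objects whose operator sits to the right as a
+# "modifier".  "score" is a placeholder column name.
+def _example_ordering_modifiers() -> None:
+    from sqlalchemy import column, desc, nulls_last
+
+    print(nulls_last(desc(column("score"))))  # score DESC NULLS LAST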
+
+
+class CollectionAggregate(UnaryExpression[_T]):
+    """Forms the basis for right-hand collection operator modifiers
+    ANY and ALL.
+
+    The ANY and ALL keywords are available in different ways on different
+    backends.  On PostgreSQL, they only work for an ARRAY type.  On
+    MySQL, they only work for subqueries.
+
+    """
+
+    inherit_cache = True
+    _is_collection_aggregate = True
+
+    @classmethod
+    def _create_any(
+        cls, expr: _ColumnExpressionArgument[_T]
+    ) -> CollectionAggregate[bool]:
+        col_expr: ColumnElement[_T] = coercions.expect(
+            roles.ExpressionElementRole,
+            expr,
+        )
+        col_expr = col_expr.self_group()
+        return CollectionAggregate(
+            col_expr,
+            operator=operators.any_op,
+            type_=type_api.BOOLEANTYPE,
+            wraps_column_expression=False,
+        )
+
+    @classmethod
+    def _create_all(
+        cls, expr: _ColumnExpressionArgument[_T]
+    ) -> CollectionAggregate[bool]:
+        col_expr: ColumnElement[_T] = coercions.expect(
+            roles.ExpressionElementRole,
+            expr,
+        )
+        col_expr = col_expr.self_group()
+        return CollectionAggregate(
+            col_expr,
+            operator=operators.all_op,
+            type_=type_api.BOOLEANTYPE,
+            wraps_column_expression=False,
+        )
+
+    # operate and reverse_operate are hardwired to
+    # dispatch onto the type comparator directly, so that we can
+    # ensure "reversed" behavior.
+    def operate(self, op, *other, **kwargs):
+        if not operators.is_comparison(op):
+            raise exc.ArgumentError(
+                "Only comparison operators may be used with ANY/ALL"
+            )
+        kwargs["reverse"] = True
+        return self.comparator.operate(operators.mirror(op), *other, **kwargs)
+
+    def reverse_operate(self, op, other, **kwargs):
+        # comparison operators should never call reverse_operate
+        assert not operators.is_comparison(op)
+        raise exc.ArgumentError(
+            "Only comparison operators may be used with ANY/ALL"
+        )
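+
+
+# Illustrative sketch: any_() builds a CollectionAggregate; only comparison
+# operators are accepted, and operate() mirrors them so the aggregate ends
+# up on the right-hand side (PostgreSQL ARRAY semantics; "tags" is a
+# placeholder column name).
+def _example_any() -> None:
+    from sqlalchemy import any_, column
+
+    # renders :param_1 = ANY (tags)
+    print(any_(column("tags")) == "x")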
+
+
+class AsBoolean(WrapsColumnExpression[bool], UnaryExpression[bool]):
+    inherit_cache = True
+
+    def __init__(self, element, operator, negate):
+        self.element = element
+        self.type = type_api.BOOLEANTYPE
+        self.operator = operator
+        self.negate = negate
+        self.modifier = None
+        self.wraps_column_expression = True
+        self._is_implicitly_boolean = element._is_implicitly_boolean
+
+    @property
+    def wrapped_column_expression(self):
+        return self.element
+
+    def self_group(self, against: Optional[OperatorType] = None) -> Self:
+        return self
+
+    def _negate(self):
+        if isinstance(self.element, (True_, False_)):
+            return self.element._negate()
+        else:
+            return AsBoolean(self.element, self.negate, self.operator)
+
+
+class BinaryExpression(OperatorExpression[_T]):
+    """Represent an expression that is ``LEFT <operator> RIGHT``.
+
+    A :class:`.BinaryExpression` is generated automatically
+    whenever two column expressions are used in a Python binary expression:
+
+    .. sourcecode:: pycon+sql
+
+        >>> from sqlalchemy.sql import column
+        >>> column("a") + column("b")
+        <sqlalchemy.sql.expression.BinaryExpression object at 0x101029dd0>
+        >>> print(column("a") + column("b"))
+        {printsql}a + b
+
+    """
+
+    __visit_name__ = "binary"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("left", InternalTraversal.dp_clauseelement),
+        ("right", InternalTraversal.dp_clauseelement),
+        ("operator", InternalTraversal.dp_operator),
+        ("negate", InternalTraversal.dp_operator),
+        ("modifiers", InternalTraversal.dp_plain_dict),
+        (
+            "type",
+            InternalTraversal.dp_type,
+        ),
+    ]
+
+    _cache_key_traversal = [
+        ("left", InternalTraversal.dp_clauseelement),
+        ("right", InternalTraversal.dp_clauseelement),
+        ("operator", InternalTraversal.dp_operator),
+        ("modifiers", InternalTraversal.dp_plain_dict),
+        # "type" affects JSON CAST operators, so while redundant in most cases,
+        # is needed for that one
+        (
+            "type",
+            InternalTraversal.dp_type,
+        ),
+    ]
+
+    _is_implicitly_boolean = True
+    """Indicates that any database will know this is a boolean expression
+    even if the database does not have an explicit boolean datatype.
+
+    """
+
+    modifiers: Optional[Mapping[str, Any]]
+
+    left: ColumnElement[Any]
+    right: ColumnElement[Any]
+
+    def __init__(
+        self,
+        left: ColumnElement[Any],
+        right: ColumnElement[Any],
+        operator: OperatorType,
+        type_: Optional[_TypeEngineArgument[_T]] = None,
+        negate: Optional[OperatorType] = None,
+        modifiers: Optional[Mapping[str, Any]] = None,
+    ):
+        # allow compatibility with libraries that
+        # refer to BinaryExpression directly and pass strings
+        if isinstance(operator, str):
+            operator = operators.custom_op(operator)
+        self._orig = (left.__hash__(), right.__hash__())
+        self._propagate_attrs = left._propagate_attrs or right._propagate_attrs
+        self.left = left.self_group(against=operator)
+        self.right = right.self_group(against=operator)
+        self.operator = operator
+
+        # if type is None, we get NULLTYPE, which is our _T.  But I don't
+        # know how to get the overloads to express that correctly
+        self.type = type_api.to_instance(type_)  # type: ignore
+
+        self.negate = negate
+        self._is_implicitly_boolean = operators.is_boolean(operator)
+
+        if modifiers is None:
+            self.modifiers = {}
+        else:
+            self.modifiers = modifiers
+
+    @property
+    def _flattened_operator_clauses(
+        self,
+    ) -> typing_Tuple[ColumnElement[Any], ...]:
+        return (self.left, self.right)
+
+    def __bool__(self):
+        """Implement Python-side "bool" for BinaryExpression as a
+        simple "identity" check for the left and right attributes,
+        if the operator is "eq" or "ne".  Otherwise, like all other
+        column expressions, the expression does not support "bool".
+
+        The rationale here is so that ColumnElement objects can be hashable.
+        What?  Well, suppose you do this::
+
+            c1, c2 = column("x"), column("y")
+            s1 = set([c1, c2])
+
+        We do that **a lot**; columns inside of sets are an extremely basic
+        thing all over the ORM, for example.
+
+        So what happens if we do this? ::
+
+            c1 in s1
+
+        Hashing means it will normally use ``__hash__()`` of the object,
+        but in case of hash collision, it's going to also do ``c1 == c1``
+        and/or ``c1 == c2`` inside.  Those operations need to return a
+        True/False value.   But because we override ``==`` and ``!=``, they're
+        going to get a BinaryExpression.  Hence we implement ``__bool__`` here
+        so that these comparisons behave in this particular context mostly
+        like regular object comparisons.  Thankfully Python is OK with
+        that!  Otherwise we'd have to use special set classes for columns
+        (which we used to do, decades ago).
+
+        """
+        if self.operator in (operators.eq, operators.ne):
+            # apply the plain Python eq/ne operator to the int hash values
+            # captured at construction time, rather than to the column
+            # operands, so that "bool" is based on identity
+            return self.operator(*self._orig)  # type: ignore
+        else:
+            raise TypeError("Boolean value of this clause is not defined")
+
+    if typing.TYPE_CHECKING:
+
+        def __invert__(
+            self: BinaryExpression[_T],
+        ) -> BinaryExpression[_T]: ...
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return self.left._from_objects + self.right._from_objects
+
+    def _negate(self):
+        if self.negate is not None:
+            return BinaryExpression(
+                self.left,
+                self.right._negate_in_binary(self.negate, self.operator),
+                self.negate,
+                negate=self.operator,
+                type_=self.type,
+                modifiers=self.modifiers,
+            )
+        else:
+            return self.self_group()._negate()
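+
+
+# Illustrative sketch of _negate() above: since in_() records a "negate"
+# operator, inverting the expression swaps operators rather than wrapping
+# the whole expression in NOT (...).  "x" is a placeholder column name.
+def _example_binary_negate() -> None:
+    from sqlalchemy import column
+
+    expr = column("x").in_([1, 2])
+    print(~expr)  # x NOT IN (...), with no NOT (...) wrapper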
+
+
+class Slice(ColumnElement[Any]):
+    """Represent SQL for a Python array-slice object.
+
+    This is not a specific SQL construct at this level, but
+    may be interpreted by specific dialects, e.g. PostgreSQL.
+
+    """
+
+    __visit_name__ = "slice"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("start", InternalTraversal.dp_clauseelement),
+        ("stop", InternalTraversal.dp_clauseelement),
+        ("step", InternalTraversal.dp_clauseelement),
+    ]
+
+    def __init__(self, start, stop, step, _name=None):
+        self.start = coercions.expect(
+            roles.ExpressionElementRole,
+            start,
+            name=_name,
+            type_=type_api.INTEGERTYPE,
+        )
+        self.stop = coercions.expect(
+            roles.ExpressionElementRole,
+            stop,
+            name=_name,
+            type_=type_api.INTEGERTYPE,
+        )
+        self.step = coercions.expect(
+            roles.ExpressionElementRole,
+            step,
+            name=_name,
+            type_=type_api.INTEGERTYPE,
+        )
+        self.type = type_api.NULLTYPE
+
+    def self_group(self, against: Optional[OperatorType] = None) -> Self:
+        assert against is operator.getitem
+        return self
+
+
+class IndexExpression(BinaryExpression[Any]):
+    """Represent the class of expressions that are like an "index"
+    operation."""
+
+    inherit_cache = True
+
+
+class GroupedElement(DQLDMLClauseElement):
+    """Represent any parenthesized expression"""
+
+    __visit_name__ = "grouping"
+
+    element: ClauseElement
+
+    def self_group(self, against: Optional[OperatorType] = None) -> Self:
+        return self
+
+    def _ungroup(self):
+        return self.element._ungroup()
+
+
+class Grouping(GroupedElement, ColumnElement[_T]):
+    """Represent a grouping within a column expression"""
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("element", InternalTraversal.dp_clauseelement),
+        ("type", InternalTraversal.dp_type),
+    ]
+
+    _cache_key_traversal = [
+        ("element", InternalTraversal.dp_clauseelement),
+    ]
+
+    element: Union[TextClause, ClauseList, ColumnElement[_T]]
+
+    def __init__(
+        self, element: Union[TextClause, ClauseList, ColumnElement[_T]]
+    ):
+        self.element = element
+
+        # nulltype assignment issue
+        self.type = getattr(element, "type", type_api.NULLTYPE)  # type: ignore
+        self._propagate_attrs = element._propagate_attrs
+
+    def _with_binary_element_type(self, type_):
+        return self.__class__(self.element._with_binary_element_type(type_))
+
+    @util.memoized_property
+    def _is_implicitly_boolean(self):
+        return self.element._is_implicitly_boolean
+
+    @util.non_memoized_property
+    def _tq_label(self) -> Optional[str]:
+        return (
+            getattr(self.element, "_tq_label", None) or self._anon_name_label
+        )
+
+    @util.non_memoized_property
+    def _proxies(self) -> List[ColumnElement[Any]]:
+        if isinstance(self.element, ColumnElement):
+            return [self.element]
+        else:
+            return []
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return self.element._from_objects
+
+    def __getattr__(self, attr):
+        return getattr(self.element, attr)
+
+    def __getstate__(self):
+        return {"element": self.element, "type": self.type}
+
+    def __setstate__(self, state):
+        self.element = state["element"]
+        self.type = state["type"]
+
+    if TYPE_CHECKING:
+
+        def self_group(
+            self, against: Optional[OperatorType] = None
+        ) -> Self: ...
+
+
+class _OverrideBinds(Grouping[_T]):
+    """used by cache_key->_apply_params_to_element to allow compilation /
+    execution of a SQL element that's been cached, using an alternate set of
+    bound parameter values.
+
+    This is used by the ORM to swap new parameter values into expressions
+    that are embedded into loader options like with_expression(),
+    selectinload().  Previously, this task was accomplished using the
+    .params() method which would perform a deep-copy instead.  This deep
+    copy proved to be too expensive for more complex expressions.
+
+    See #11085
+
+    """
+
+    __visit_name__ = "override_binds"
+
+    def __init__(
+        self,
+        element: ColumnElement[_T],
+        bindparams: Sequence[BindParameter[Any]],
+        replaces_params: Sequence[BindParameter[Any]],
+    ):
+        self.element = element
+        self.translate = {
+            k.key: v.value for k, v in zip(replaces_params, bindparams)
+        }
+
+    def _gen_cache_key(
+        self, anon_map: anon_map, bindparams: List[BindParameter[Any]]
+    ) -> Optional[typing_Tuple[Any, ...]]:
+        """generate a cache key for the given element, substituting its bind
+        values for the translation values present."""
+
+        existing_bps: List[BindParameter[Any]] = []
+        ck = self.element._gen_cache_key(anon_map, existing_bps)
+
+        bindparams.extend(
+            (
+                bp._with_value(
+                    self.translate[bp.key], maintain_key=True, required=False
+                )
+                if bp.key in self.translate
+                else bp
+            )
+            for bp in existing_bps
+        )
+
+        return ck
+
+
+class _OverRange(Enum):
+    RANGE_UNBOUNDED = 0
+    RANGE_CURRENT = 1
+
+
+RANGE_UNBOUNDED = _OverRange.RANGE_UNBOUNDED
+RANGE_CURRENT = _OverRange.RANGE_CURRENT
+
+_IntOrRange = Union[int, _OverRange]
+
+
+class Over(ColumnElement[_T]):
+    """Represent an OVER clause.
+
+    This is a special operator against a so-called
+    "window" function, as well as any aggregate function,
+    which produces results relative to the result set
+    itself.  Most modern SQL backends now support window functions.
+
+    """
+
+    __visit_name__ = "over"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("element", InternalTraversal.dp_clauseelement),
+        ("order_by", InternalTraversal.dp_clauseelement),
+        ("partition_by", InternalTraversal.dp_clauseelement),
+        ("range_", InternalTraversal.dp_plain_obj),
+        ("rows", InternalTraversal.dp_plain_obj),
+    ]
+
+    order_by: Optional[ClauseList] = None
+    partition_by: Optional[ClauseList] = None
+
+    element: ColumnElement[_T]
+    """The underlying expression object to which this :class:`.Over`
+    object refers."""
+
+    range_: Optional[typing_Tuple[_IntOrRange, _IntOrRange]]
+    rows: Optional[typing_Tuple[_IntOrRange, _IntOrRange]]
+
+    def __init__(
+        self,
+        element: ColumnElement[_T],
+        partition_by: Optional[_ByArgument] = None,
+        order_by: Optional[_ByArgument] = None,
+        range_: Optional[typing_Tuple[Optional[int], Optional[int]]] = None,
+        rows: Optional[typing_Tuple[Optional[int], Optional[int]]] = None,
+    ):
+        self.element = element
+        if order_by is not None:
+            self.order_by = ClauseList(
+                *util.to_list(order_by), _literal_as_text_role=roles.ByOfRole
+            )
+        if partition_by is not None:
+            self.partition_by = ClauseList(
+                *util.to_list(partition_by),
+                _literal_as_text_role=roles.ByOfRole,
+            )
+
+        if range_:
+            self.range_ = self._interpret_range(range_)
+            if rows:
+                raise exc.ArgumentError(
+                    "'range_' and 'rows' are mutually exclusive"
+                )
+            else:
+                self.rows = None
+        elif rows:
+            self.rows = self._interpret_range(rows)
+            self.range_ = None
+        else:
+            self.rows = self.range_ = None
+
+    def __reduce__(self):
+        return self.__class__, (
+            self.element,
+            self.partition_by,
+            self.order_by,
+            self.range_,
+            self.rows,
+        )
+
+    def _interpret_range(
+        self,
+        range_: typing_Tuple[Optional[_IntOrRange], Optional[_IntOrRange]],
+    ) -> typing_Tuple[_IntOrRange, _IntOrRange]:
+        if not isinstance(range_, tuple) or len(range_) != 2:
+            raise exc.ArgumentError("2-tuple expected for range/rows")
+
+        r0, r1 = range_
+
+        lower: _IntOrRange
+        upper: _IntOrRange
+
+        if r0 is None:
+            lower = RANGE_UNBOUNDED
+        elif isinstance(r0, _OverRange):
+            lower = r0
+        else:
+            try:
+                lower = int(r0)
+            except ValueError as err:
+                raise exc.ArgumentError(
+                    "Integer or None expected for range value"
+                ) from err
+            else:
+                if lower == 0:
+                    lower = RANGE_CURRENT
+
+        if r1 is None:
+            upper = RANGE_UNBOUNDED
+        elif isinstance(r1, _OverRange):
+            upper = r1
+        else:
+            try:
+                upper = int(r1)
+            except ValueError as err:
+                raise exc.ArgumentError(
+                    "Integer or None expected for range value"
+                ) from err
+            else:
+                if upper == 0:
+                    upper = RANGE_CURRENT
+
+        return lower, upper
+
+    if not TYPE_CHECKING:
+
+        @util.memoized_property
+        def type(self) -> TypeEngine[_T]:  # noqa: A001
+            return self.element.type
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return list(
+            itertools.chain(
+                *[
+                    c._from_objects
+                    for c in (self.element, self.partition_by, self.order_by)
+                    if c is not None
+                ]
+            )
+        )
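+
+
+# Illustrative sketch of _interpret_range() above: in range_ / rows tuples,
+# None maps to UNBOUNDED and 0 maps to CURRENT ROW, so range_=(None, 0)
+# renders RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW.  "amt", "dept"
+# and "ts" are placeholder column names.
+def _example_over_range() -> None:
+    from sqlalchemy import column, func
+
+    print(
+        func.sum(column("amt")).over(
+            partition_by=column("dept"),
+            order_by=column("ts"),
+            range_=(None, 0),
+        )
+    )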
+
+
+class WithinGroup(ColumnElement[_T]):
+    """Represent a WITHIN GROUP (ORDER BY) clause.
+
+    This is a special operator against so-called
+    "ordered set aggregate" and "hypothetical
+    set aggregate" functions, including ``percentile_cont()``,
+    ``rank()``, ``dense_rank()``, etc.
+
+    It's supported only by certain database backends, such as PostgreSQL,
+    Oracle Database and MS SQL Server.
+
+    The :class:`.WithinGroup` construct extracts its type from the
+    method :meth:`.FunctionElement.within_group_type`.  If this returns
+    ``None``, the function's ``.type`` is used.
+
+    """
+
+    __visit_name__ = "withingroup"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("element", InternalTraversal.dp_clauseelement),
+        ("order_by", InternalTraversal.dp_clauseelement),
+    ]
+
+    order_by: Optional[ClauseList] = None
+
+    def __init__(
+        self,
+        element: Union[FunctionElement[_T], FunctionFilter[_T]],
+        *order_by: _ColumnExpressionArgument[Any],
+    ):
+        self.element = element
+        if order_by is not None:
+            self.order_by = ClauseList(
+                *util.to_list(order_by), _literal_as_text_role=roles.ByOfRole
+            )
+
+    def __reduce__(self):
+        return self.__class__, (self.element,) + (
+            tuple(self.order_by) if self.order_by is not None else ()
+        )
+
+    def over(
+        self,
+        *,
+        partition_by: Optional[_ByArgument] = None,
+        order_by: Optional[_ByArgument] = None,
+        rows: Optional[typing_Tuple[Optional[int], Optional[int]]] = None,
+        range_: Optional[typing_Tuple[Optional[int], Optional[int]]] = None,
+    ) -> Over[_T]:
+        """Produce an OVER clause against this :class:`.WithinGroup`
+        construct.
+
+        This function has the same signature as that of
+        :meth:`.FunctionElement.over`.
+
+        """
+        return Over(
+            self,
+            partition_by=partition_by,
+            order_by=order_by,
+            range_=range_,
+            rows=rows,
+        )
+
+    @overload
+    def filter(self) -> Self: ...
+
+    @overload
+    def filter(
+        self,
+        __criterion0: _ColumnExpressionArgument[bool],
+        *criterion: _ColumnExpressionArgument[bool],
+    ) -> FunctionFilter[_T]: ...
+
+    def filter(
+        self, *criterion: _ColumnExpressionArgument[bool]
+    ) -> Union[Self, FunctionFilter[_T]]:
+        """Produce a FILTER clause against this function."""
+        if not criterion:
+            return self
+        return FunctionFilter(self, *criterion)
+
+    if not TYPE_CHECKING:
+
+        @util.memoized_property
+        def type(self) -> TypeEngine[_T]:  # noqa: A001
+            wgt = self.element.within_group_type(self)
+            if wgt is not None:
+                return wgt
+            else:
+                return self.element.type
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return list(
+            itertools.chain(
+                *[
+                    c._from_objects
+                    for c in (self.element, self.order_by)
+                    if c is not None
+                ]
+            )
+        )
+
+
+class FunctionFilter(Generative, ColumnElement[_T]):
+    """Represent a function FILTER clause.
+
+    This is a special operator against aggregate and window functions
+    that controls which rows are passed to the function.
+    It's supported only by certain database backends.
+
+    Invocation of :class:`.FunctionFilter` is via
+    :meth:`.FunctionElement.filter`::
+
+        func.count(1).filter(True)
+
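+    As a more concrete sketch, where the table ``t`` and its columns are
+    assumed for illustration::
+
+        stmt = select(func.count(t.c.id).filter(t.c.status == "active"))
+
+    which would render SQL along the lines of
+    ``SELECT count(id) FILTER (WHERE status = 'active') ...``.
+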
+    .. seealso::
+
+        :meth:`.FunctionElement.filter`
+
+    """
+
+    __visit_name__ = "funcfilter"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("func", InternalTraversal.dp_clauseelement),
+        ("criterion", InternalTraversal.dp_clauseelement),
+    ]
+
+    criterion: Optional[ColumnElement[bool]] = None
+
+    def __init__(
+        self,
+        func: Union[FunctionElement[_T], WithinGroup[_T]],
+        *criterion: _ColumnExpressionArgument[bool],
+    ):
+        self.func = func
+        self.filter.non_generative(self, *criterion)  # type: ignore
+
+    @_generative
+    def filter(self, *criterion: _ColumnExpressionArgument[bool]) -> Self:
+        """Produce an additional FILTER against the function.
+
+        This method adds additional criteria to the initial criteria
+        set up by :meth:`.FunctionElement.filter`.
+
+        Multiple criteria are joined together at SQL render time
+        via ``AND``.
+
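+        E.g., a sketch assuming a table ``t``::
+
+            fn = func.count(t.c.id).filter(t.c.x > 5).filter(t.c.y < 10)
+
+        The combined criteria render as ``FILTER (WHERE x > 5 AND y < 10)``.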
+
+        """
+
+        for crit in list(criterion):
+            crit = coercions.expect(roles.WhereHavingRole, crit)
+
+            if self.criterion is not None:
+                self.criterion = self.criterion & crit
+            else:
+                self.criterion = crit
+
+        return self
+
+    def over(
+        self,
+        partition_by: Optional[
+            Union[
+                Iterable[_ColumnExpressionArgument[Any]],
+                _ColumnExpressionArgument[Any],
+            ]
+        ] = None,
+        order_by: Optional[
+            Union[
+                Iterable[_ColumnExpressionArgument[Any]],
+                _ColumnExpressionArgument[Any],
+            ]
+        ] = None,
+        range_: Optional[typing_Tuple[Optional[int], Optional[int]]] = None,
+        rows: Optional[typing_Tuple[Optional[int], Optional[int]]] = None,
+    ) -> Over[_T]:
+        """Produce an OVER clause against this filtered function.
+
+        Used against aggregate or so-called "window" functions,
+        for database backends that support window functions.
+
+        The expression::
+
+            func.rank().filter(MyClass.y > 5).over(order_by="x")
+
+        is shorthand for::
+
+            from sqlalchemy import over, funcfilter
+
+            over(funcfilter(func.rank(), MyClass.y > 5), order_by="x")
+
+        See :func:`_expression.over` for a full description.
+
+        """
+        return Over(
+            self,
+            partition_by=partition_by,
+            order_by=order_by,
+            range_=range_,
+            rows=rows,
+        )
+
+    def within_group(
+        self, *order_by: _ColumnExpressionArgument[Any]
+    ) -> WithinGroup[_T]:
+        """Produce a WITHIN GROUP (ORDER BY expr) clause against
+        this function.
+        """
+        return WithinGroup(self, *order_by)
+
+    def within_group_type(
+        self, within_group: WithinGroup[_T]
+    ) -> Optional[TypeEngine[_T]]:
+        return None
+
+    def self_group(
+        self, against: Optional[OperatorType] = None
+    ) -> Union[Self, Grouping[_T]]:
+        if operators.is_precedent(operators.filter_op, against):
+            return Grouping(self)
+        else:
+            return self
+
+    if not TYPE_CHECKING:
+
+        @util.memoized_property
+        def type(self) -> TypeEngine[_T]:  # noqa: A001
+            return self.func.type
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return list(
+            itertools.chain(
+                *[
+                    c._from_objects
+                    for c in (self.func, self.criterion)
+                    if c is not None
+                ]
+            )
+        )
+
+
+class NamedColumn(KeyedColumnElement[_T]):
+    is_literal = False
+    table: Optional[FromClause] = None
+    name: str
+    key: str
+
+    def _compare_name_for_result(self, other):
+        return (hasattr(other, "name") and self.name == other.name) or (
+            hasattr(other, "_label") and self._label == other._label
+        )
+
+    @util.ro_memoized_property
+    def description(self) -> str:
+        return self.name
+
+    @HasMemoized.memoized_attribute
+    def _tq_key_label(self) -> Optional[str]:
+        """table qualified label based on column key.
+
+        for table-bound columns this is <tablename>_<column key/proxy key>;
+
+        for all other expressions it resolves to the key/proxy key.
+
+        """
+        proxy_key = self._proxy_key
+        if proxy_key and proxy_key != self.name:
+            return self._gen_tq_label(proxy_key)
+        else:
+            return self._tq_label
+
+    @HasMemoized.memoized_attribute
+    def _tq_label(self) -> Optional[str]:
+        """table qualified label based on column name.
+
+        for table-bound columns this is <tablename>_<columnname>; for all
+        other expressions it resolves to .name.
+
+        """
+        return self._gen_tq_label(self.name)
+
+    @HasMemoized.memoized_attribute
+    def _render_label_in_columns_clause(self):
+        return True
+
+    @HasMemoized.memoized_attribute
+    def _non_anon_label(self):
+        return self.name
+
+    def _gen_tq_label(
+        self, name: str, dedupe_on_key: bool = True
+    ) -> Optional[str]:
+        return name
+
+    def _bind_param(
+        self,
+        operator: OperatorType,
+        obj: Any,
+        type_: Optional[TypeEngine[_T]] = None,
+        expanding: bool = False,
+    ) -> BindParameter[_T]:
+        return BindParameter(
+            self.key,
+            obj,
+            _compared_to_operator=operator,
+            _compared_to_type=self.type,
+            type_=type_,
+            unique=True,
+            expanding=expanding,
+        )
+
+    def _make_proxy(
+        self,
+        selectable: FromClause,
+        *,
+        primary_key: ColumnSet,
+        foreign_keys: Set[KeyedColumnElement[Any]],
+        name: Optional[str] = None,
+        key: Optional[str] = None,
+        name_is_truncatable: bool = False,
+        compound_select_cols: Optional[Sequence[ColumnElement[Any]]] = None,
+        disallow_is_literal: bool = False,
+        **kw: Any,
+    ) -> typing_Tuple[str, ColumnClause[_T]]:
+        c = ColumnClause(
+            (
+                coercions.expect(roles.TruncatedLabelRole, name or self.name)
+                if name_is_truncatable
+                else (name or self.name)
+            ),
+            type_=self.type,
+            _selectable=selectable,
+            is_literal=False,
+        )
+
+        c._propagate_attrs = selectable._propagate_attrs
+        if name is None:
+            c.key = self.key
+        if compound_select_cols:
+            c._proxies = list(compound_select_cols)
+        else:
+            c._proxies = [self]
+
+        if selectable._is_clone_of is not None:
+            c._is_clone_of = selectable._is_clone_of.columns.get(c.key)
+        return c.key, c
+
+
+_PS = ParamSpec("_PS")
+
+
+class Label(roles.LabeledColumnExprRole[_T], NamedColumn[_T]):
+    """Represents a column label (AS).
+
+    Represent a label, as typically applied to any column-level
+    element using the ``AS`` sql keyword.
+
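+    E.g., a sketch assuming a table ``t`` with a ``name`` column::
+
+        from sqlalchemy import select
+
+        stmt = select(t.c.name.label("employee_name"))
+
+    The above would render SQL along the lines of
+    ``SELECT t.name AS employee_name FROM t``.
+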
+    """
+
+    __visit_name__ = "label"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("name", InternalTraversal.dp_anon_name),
+        ("type", InternalTraversal.dp_type),
+        ("_element", InternalTraversal.dp_clauseelement),
+    ]
+
+    _cache_key_traversal = [
+        ("name", InternalTraversal.dp_anon_name),
+        ("_element", InternalTraversal.dp_clauseelement),
+    ]
+
+    _element: ColumnElement[_T]
+    name: str
+
+    def __init__(
+        self,
+        name: Optional[str],
+        element: _ColumnExpressionArgument[_T],
+        type_: Optional[_TypeEngineArgument[_T]] = None,
+    ):
+        orig_element = element
+        element = coercions.expect(
+            roles.ExpressionElementRole,
+            element,
+            apply_propagate_attrs=self,
+        )
+        while isinstance(element, Label):
+            # TODO: this is only covered in test_text.py, but nothing
+            # fails if it's removed.  determine rationale
+            element = element.element
+
+        if name:
+            self.name = name
+        else:
+            self.name = _anonymous_label.safe_construct(
+                id(self), getattr(element, "name", "anon")
+            )
+            if isinstance(orig_element, Label):
+                # TODO: no coverage for this block, again would be in
+                # test_text.py where the resolve_label concept is important
+                self._resolve_label = orig_element._label
+
+        self.key = self._tq_label = self._tq_key_label = self.name
+        self._element = element
+
+        self.type = (
+            type_api.to_instance(type_)
+            if type_ is not None
+            else self._element.type
+        )
+
+        self._proxies = [element]
+
+    def __reduce__(self):
+        return self.__class__, (self.name, self._element, self.type)
+
+    @HasMemoized.memoized_attribute
+    def _render_label_in_columns_clause(self):
+        return True
+
+    def _bind_param(self, operator, obj, type_=None, expanding=False):
+        return BindParameter(
+            None,
+            obj,
+            _compared_to_operator=operator,
+            type_=type_,
+            _compared_to_type=self.type,
+            unique=True,
+            expanding=expanding,
+        )
+
+    @util.memoized_property
+    def _is_implicitly_boolean(self):
+        return self.element._is_implicitly_boolean
+
+    @HasMemoized.memoized_attribute
+    def _allow_label_resolve(self):
+        return self.element._allow_label_resolve
+
+    @property
+    def _order_by_label_element(self):
+        return self
+
+    @HasMemoized.memoized_attribute
+    def element(self) -> ColumnElement[_T]:
+        return self._element.self_group(against=operators.as_)
+
+    def self_group(self, against: Optional[OperatorType] = None) -> Label[_T]:
+        return self._apply_to_inner(self._element.self_group, against=against)
+
+    def _negate(self):
+        return self._apply_to_inner(self._element._negate)
+
+    def _apply_to_inner(
+        self,
+        fn: Callable[_PS, ColumnElement[_T]],
+        *arg: _PS.args,
+        **kw: _PS.kwargs,
+    ) -> Label[_T]:
+        sub_element = fn(*arg, **kw)
+        if sub_element is not self._element:
+            return Label(self.name, sub_element, type_=self.type)
+        else:
+            return self
+
+    @property
+    def primary_key(self):
+        return self.element.primary_key
+
+    @property
+    def foreign_keys(self):
+        return self.element.foreign_keys
+
+    def _copy_internals(
+        self,
+        *,
+        clone: _CloneCallableType = _clone,
+        anonymize_labels: bool = False,
+        **kw: Any,
+    ) -> None:
+        self._reset_memoizations()
+        self._element = clone(self._element, **kw)
+        if anonymize_labels:
+            self.name = _anonymous_label.safe_construct(
+                id(self), getattr(self.element, "name", "anon")
+            )
+            self.key = self._tq_label = self._tq_key_label = self.name
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return self.element._from_objects
+
+    def _make_proxy(
+        self,
+        selectable: FromClause,
+        *,
+        primary_key: ColumnSet,
+        foreign_keys: Set[KeyedColumnElement[Any]],
+        name: Optional[str] = None,
+        compound_select_cols: Optional[Sequence[ColumnElement[Any]]] = None,
+        **kw: Any,
+    ) -> typing_Tuple[str, ColumnClause[_T]]:
+        name = self.name if not name else name
+
+        key, e = self.element._make_proxy(
+            selectable,
+            name=name,
+            disallow_is_literal=True,
+            name_is_truncatable=isinstance(name, _truncated_label),
+            compound_select_cols=compound_select_cols,
+            primary_key=primary_key,
+            foreign_keys=foreign_keys,
+        )
+
+        # there was a note here to remove this assertion, which was here
+        # to determine if we later could support a use case where
+        # the key and name of a label are separate.  But I don't know what
+        # that case was.  For now, this is an unexpected case that occurs
+        # when a label name conflicts with other columns and select()
+        # is attempting to disambiguate an explicit label, which is not what
+        # the user would want.   See issue #6090.
+        if key != self.name and not isinstance(self.name, _anonymous_label):
+            raise exc.InvalidRequestError(
+                "Label name %s is being renamed to an anonymous label due "
+                "to disambiguation "
+                "which is not supported right now.  Please use unique names "
+                "for explicit labels." % (self.name)
+            )
+
+        e._propagate_attrs = selectable._propagate_attrs
+        e._proxies.append(self)
+        if self.type is not None:
+            e.type = self.type
+
+        return self.key, e
+
+
+class ColumnClause(
+    roles.DDLReferredColumnRole,
+    roles.LabeledColumnExprRole[_T],
+    roles.StrAsPlainColumnRole,
+    Immutable,
+    NamedColumn[_T],
+):
+    """Represents a column expression from any textual string.
+
+    The :class:`.ColumnClause`, a lightweight analogue to the
+    :class:`_schema.Column` class, is typically invoked using the
+    :func:`_expression.column` function, as in::
+
+        from sqlalchemy import column, select, text
+
+        id, name = column("id"), column("name")
+        stmt = select(id, name).select_from(text("user"))
+
+    The above statement would produce SQL like:
+
+    .. sourcecode:: sql
+
+        SELECT id, name FROM user
+
+    :class:`.ColumnClause` is the immediate superclass of the schema-specific
+    :class:`_schema.Column` object.  While the :class:`_schema.Column`
+    class has all the
+    same capabilities as :class:`.ColumnClause`, the :class:`.ColumnClause`
+    class is usable by itself in those cases where behavioral requirements
+    are limited to simple SQL expression generation.  The object has none of
+    the associations with schema-level metadata or with execution-time
+    behavior that :class:`_schema.Column` does,
+    so in that sense is a "lightweight"
+    version of :class:`_schema.Column`.
+
+    Full details on :class:`.ColumnClause` usage are at
+    :func:`_expression.column`.
+
+    .. seealso::
+
+        :func:`_expression.column`
+
+        :class:`_schema.Column`
+
+    """
+
+    table: Optional[FromClause]
+    is_literal: bool
+
+    __visit_name__ = "column"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("name", InternalTraversal.dp_anon_name),
+        ("type", InternalTraversal.dp_type),
+        ("table", InternalTraversal.dp_clauseelement),
+        ("is_literal", InternalTraversal.dp_boolean),
+    ]
+
+    onupdate: Optional[DefaultGenerator] = None
+    default: Optional[DefaultGenerator] = None
+    server_default: Optional[FetchedValue] = None
+    server_onupdate: Optional[FetchedValue] = None
+
+    _is_multiparam_column = False
+
+    @property
+    def _is_star(self):
+        return self.is_literal and self.name == "*"
+
+    def __init__(
+        self,
+        text: str,
+        type_: Optional[_TypeEngineArgument[_T]] = None,
+        is_literal: bool = False,
+        _selectable: Optional[FromClause] = None,
+    ):
+        self.key = self.name = text
+        self.table = _selectable
+
+        # if type is None, we get NULLTYPE, which is our _T.  But I don't
+        # know how to get the overloads to express that correctly
+        self.type = type_api.to_instance(type_)  # type: ignore
+
+        self.is_literal = is_literal
+
+    def get_children(self, *, column_tables=False, **kw):
+        # override base get_children() to not return the Table
+        # or selectable that is parent to this column.  Traversals
+        # expect the columns of tables and subqueries to be leaf nodes.
+        return []
+
+    @property
+    def entity_namespace(self):
+        if self.table is not None:
+            return self.table.entity_namespace
+        else:
+            return super().entity_namespace
+
+    def _clone(self, detect_subquery_cols=False, **kw):
+        if (
+            detect_subquery_cols
+            and self.table is not None
+            and self.table._is_subquery
+        ):
+            clone = kw.pop("clone")
+            table = clone(self.table, **kw)
+            new = table.c.corresponding_column(self)
+            return new
+
+        return super()._clone(**kw)
+
+    @HasMemoized_ro_memoized_attribute
+    def _from_objects(self) -> List[FromClause]:
+        t = self.table
+        if t is not None:
+            return [t]
+        else:
+            return []
+
+    @HasMemoized.memoized_attribute
+    def _render_label_in_columns_clause(self):
+        return self.table is not None
+
+    @property
+    def _ddl_label(self):
+        return self._gen_tq_label(self.name, dedupe_on_key=False)
+
+    def _compare_name_for_result(self, other):
+        if (
+            self.is_literal
+            or self.table is None
+            or self.table._is_textual
+            or not hasattr(other, "proxy_set")
+            or (
+                isinstance(other, ColumnClause)
+                and (
+                    other.is_literal
+                    or other.table is None
+                    or other.table._is_textual
+                )
+            )
+        ):
+            return (hasattr(other, "name") and self.name == other.name) or (
+                hasattr(other, "_tq_label")
+                and self._tq_label == other._tq_label
+            )
+        else:
+            return other.proxy_set.intersection(self.proxy_set)
+
+    def _gen_tq_label(
+        self, name: str, dedupe_on_key: bool = True
+    ) -> Optional[str]:
+        """generate table-qualified label
+
+        for a table-bound column this is <tablename>_<columnname>.
+
+        used primarily for LABEL_STYLE_TABLENAME_PLUS_COL
+        as well as the .columns collection on a Join object.
+
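+        e.g. a column ``name`` on a table ``user`` yields the label
+        ``user_name``; when that label collides with an existing column
+        name, a numeric suffix such as ``user_name_1`` is applied.
+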
+        """
+        label: str
+        t = self.table
+        if self.is_literal:
+            return None
+        elif t is not None and is_named_from_clause(t):
+            if has_schema_attr(t) and t.schema:
+                label = t.schema.replace(".", "_") + "_" + t.name + "_" + name
+            else:
+                assert not TYPE_CHECKING or isinstance(t, NamedFromClause)
+                label = t.name + "_" + name
+
+            # propagate name quoting rules for labels.
+            if is_quoted_name(name) and name.quote is not None:
+                if is_quoted_name(label):
+                    label.quote = name.quote
+                else:
+                    label = quoted_name(label, name.quote)
+            elif is_quoted_name(t.name) and t.name.quote is not None:
+                # can't get this situation to occur, so let's
+                # assert false on it for now
+                assert not isinstance(label, quoted_name)
+                label = quoted_name(label, t.name.quote)
+
+            if dedupe_on_key:
+                # ensure the label name doesn't conflict with that of an
+                # existing column.   note that this implies that any Column
+                # must **not** set up its _label before its parent table has
+                # all of its other Column objects set up.  There are several
+                # tables in the test suite which will fail otherwise; example:
+                # table "owner" has columns "name" and "owner_name".  Therefore
+                # column owner.name cannot use the label "owner_name", it has
+                # to be "owner_name_1".
+                if label in t.c:
+                    _label = label
+                    counter = 1
+                    while _label in t.c:
+                        _label = label + "_" + str(counter)
+                        counter += 1
+                    label = _label
+
+            return coercions.expect(roles.TruncatedLabelRole, label)
+
+        else:
+            return name
+
+    def _make_proxy(
+        self,
+        selectable: FromClause,
+        *,
+        primary_key: ColumnSet,
+        foreign_keys: Set[KeyedColumnElement[Any]],
+        name: Optional[str] = None,
+        key: Optional[str] = None,
+        name_is_truncatable: bool = False,
+        compound_select_cols: Optional[Sequence[ColumnElement[Any]]] = None,
+        disallow_is_literal: bool = False,
+        **kw: Any,
+    ) -> typing_Tuple[str, ColumnClause[_T]]:
+        # the "is_literal" flag normally should never be propagated; a proxied
+        # column is always a SQL identifier and never the actual expression
+        # being evaluated. however, there is a case where the "is_literal" flag
+        # might be used to allow the given identifier to have a fixed quoting
+        # pattern already, so maintain the flag for the proxy unless a
+        # :class:`.Label` object is creating the proxy.  See [ticket:4730].
+        is_literal = (
+            not disallow_is_literal
+            and self.is_literal
+            and (
+                # note this does not accommodate for quoted_name differences
+                # right now
+                name is None
+                or name == self.name
+            )
+        )
+        c = self._constructor(
+            (
+                coercions.expect(roles.TruncatedLabelRole, name or self.name)
+                if name_is_truncatable
+                else (name or self.name)
+            ),
+            type_=self.type,
+            _selectable=selectable,
+            is_literal=is_literal,
+        )
+        c._propagate_attrs = selectable._propagate_attrs
+        if name is None:
+            c.key = self.key
+        if compound_select_cols:
+            c._proxies = list(compound_select_cols)
+        else:
+            c._proxies = [self]
+
+        if selectable._is_clone_of is not None:
+            c._is_clone_of = selectable._is_clone_of.columns.get(c.key)
+        return c.key, c
+
+
+class TableValuedColumn(NamedColumn[_T]):
+    __visit_name__ = "table_valued_column"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("name", InternalTraversal.dp_anon_name),
+        ("type", InternalTraversal.dp_type),
+        ("scalar_alias", InternalTraversal.dp_clauseelement),
+    ]
+
+    def __init__(self, scalar_alias: NamedFromClause, type_: TypeEngine[_T]):
+        self.scalar_alias = scalar_alias
+        self.key = self.name = scalar_alias.name
+        self.type = type_
+
+    def _copy_internals(
+        self, clone: _CloneCallableType = _clone, **kw: Any
+    ) -> None:
+        self.scalar_alias = clone(self.scalar_alias, **kw)
+        self.key = self.name = self.scalar_alias.name
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return [self.scalar_alias]
+
+
+class CollationClause(ColumnElement[str]):
+    __visit_name__ = "collation"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("collation", InternalTraversal.dp_string)
+    ]
+
+    @classmethod
+    @util.preload_module("sqlalchemy.sql.sqltypes")
+    def _create_collation_expression(
+        cls, expression: _ColumnExpressionArgument[str], collation: str
+    ) -> BinaryExpression[str]:
+
+        sqltypes = util.preloaded.sql_sqltypes
+
+        expr = coercions.expect(roles.ExpressionElementRole[str], expression)
+
+        if expr.type._type_affinity is sqltypes.String:
+            collate_type = expr.type._with_collation(collation)
+        else:
+            collate_type = expr.type
+
+        return BinaryExpression(
+            expr,
+            CollationClause(collation),
+            operators.collate,
+            type_=collate_type,
+        )
+
+    def __init__(self, collation):
+        self.collation = collation
+
+
+class _IdentifiedClause(Executable, ClauseElement):
+    __visit_name__ = "identified"
+
+    def __init__(self, ident):
+        self.ident = ident
+
+
+class SavepointClause(_IdentifiedClause):
+    __visit_name__ = "savepoint"
+    inherit_cache = False
+
+
+class RollbackToSavepointClause(_IdentifiedClause):
+    __visit_name__ = "rollback_to_savepoint"
+    inherit_cache = False
+
+
+class ReleaseSavepointClause(_IdentifiedClause):
+    __visit_name__ = "release_savepoint"
+    inherit_cache = False
+
+
+class quoted_name(util.MemoizedSlots, str):
+    """Represent a SQL identifier combined with quoting preferences.
+
+    :class:`.quoted_name` is a Python unicode/str subclass which
+    represents a particular identifier name along with a
+    ``quote`` flag.  This ``quote`` flag, when set to
+    ``True`` or ``False``, overrides automatic quoting behavior
+    for this identifier in order to either unconditionally quote
+    or to not quote the name.  If left at its default of ``None``,
+    quoting behavior is applied to the identifier on a per-backend basis
+    based on an examination of the token itself.
+
+    A :class:`.quoted_name` object with ``quote=True`` is also
+    prevented from being modified in the case of a so-called
+    "name normalize" option.  Certain database backends, such as
+    Oracle Database, Firebird, and DB2 "normalize" case-insensitive names
+    as uppercase.  The SQLAlchemy dialects for these backends
+    convert from SQLAlchemy's lower-case-means-insensitive convention
+    to the upper-case-means-insensitive conventions of those backends.
+    The ``quote=True`` flag here will prevent this conversion from occurring
+    to support an identifier that's quoted as all lower case against
+    such a backend.
+
+    The :class:`.quoted_name` object is normally created automatically
+    when specifying the name for key schema constructs such as
+    :class:`_schema.Table`, :class:`_schema.Column`, and others.
+    The class can also be
+    passed explicitly as the name to any function that receives a name which
+    can be quoted.  Such as to use the :meth:`_engine.Engine.has_table`
+    method with
+    an unconditionally quoted name::
+
+        from sqlalchemy import create_engine
+        from sqlalchemy import inspect
+        from sqlalchemy.sql import quoted_name
+
+        engine = create_engine("oracle+oracledb://some_dsn")
+        print(inspect(engine).has_table(quoted_name("some_table", True)))
+
+    The above code will run the "has table" check against the Oracle Database
+    backend, passing the name exactly as ``"some_table"`` without converting
+    it to upper case.
+
+    .. versionchanged:: 1.2 The :class:`.quoted_name` construct is now
+       importable from ``sqlalchemy.sql``, in addition to the previous
+       location of ``sqlalchemy.sql.elements``.
+
+    """
+
+    __slots__ = "quote", "lower", "upper"
+
+    quote: Optional[bool]
+
+    @overload
+    @classmethod
+    def construct(cls, value: str, quote: Optional[bool]) -> quoted_name: ...
+
+    @overload
+    @classmethod
+    def construct(cls, value: None, quote: Optional[bool]) -> None: ...
+
+    @classmethod
+    def construct(
+        cls, value: Optional[str], quote: Optional[bool]
+    ) -> Optional[quoted_name]:
+        if value is None:
+            return None
+        else:
+            return quoted_name(value, quote)
+
+    def __new__(cls, value: str, quote: Optional[bool]) -> quoted_name:
+        assert (
+            value is not None
+        ), "use quoted_name.construct() for None passthrough"
+        if isinstance(value, cls) and (quote is None or value.quote == quote):
+            return value
+        self = super().__new__(cls, value)
+
+        self.quote = quote
+        return self
+
+    def __reduce__(self):
+        return quoted_name, (str(self), self.quote)
+
+    def _memoized_method_lower(self):
+        if self.quote:
+            return self
+        else:
+            return str(self).lower()
+
+    def _memoized_method_upper(self):
+        if self.quote:
+            return self
+        else:
+            return str(self).upper()
+
+
+def _find_columns(clause: ClauseElement) -> Set[ColumnClause[Any]]:
+    """locate Column objects within the given expression."""
+
+    cols: Set[ColumnClause[Any]] = set()
+    traverse(clause, {}, {"column": cols.add})
+    return cols
+
+
+def _type_from_args(args: Sequence[ColumnElement[_T]]) -> TypeEngine[_T]:
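+    # return the type of the first argument whose type isn't NullType;
+    # fall back to NULLTYPE when every argument is untyped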
+    for a in args:
+        if not a.type._isnull:
+            return a.type
+    else:
+        return type_api.NULLTYPE  # type: ignore
+
+
+def _corresponding_column_or_error(fromclause, column, require_embedded=False):
+    c = fromclause.corresponding_column(
+        column, require_embedded=require_embedded
+    )
+    if c is None:
+        raise exc.InvalidRequestError(
+            "Given column '%s', attached to table '%s', "
+            "failed to locate a corresponding column from table '%s'"
+            % (column, getattr(column, "table", None), fromclause.description)
+        )
+    return c
+
+
+class _memoized_property_but_not_nulltype(
+    util.memoized_property["TypeEngine[_T]"]
+):
+    """memoized property, but dont memoize NullType"""
+
+    def __get__(self, obj, cls):
+        if obj is None:
+            return self
+        result = self.fget(obj)
+        if not result._isnull:
+            obj.__dict__[self.__name__] = result
+        return result
+
+
+class AnnotatedColumnElement(Annotated):
+    _Annotated__element: ColumnElement[Any]
+
+    def __init__(self, element, values):
+        Annotated.__init__(self, element, values)
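+        # un-memoize attributes copied over from the parent element so
+        # that they are recomputed against this annotated wrapper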
+        for attr in (
+            "comparator",
+            "_proxy_key",
+            "_tq_key_label",
+            "_tq_label",
+            "_non_anon_label",
+            "type",
+        ):
+            self.__dict__.pop(attr, None)
+        for attr in ("name", "key", "table"):
+            if self.__dict__.get(attr, False) is None:
+                self.__dict__.pop(attr)
+
+    def _with_annotations(self, values):
+        clone = super()._with_annotations(values)
+        for attr in (
+            "comparator",
+            "_proxy_key",
+            "_tq_key_label",
+            "_tq_label",
+            "_non_anon_label",
+        ):
+            clone.__dict__.pop(attr, None)
+        return clone
+
+    @util.memoized_property
+    def name(self):
+        """pull 'name' from parent, if not present"""
+        return self._Annotated__element.name
+
+    @_memoized_property_but_not_nulltype
+    def type(self):
+        """pull 'type' from parent and don't cache if null.
+
+        type is routinely changed on existing columns within the
+        mapped_column() initialization process, and "type" is also consulted
+        during the creation of SQL expressions.  Therefore it can change after
+        it was already retrieved.  At the same time we don't want annotated
+        objects having overhead when expressions are produced, so continue
+        to memoize, but only when we have a non-null type.
+
+        """
+        return self._Annotated__element.type
+
+    @util.memoized_property
+    def table(self):
+        """pull 'table' from parent, if not present"""
+        return self._Annotated__element.table
+
+    @util.memoized_property
+    def key(self):
+        """pull 'key' from parent, if not present"""
+        return self._Annotated__element.key
+
+    @util.memoized_property
+    def info(self) -> _InfoType:
+        if TYPE_CHECKING:
+            assert isinstance(self._Annotated__element, Column)
+        return self._Annotated__element.info
+
+    @util.memoized_property
+    def _anon_name_label(self) -> str:
+        return self._Annotated__element._anon_name_label
+
+
+class _truncated_label(quoted_name):
+    """A unicode subclass used to identify symbolic "
+    "names that may require truncation."""
+
+    __slots__ = ()
+
+    def __new__(cls, value: str, quote: Optional[bool] = None) -> Any:
+        quote = getattr(value, "quote", quote)
+        # return super(_truncated_label, cls).__new__(cls, value, quote, True)
+        return super().__new__(cls, value, quote)
+
+    def __reduce__(self) -> Any:
+        return self.__class__, (str(self), self.quote)
+
+    def apply_map(self, map_: Mapping[str, Any]) -> str:
+        return self
+
+
+class conv(_truncated_label):
+    """Mark a string indicating that a name has already been converted
+    by a naming convention.
+
+    This is a string subclass that indicates a name that should not be
+    subject to any further naming conventions.
+
+    E.g. when we create a :class:`.Constraint` using a naming convention
+    as follows::
+
+        m = MetaData(
+            naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"}
+        )
+        t = Table(
+            "t", m, Column("x", Integer), CheckConstraint("x > 5", name="x5")
+        )
+
+    The name of the above constraint will be rendered as ``"ck_t_x5"``.
+    That is, the existing name ``x5`` is used in the naming convention as the
+    ``constraint_name`` token.
+
+    In some situations, such as in migration scripts, we may be rendering
+    the above :class:`.CheckConstraint` with a name that's already been
+    converted.  In order to make sure the name isn't double-modified, the
+    new name is applied using the :func:`_schema.conv` marker.  We can
+    use this explicitly as follows::
+
+        m = MetaData(
+            naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"}
+        )
+        t = Table(
+            "t",
+            m,
+            Column("x", Integer),
+            CheckConstraint("x > 5", name=conv("ck_t_x5")),
+        )
+
+    Where above, the :func:`_schema.conv` marker indicates that the constraint
+    name here is final, and the name will render as ``"ck_t_x5"`` and not
+    ``"ck_t_ck_t_x5"``.
+
+    .. seealso::
+
+        :ref:`constraint_naming_conventions`
+
+    """
+
+    __slots__ = ()
+
+
+# for backwards compatibility in case
+# someone is re-implementing the
+# _truncated_identifier() sequence in a custom
+# compiler
+_generated_label = _truncated_label
+
+
+class _anonymous_label(_truncated_label):
+    """A unicode subclass used to identify anonymously
+    generated names."""
+
+    __slots__ = ()
+
+    @classmethod
+    def safe_construct(
+        cls,
+        seed: int,
+        body: str,
+        enclosing_label: Optional[str] = None,
+        sanitize_key: bool = False,
+    ) -> _anonymous_label:
+        # need to escape chars that interfere with format
+        # strings in any case, issue #8724
+        body = re.sub(r"[%\(\) \$]+", "_", body)
+
+        if sanitize_key:
+            # sanitize_key is then an extra step used by BindParameter
+            body = body.strip("_")
+
+        label = "%%(%d %s)s" % (seed, body.replace("%", "%%"))
+        if enclosing_label:
+            label = "%s%s" % (enclosing_label, label)
+
+        return _anonymous_label(label)
+
+    def __add__(self, other):
+        if "%" in other and not isinstance(other, _anonymous_label):
+            other = str(other).replace("%", "%%")
+        else:
+            other = str(other)
+
+        return _anonymous_label(
+            quoted_name(
+                str.__add__(self, other),
+                self.quote,
+            )
+        )
+
+    def __radd__(self, other):
+        if "%" in other and not isinstance(other, _anonymous_label):
+            other = str(other).replace("%", "%%")
+        else:
+            other = str(other)
+
+        return _anonymous_label(
+            quoted_name(
+                str.__add__(other, self),
+                self.quote,
+            )
+        )
+
+    def apply_map(self, map_):
+        if self.quote is not None:
+            # preserve quoting only if necessary
+            return quoted_name(self % map_, self.quote)
+        else:
+            # else skip the constructor call
+            return self % map_
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/events.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/events.py
new file mode 100644
index 00000000..601092fd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/events.py
@@ -0,0 +1,458 @@
+# sql/events.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+from typing import Any
+from typing import TYPE_CHECKING
+
+from .base import SchemaEventTarget
+from .. import event
+
+if TYPE_CHECKING:
+    from .schema import Column
+    from .schema import Constraint
+    from .schema import SchemaItem
+    from .schema import Table
+    from ..engine.base import Connection
+    from ..engine.interfaces import ReflectedColumn
+    from ..engine.reflection import Inspector
+
+
+class DDLEvents(event.Events[SchemaEventTarget]):
+    """
+    Define event listeners for schema objects,
+    that is, :class:`.SchemaItem` and other :class:`.SchemaEventTarget`
+    subclasses, including :class:`_schema.MetaData`, :class:`_schema.Table`,
+    :class:`_schema.Column`, etc.
+
+    **Create / Drop Events**
+
+    Events emitted when CREATE and DROP commands are emitted to the database.
+    The event hooks in this category include :meth:`.DDLEvents.before_create`,
+    :meth:`.DDLEvents.after_create`, :meth:`.DDLEvents.before_drop`, and
+    :meth:`.DDLEvents.after_drop`.
+
+    These events are emitted when using schema-level methods such as
+    :meth:`.MetaData.create_all` and :meth:`.MetaData.drop_all`. Per-object
+    create/drop methods such as :meth:`.Table.create`, :meth:`.Table.drop`,
+    :meth:`.Index.create` are also included, as well as dialect-specific
+    methods such as :meth:`_postgresql.ENUM.create`.
+
+    .. versionadded:: 2.0 :class:`.DDLEvents` event hooks now take place
+       for non-table objects including constraints, indexes, and
+       dialect-specific schema types.
+
+    Event hooks may be attached directly to a :class:`_schema.Table` object or
+    to a :class:`_schema.MetaData` collection, as well as to any
+    :class:`.SchemaItem` class or object that can be individually created and
+    dropped using a distinct SQL command. Such classes include :class:`.Index`,
+    :class:`.Sequence`, and dialect-specific classes such as
+    :class:`_postgresql.ENUM`.
+
+    Example using the :meth:`.DDLEvents.after_create` event, where a custom
+    event hook will emit an ``ALTER TABLE`` command on the current connection,
+    after ``CREATE TABLE`` is emitted::
+
+        from sqlalchemy import create_engine
+        from sqlalchemy import event
+        from sqlalchemy import text
+        from sqlalchemy import Table, Column, MetaData, Integer
+
+        m = MetaData()
+        some_table = Table("some_table", m, Column("data", Integer))
+
+
+        @event.listens_for(some_table, "after_create")
+        def after_create(target, connection, **kw):
+            connection.execute(
+                text("ALTER TABLE %s SET name=foo_%s" % (target.name, target.name))
+            )
+
+
+        some_engine = create_engine("postgresql://scott:tiger@host/test")
+
+        # will emit "CREATE TABLE some_table" as well as the above
+        # "ALTER TABLE" statement afterwards
+        m.create_all(some_engine)
+
+    Constraint objects such as :class:`.ForeignKeyConstraint`,
+    :class:`.UniqueConstraint`, and :class:`.CheckConstraint` may also be
+    subscribed to these events; however, they will **not** normally produce
+    events, as these objects are usually rendered inline within an
+    enclosing ``CREATE TABLE`` statement and implicitly dropped from a
+    ``DROP TABLE`` statement.
+
+    For the :class:`.Index` construct, the event hook will be emitted
+    for ``CREATE INDEX``; however, SQLAlchemy does not normally emit
+    ``DROP INDEX`` when dropping tables, as this is again implicit within
+    the ``DROP TABLE`` statement.
+
+    .. versionadded:: 2.0 Support for :class:`.SchemaItem` objects
+       for create/drop events was expanded from its previous support for
+       :class:`.MetaData` and :class:`.Table` to also include
+       :class:`.Constraint` and all subclasses, :class:`.Index`,
+       :class:`.Sequence` and some type-related constructs such as
+       :class:`_postgresql.ENUM`.
+
+    .. note:: These event hooks are only emitted within the scope of
+       SQLAlchemy's create/drop methods; they are not necessarily supported
+       by tools such as `alembic <https://alembic.sqlalchemy.org>`_.
+
+
+    **Attachment Events**
+
+    Attachment events are provided to customize
+    behavior whenever a child schema element is associated
+    with a parent, such as when a :class:`_schema.Column` is associated
+    with its :class:`_schema.Table`, when a
+    :class:`_schema.ForeignKeyConstraint`
+    is associated with a :class:`_schema.Table`, etc.  These events include
+    :meth:`.DDLEvents.before_parent_attach` and
+    :meth:`.DDLEvents.after_parent_attach`.
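+
+    For example, a hypothetical listener (the callable name here is
+    illustrative only) that fires whenever a :class:`_schema.Column` is
+    attached to its :class:`_schema.Table`::
+
+        from sqlalchemy import event, Column
+
+
+        @event.listens_for(Column, "after_parent_attach")
+        def receive_attach(target, parent):
+            print(f"column {target.name} attached to {parent.name}")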
+
+    **Reflection Events**
+
+    The :meth:`.DDLEvents.column_reflect` event is used to intercept
+    and modify the in-Python definition of database columns when
+    :term:`reflection` of database tables proceeds.
+
+    **Use with Generic DDL**
+
+    DDL events integrate closely with the
+    :class:`.DDL` class and the :class:`.ExecutableDDLElement` hierarchy
+    of DDL clause constructs, which are themselves appropriate
+    as listener callables::
+
+        from sqlalchemy import DDL
+
+        event.listen(
+            some_table,
+            "after_create",
+            DDL("ALTER TABLE %(table)s SET name=foo_%(table)s"),
+        )
+
+    **Event Propagation to MetaData Copies**
+
+    For all :class:`.DDLEvents` events, the ``propagate=True`` keyword argument
+    will ensure that a given event handler is propagated to copies of the
+    object, which are made when using the :meth:`_schema.Table.to_metadata`
+    method::
+
+        from sqlalchemy import DDL
+
+        metadata = MetaData()
+        some_table = Table("some_table", metadata, Column("data", Integer))
+
+        event.listen(
+            some_table,
+            "after_create",
+            DDL("ALTER TABLE %(table)s SET name=foo_%(table)s"),
+            propagate=True,
+        )
+
+        new_metadata = MetaData()
+        new_table = some_table.to_metadata(new_metadata)
+
+    The above :class:`.DDL` object will be associated with the
+    :meth:`.DDLEvents.after_create` event for both the ``some_table`` and
+    the ``new_table`` :class:`.Table` objects.
+
+    .. seealso::
+
+        :ref:`event_toplevel`
+
+        :class:`.ExecutableDDLElement`
+
+        :class:`.DDL`
+
+        :ref:`schema_ddl_sequences`
+
+    """  # noqa: E501
+
+    _target_class_doc = "SomeSchemaClassOrObject"
+    _dispatch_target = SchemaEventTarget
+
+    def before_create(
+        self, target: SchemaEventTarget, connection: Connection, **kw: Any
+    ) -> None:
+        r"""Called before CREATE statements are emitted.
+
+        :param target: the :class:`.SchemaEventTarget` object which is
+         the target of the event, such as a :class:`_schema.MetaData` or
+         :class:`_schema.Table`, but also including all create/drop
+         objects such as :class:`.Index`, :class:`.Sequence`, etc.
+
+         .. versionadded:: 2.0 Support for all :class:`.SchemaItem` objects
+            was added.
+
+        :param connection: the :class:`_engine.Connection` where the
+         CREATE statement or statements will be emitted.
+        :param \**kw: additional keyword arguments relevant
+         to the event.  The contents of this dictionary
+         may vary across releases, and include the
+         list of tables being generated for a metadata-level
+         event, the checkfirst flag, and other
+         elements used by internal events.
+
+        :func:`.event.listen` accepts the ``propagate=True``
+        modifier for this event; when True, the listener function will
+        be established for any copies made of the target object,
+        i.e. those copies that are generated when
+        :meth:`_schema.Table.to_metadata` is used.
+
+        :func:`.event.listen` accepts the ``insert=True``
+        modifier for this event; when True, the listener function will
+        be prepended to the internal list of events upon discovery, and execute
+        before registered listener functions that do not pass this argument.
+
+        """
+
+    def after_create(
+        self, target: SchemaEventTarget, connection: Connection, **kw: Any
+    ) -> None:
+        r"""Called after CREATE statements are emitted.
+
+        :param target: the :class:`.SchemaEventTarget` object which is
+         the target of the event, such as a :class:`_schema.MetaData` or
+         :class:`_schema.Table`, but also including all create/drop
+         objects such as :class:`.Index`, :class:`.Sequence`, etc.
+
+         .. versionadded:: 2.0 Support for all :class:`.SchemaItem` objects
+            was added.
+
+        :param connection: the :class:`_engine.Connection` where the
+         CREATE statement or statements have been emitted.
+        :param \**kw: additional keyword arguments relevant
+         to the event.  The contents of this dictionary
+         may vary across releases, and include the
+         list of tables being generated for a metadata-level
+         event, the checkfirst flag, and other
+         elements used by internal events.
+
+        :func:`.event.listen` also accepts the ``propagate=True``
+        modifier for this event; when True, the listener function will
+        be established for any copies made of the target object,
+        i.e. those copies that are generated when
+        :meth:`_schema.Table.to_metadata` is used.
+
+        """
+
+    def before_drop(
+        self, target: SchemaEventTarget, connection: Connection, **kw: Any
+    ) -> None:
+        r"""Called before DROP statements are emitted.
+
+        :param target: the :class:`.SchemaEventTarget` object which is
+         the target of the event, such as a :class:`_schema.MetaData` or
+         :class:`_schema.Table`, but also including all create/drop
+         objects such as :class:`.Index`, :class:`.Sequence`, etc.
+
+         .. versionadded:: 2.0 Support for all :class:`.SchemaItem` objects
+            was added.
+
+        :param connection: the :class:`_engine.Connection` where the
+         DROP statement or statements will be emitted.
+        :param \**kw: additional keyword arguments relevant
+         to the event.  The contents of this dictionary
+         may vary across releases, and include the
+         list of tables being generated for a metadata-level
+         event, the checkfirst flag, and other
+         elements used by internal events.
+
+        :func:`.event.listen` also accepts the ``propagate=True``
+        modifier for this event; when True, the listener function will
+        be established for any copies made of the target object,
+        i.e. those copies that are generated when
+        :meth:`_schema.Table.to_metadata` is used.
+
+        """
+
+    def after_drop(
+        self, target: SchemaEventTarget, connection: Connection, **kw: Any
+    ) -> None:
+        r"""Called after DROP statements are emitted.
+
+        :param target: the :class:`.SchemaEventTarget` object which is
+         the target of the event, such as a :class:`_schema.MetaData` or
+         :class:`_schema.Table`, but also including all create/drop
+         objects such as :class:`.Index`, :class:`.Sequence`, etc.
+
+         .. versionadded:: 2.0 Support for all :class:`.SchemaItem` objects
+            was added.
+
+        :param connection: the :class:`_engine.Connection` where the
+         DROP statement or statements have been emitted.
+        :param \**kw: additional keyword arguments relevant
+         to the event.  The contents of this dictionary
+         may vary across releases, and include the
+         list of tables being generated for a metadata-level
+         event, the checkfirst flag, and other
+         elements used by internal events.
+
+        :func:`.event.listen` also accepts the ``propagate=True``
+        modifier for this event; when True, the listener function will
+        be established for any copies made of the target object,
+        i.e. those copies that are generated when
+        :meth:`_schema.Table.to_metadata` is used.
+
+        """
+
+    def before_parent_attach(
+        self, target: SchemaEventTarget, parent: SchemaItem
+    ) -> None:
+        """Called before a :class:`.SchemaItem` is associated with
+        a parent :class:`.SchemaItem`.
+
+        :param target: the target object
+        :param parent: the parent to which the target is being attached.
+
+        :func:`.event.listen` also accepts the ``propagate=True``
+        modifier for this event; when True, the listener function will
+        be established for any copies made of the target object,
+        i.e. those copies that are generated when
+        :meth:`_schema.Table.to_metadata` is used.
+
+        """
+
+    def after_parent_attach(
+        self, target: SchemaEventTarget, parent: SchemaItem
+    ) -> None:
+        """Called after a :class:`.SchemaItem` is associated with
+        a parent :class:`.SchemaItem`.
+
+        :param target: the target object
+        :param parent: the parent to which the target is being attached.
+
+        :func:`.event.listen` also accepts the ``propagate=True``
+        modifier for this event; when True, the listener function will
+        be established for any copies made of the target object,
+        i.e. those copies that are generated when
+        :meth:`_schema.Table.to_metadata` is used.
+
+        """
+
+    def _sa_event_column_added_to_pk_constraint(
+        self, const: Constraint, col: Column[Any]
+    ) -> None:
+        """internal event hook used for primary key naming convention
+        updates.
+
+        """
+
+    def column_reflect(
+        self, inspector: Inspector, table: Table, column_info: ReflectedColumn
+    ) -> None:
+        """Called for each unit of 'column info' retrieved when
+        a :class:`_schema.Table` is being reflected.
+
+        This event is most easily used by applying it to a specific
+        :class:`_schema.MetaData` instance, where it will take effect for
+        all :class:`_schema.Table` objects within that
+        :class:`_schema.MetaData` that undergo reflection::
+
+            metadata = MetaData()
+
+
+            @event.listens_for(metadata, "column_reflect")
+            def receive_column_reflect(inspector, table, column_info):
+                # receives for all Table objects that are reflected
+                # under this MetaData
+                ...
+
+
+            # will use the above event hook
+            my_table = Table("my_table", metadata, autoload_with=some_engine)
+
+        .. versionadded:: 1.4.0b2 The :meth:`_events.DDLEvents.column_reflect`
+           hook may now be applied to a :class:`_schema.MetaData` object as
+           well as the :class:`_schema.MetaData` class itself where it will
+           take place for all :class:`_schema.Table` objects associated with
+           the targeted :class:`_schema.MetaData`.
+
+        It may also be applied to the :class:`_schema.Table` class across
+        the board::
+
+            from sqlalchemy import Table
+
+
+            @event.listens_for(Table, "column_reflect")
+            def receive_column_reflect(inspector, table, column_info):
+                # receives for all Table objects that are reflected
+                ...
+
+        It can also be applied to a specific :class:`_schema.Table` at the
+        point that one is being reflected using the
+        :paramref:`_schema.Table.listeners` parameter::
+
+            t1 = Table(
+                "my_table",
+                autoload_with=some_engine,
+                listeners=[("column_reflect", receive_column_reflect)],
+            )
+
+        The dictionary of column information as returned by the
+        dialect is passed, and can be modified.  The dictionary
+        is that returned in each element of the list returned
+        by :meth:`.reflection.Inspector.get_columns`:
+
+            * ``name`` - the column's name, is applied to the
+              :paramref:`_schema.Column.name` parameter
+
+            * ``type`` - the type of this column, which should be an instance
+              of :class:`~sqlalchemy.types.TypeEngine`, is applied to the
+              :paramref:`_schema.Column.type` parameter
+
+            * ``nullable`` - boolean flag if the column is NULL or NOT NULL,
+              is applied to the :paramref:`_schema.Column.nullable` parameter
+
+            * ``default`` - the column's server default value.  This is
+              normally specified as a plain string SQL expression, however the
+              event can pass a :class:`.FetchedValue`, :class:`.DefaultClause`,
+              or :func:`_expression.text` object as well.  Is applied to the
+              :paramref:`_schema.Column.server_default` parameter
+
+        The event is called before any action is taken against
+        this dictionary, and the contents can be modified; the following
+        additional keys may be added to the dictionary to further modify
+        how the :class:`_schema.Column` is constructed:
+
+
+            * ``key`` - the string key that will be used to access this
+              :class:`_schema.Column` in the ``.c`` collection; will be applied
+              to the :paramref:`_schema.Column.key` parameter. Is also used
+              for ORM mapping.  See the section
+              :ref:`mapper_automated_reflection_schemes` for an example.
+
+            * ``quote`` - force or un-force quoting on the column name;
+              is applied to the :paramref:`_schema.Column.quote` parameter.
+
+            * ``info`` - a dictionary of arbitrary data to follow along with
+              the :class:`_schema.Column`, is applied to the
+              :paramref:`_schema.Column.info` parameter.
+
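+        For example, a sketch of a listener that assigns a normalized
+        attribute ``key`` by modifying the dictionary in place (the naming
+        scheme is illustrative)::
+
+            @event.listens_for(Table, "column_reflect")
+            def receive_column_reflect(inspector, table, column_info):
+                # set the key under which the Column is accessible in .c,
+                # leaving the database-side name untouched
+                column_info["key"] = "attr_%s" % column_info["name"].lower()
+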
+        :func:`.event.listen` also accepts the ``propagate=True``
+        modifier for this event; when True, the listener function will
+        be established for any copies made of the target object,
+        i.e. those copies that are generated when
+        :meth:`_schema.Table.to_metadata` is used.
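+
+        A minimal sketch (the ``metadata`` and listener names are
+        illustrative)::
+
+            from sqlalchemy import event
+
+            event.listen(
+                metadata,
+                "column_reflect",
+                receive_column_reflect,
+                propagate=True,
+            )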
+
+        .. seealso::
+
+            :ref:`mapper_automated_reflection_schemes` -
+            in the ORM mapping documentation
+
+            :ref:`automap_intercepting_columns` -
+            in the :ref:`automap_toplevel` documentation
+
+            :ref:`metadata_reflection_dbagnostic_types` - in
+            the :ref:`metadata_reflection_toplevel` documentation
+
+        """
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/expression.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/expression.py
new file mode 100644
index 00000000..f8ac3a9e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/expression.py
@@ -0,0 +1,162 @@
+# sql/expression.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Defines the public namespace for SQL expression constructs.
+
+
+"""
+
+
+from __future__ import annotations
+
+from ._dml_constructors import delete as delete
+from ._dml_constructors import insert as insert
+from ._dml_constructors import update as update
+from ._elements_constructors import all_ as all_
+from ._elements_constructors import and_ as and_
+from ._elements_constructors import any_ as any_
+from ._elements_constructors import asc as asc
+from ._elements_constructors import between as between
+from ._elements_constructors import bindparam as bindparam
+from ._elements_constructors import bitwise_not as bitwise_not
+from ._elements_constructors import case as case
+from ._elements_constructors import cast as cast
+from ._elements_constructors import collate as collate
+from ._elements_constructors import column as column
+from ._elements_constructors import desc as desc
+from ._elements_constructors import distinct as distinct
+from ._elements_constructors import extract as extract
+from ._elements_constructors import false as false
+from ._elements_constructors import funcfilter as funcfilter
+from ._elements_constructors import label as label
+from ._elements_constructors import not_ as not_
+from ._elements_constructors import null as null
+from ._elements_constructors import nulls_first as nulls_first
+from ._elements_constructors import nulls_last as nulls_last
+from ._elements_constructors import or_ as or_
+from ._elements_constructors import outparam as outparam
+from ._elements_constructors import over as over
+from ._elements_constructors import text as text
+from ._elements_constructors import true as true
+from ._elements_constructors import try_cast as try_cast
+from ._elements_constructors import tuple_ as tuple_
+from ._elements_constructors import type_coerce as type_coerce
+from ._elements_constructors import within_group as within_group
+from ._selectable_constructors import alias as alias
+from ._selectable_constructors import cte as cte
+from ._selectable_constructors import except_ as except_
+from ._selectable_constructors import except_all as except_all
+from ._selectable_constructors import exists as exists
+from ._selectable_constructors import intersect as intersect
+from ._selectable_constructors import intersect_all as intersect_all
+from ._selectable_constructors import join as join
+from ._selectable_constructors import lateral as lateral
+from ._selectable_constructors import outerjoin as outerjoin
+from ._selectable_constructors import select as select
+from ._selectable_constructors import table as table
+from ._selectable_constructors import tablesample as tablesample
+from ._selectable_constructors import union as union
+from ._selectable_constructors import union_all as union_all
+from ._selectable_constructors import values as values
+from ._typing import ColumnExpressionArgument as ColumnExpressionArgument
+from .base import _from_objects as _from_objects
+from .base import _select_iterables as _select_iterables
+from .base import ColumnCollection as ColumnCollection
+from .base import Executable as Executable
+from .cache_key import CacheKey as CacheKey
+from .dml import Delete as Delete
+from .dml import Insert as Insert
+from .dml import Update as Update
+from .dml import UpdateBase as UpdateBase
+from .dml import ValuesBase as ValuesBase
+from .elements import _truncated_label as _truncated_label
+from .elements import BinaryExpression as BinaryExpression
+from .elements import BindParameter as BindParameter
+from .elements import BooleanClauseList as BooleanClauseList
+from .elements import Case as Case
+from .elements import Cast as Cast
+from .elements import ClauseElement as ClauseElement
+from .elements import ClauseList as ClauseList
+from .elements import CollectionAggregate as CollectionAggregate
+from .elements import ColumnClause as ColumnClause
+from .elements import ColumnElement as ColumnElement
+from .elements import ExpressionClauseList as ExpressionClauseList
+from .elements import Extract as Extract
+from .elements import False_ as False_
+from .elements import FunctionFilter as FunctionFilter
+from .elements import Grouping as Grouping
+from .elements import Label as Label
+from .elements import literal as literal
+from .elements import literal_column as literal_column
+from .elements import Null as Null
+from .elements import Over as Over
+from .elements import quoted_name as quoted_name
+from .elements import ReleaseSavepointClause as ReleaseSavepointClause
+from .elements import RollbackToSavepointClause as RollbackToSavepointClause
+from .elements import SavepointClause as SavepointClause
+from .elements import SQLColumnExpression as SQLColumnExpression
+from .elements import TextClause as TextClause
+from .elements import True_ as True_
+from .elements import TryCast as TryCast
+from .elements import Tuple as Tuple
+from .elements import TypeClause as TypeClause
+from .elements import TypeCoerce as TypeCoerce
+from .elements import UnaryExpression as UnaryExpression
+from .elements import WithinGroup as WithinGroup
+from .functions import func as func
+from .functions import Function as Function
+from .functions import FunctionElement as FunctionElement
+from .functions import modifier as modifier
+from .lambdas import lambda_stmt as lambda_stmt
+from .lambdas import LambdaElement as LambdaElement
+from .lambdas import StatementLambdaElement as StatementLambdaElement
+from .operators import ColumnOperators as ColumnOperators
+from .operators import custom_op as custom_op
+from .operators import Operators as Operators
+from .selectable import Alias as Alias
+from .selectable import AliasedReturnsRows as AliasedReturnsRows
+from .selectable import CompoundSelect as CompoundSelect
+from .selectable import CTE as CTE
+from .selectable import Exists as Exists
+from .selectable import FromClause as FromClause
+from .selectable import FromGrouping as FromGrouping
+from .selectable import GenerativeSelect as GenerativeSelect
+from .selectable import HasCTE as HasCTE
+from .selectable import HasPrefixes as HasPrefixes
+from .selectable import HasSuffixes as HasSuffixes
+from .selectable import Join as Join
+from .selectable import LABEL_STYLE_DEFAULT as LABEL_STYLE_DEFAULT
+from .selectable import (
+    LABEL_STYLE_DISAMBIGUATE_ONLY as LABEL_STYLE_DISAMBIGUATE_ONLY,
+)
+from .selectable import LABEL_STYLE_NONE as LABEL_STYLE_NONE
+from .selectable import (
+    LABEL_STYLE_TABLENAME_PLUS_COL as LABEL_STYLE_TABLENAME_PLUS_COL,
+)
+from .selectable import Lateral as Lateral
+from .selectable import ReturnsRows as ReturnsRows
+from .selectable import ScalarSelect as ScalarSelect
+from .selectable import ScalarValues as ScalarValues
+from .selectable import Select as Select
+from .selectable import Selectable as Selectable
+from .selectable import SelectBase as SelectBase
+from .selectable import SelectLabelStyle as SelectLabelStyle
+from .selectable import Subquery as Subquery
+from .selectable import TableClause as TableClause
+from .selectable import TableSample as TableSample
+from .selectable import TableValuedAlias as TableValuedAlias
+from .selectable import TextAsFrom as TextAsFrom
+from .selectable import TextualSelect as TextualSelect
+from .selectable import Values as Values
+from .visitors import Visitable as Visitable
+
+nullsfirst = nulls_first
+"""Synonym for the :func:`.nulls_first` function."""
+
+
+nullslast = nulls_last
+"""Synonym for the :func:`.nulls_last` function."""
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/functions.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/functions.py
new file mode 100644
index 00000000..ea02279d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/functions.py
@@ -0,0 +1,2064 @@
+# sql/functions.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+
+"""SQL function API, factories, and built-in functions.
+
+"""
+
+from __future__ import annotations
+
+import datetime
+import decimal
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import annotation
+from . import coercions
+from . import operators
+from . import roles
+from . import schema
+from . import sqltypes
+from . import type_api
+from . import util as sqlutil
+from ._typing import is_table_value_type
+from .base import _entity_namespace
+from .base import ColumnCollection
+from .base import Executable
+from .base import Generative
+from .base import HasMemoized
+from .elements import _type_from_args
+from .elements import BinaryExpression
+from .elements import BindParameter
+from .elements import Cast
+from .elements import ClauseList
+from .elements import ColumnElement
+from .elements import Extract
+from .elements import FunctionFilter
+from .elements import Grouping
+from .elements import literal_column
+from .elements import NamedColumn
+from .elements import Over
+from .elements import WithinGroup
+from .selectable import FromClause
+from .selectable import Select
+from .selectable import TableValuedAlias
+from .sqltypes import TableValueType
+from .type_api import TypeEngine
+from .visitors import InternalTraversal
+from .. import util
+
+
+if TYPE_CHECKING:
+    from ._typing import _ByArgument
+    from ._typing import _ColumnExpressionArgument
+    from ._typing import _ColumnExpressionOrLiteralArgument
+    from ._typing import _ColumnExpressionOrStrLabelArgument
+    from ._typing import _StarOrOne
+    from ._typing import _TypeEngineArgument
+    from .base import _EntityNamespace
+    from .elements import ClauseElement
+    from .elements import KeyedColumnElement
+    from .elements import TableValuedColumn
+    from .operators import OperatorType
+    from ..engine.base import Connection
+    from ..engine.cursor import CursorResult
+    from ..engine.interfaces import _CoreMultiExecuteParams
+    from ..engine.interfaces import CoreExecuteOptionsParameter
+    from ..util.typing import Self
+
+_T = TypeVar("_T", bound=Any)
+_S = TypeVar("_S", bound=Any)
+
+_registry: util.defaultdict[str, Dict[str, Type[Function[Any]]]] = (
+    util.defaultdict(dict)
+)
+
+
+def register_function(
+    identifier: str, fn: Type[Function[Any]], package: str = "_default"
+) -> None:
+    """Associate a callable with a particular func. name.
+
+    This is normally called by GenericFunction, but is also
+    available by itself so that a non-Function construct
+    can be associated with the :data:`.func` accessor (i.e.
+    CAST, EXTRACT).
+
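+    E.g., this module itself registers :class:`.Cast` under the name
+    ``cast``::
+
+        register_function("cast", Cast)
+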
+    """
+    reg = _registry[package]
+
+    identifier = str(identifier).lower()
+
+    # Check if a function with the same identifier is registered.
+    if identifier in reg:
+        util.warn(
+            "The GenericFunction '{}' is already registered and "
+            "is going to be overridden.".format(identifier)
+        )
+    reg[identifier] = fn
+
+
+class FunctionElement(Executable, ColumnElement[_T], FromClause, Generative):
+    """Base for SQL function-oriented constructs.
+
+    This is a `generic type <https://peps.python.org/pep-0484/#generics>`_,
+    meaning that type checkers and IDEs can be instructed on the types to
+    expect in a :class:`_engine.Result` for this function. See
+    :class:`.GenericFunction` for an example of how this is done.
+
+    .. seealso::
+
+        :ref:`tutorial_functions` - in the :ref:`unified_tutorial`
+
+        :class:`.Function` - named SQL function.
+
+        :data:`.func` - namespace which produces registered or ad-hoc
+        :class:`.Function` instances.
+
+        :class:`.GenericFunction` - allows creation of registered function
+        types.
+
+    """
+
+    _traverse_internals = [
+        ("clause_expr", InternalTraversal.dp_clauseelement),
+        ("_with_ordinality", InternalTraversal.dp_boolean),
+        ("_table_value_type", InternalTraversal.dp_has_cache_key),
+    ]
+
+    packagenames: Tuple[str, ...] = ()
+
+    _has_args = False
+    _with_ordinality = False
+    _table_value_type: Optional[TableValueType] = None
+
+    # some attributes that are defined between both ColumnElement and
+    # FromClause are set to Any here to avoid typing errors
+    primary_key: Any
+    _is_clone_of: Any
+
+    clause_expr: Grouping[Any]
+
+    def __init__(self, *clauses: _ColumnExpressionOrLiteralArgument[Any]):
+        r"""Construct a :class:`.FunctionElement`.
+
+        :param \*clauses: list of column expressions that form the arguments
+         of the SQL function call.
+
+        :param \**kwargs: additional kwargs are typically consumed by
+         subclasses; the base :class:`.FunctionElement` itself accepts
+         no keyword arguments.
+
+        .. seealso::
+
+            :data:`.func`
+
+            :class:`.Function`
+
+        """
+        args: Sequence[_ColumnExpressionArgument[Any]] = [
+            coercions.expect(
+                roles.ExpressionElementRole,
+                c,
+                name=getattr(self, "name", None),
+                apply_propagate_attrs=self,
+            )
+            for c in clauses
+        ]
+        self._has_args = self._has_args or bool(args)
+        self.clause_expr = Grouping(
+            ClauseList(operator=operators.comma_op, group_contents=True, *args)
+        )
+
+    _non_anon_label = None
+
+    @property
+    def _proxy_key(self) -> Any:
+        return super()._proxy_key or getattr(self, "name", None)
+
+    def _execute_on_connection(
+        self,
+        connection: Connection,
+        distilled_params: _CoreMultiExecuteParams,
+        execution_options: CoreExecuteOptionsParameter,
+    ) -> CursorResult[Any]:
+        return connection._execute_function(
+            self, distilled_params, execution_options
+        )
+
+    def scalar_table_valued(
+        self, name: str, type_: Optional[_TypeEngineArgument[_T]] = None
+    ) -> ScalarFunctionColumn[_T]:
+        """Return a column expression that's against this
+        :class:`_functions.FunctionElement` as a scalar
+        table-valued expression.
+
+        The returned expression is similar to that returned by a single column
+        accessed off of a :meth:`_functions.FunctionElement.table_valued`
+        construct, except no FROM clause is generated; the function is rendered
+        in the similar way as a scalar subquery.
+
+        E.g.:
+
+        .. sourcecode:: pycon+sql
+
+            >>> from sqlalchemy import func, select
+            >>> fn = func.jsonb_each("{'k', 'v'}").scalar_table_valued("key")
+            >>> print(select(fn))
+            {printsql}SELECT (jsonb_each(:jsonb_each_1)).key
+
+        .. versionadded:: 1.4.0b2
+
+        .. seealso::
+
+            :meth:`_functions.FunctionElement.table_valued`
+
+            :meth:`_functions.FunctionElement.alias`
+
+            :meth:`_functions.FunctionElement.column_valued`
+
+        """  # noqa: E501
+
+        return ScalarFunctionColumn(self, name, type_)
+
+    def table_valued(
+        self, *expr: _ColumnExpressionOrStrLabelArgument[Any], **kw: Any
+    ) -> TableValuedAlias:
+        r"""Return a :class:`_sql.TableValuedAlias` representation of this
+        :class:`_functions.FunctionElement` with table-valued expressions added.
+
+        e.g.:
+
+        .. sourcecode:: pycon+sql
+
+            >>> fn = func.generate_series(1, 5).table_valued(
+            ...     "value", "start", "stop", "step"
+            ... )
+
+            >>> print(select(fn))
+            {printsql}SELECT anon_1.value, anon_1.start, anon_1.stop, anon_1.step
+            FROM generate_series(:generate_series_1, :generate_series_2) AS anon_1{stop}
+
+            >>> print(select(fn.c.value, fn.c.stop).where(fn.c.value > 2))
+            {printsql}SELECT anon_1.value, anon_1.stop
+            FROM generate_series(:generate_series_1, :generate_series_2) AS anon_1
+            WHERE anon_1.value > :value_1{stop}
+
+        A WITH ORDINALITY expression may be generated by passing the keyword
+        argument "with_ordinality":
+
+        .. sourcecode:: pycon+sql
+
+            >>> fn = func.generate_series(4, 1, -1).table_valued(
+            ...     "gen", with_ordinality="ordinality"
+            ... )
+            >>> print(select(fn))
+            {printsql}SELECT anon_1.gen, anon_1.ordinality
+            FROM generate_series(:generate_series_1, :generate_series_2, :generate_series_3) WITH ORDINALITY AS anon_1
+
+        :param \*expr: A series of string column names that will be added to the
+         ``.c`` collection of the resulting :class:`_sql.TableValuedAlias`
+         construct as columns.  :func:`_sql.column` objects with or without
+         datatypes may also be used.
+
+        :param name: optional name to assign to the alias name that's generated.
+         If omitted, a unique anonymizing name is used.
+
+        :param with_ordinality: string name that when present results in the
+         ``WITH ORDINALITY`` clause being added to the alias, and the given
+         string name will be added as a column to the .c collection
+         of the resulting :class:`_sql.TableValuedAlias`.
+
+        :param joins_implicitly: when True, the table valued function may be
+         used in the FROM clause without any explicit JOIN to other tables
+         in the SQL query, and no "cartesian product" warning will be generated.
+         May be useful for SQL functions such as ``func.json_each()``.
+
+         .. versionadded:: 1.4.33
+
+        .. versionadded:: 1.4.0b2
+
+
+        .. seealso::
+
+            :ref:`tutorial_functions_table_valued` - in the :ref:`unified_tutorial`
+
+            :ref:`postgresql_table_valued` - in the :ref:`postgresql_toplevel` documentation
+
+            :meth:`_functions.FunctionElement.scalar_table_valued` - variant of
+            :meth:`_functions.FunctionElement.table_valued` which delivers the
+            complete table valued expression as a scalar column expression
+
+            :meth:`_functions.FunctionElement.column_valued`
+
+            :meth:`_sql.TableValuedAlias.render_derived` - renders the alias
+            using a derived column clause, e.g. ``AS name(col1, col2, ...)``
+
+        """  # noqa: 501
+
+        new_func = self._generate()
+
+        with_ordinality = kw.pop("with_ordinality", None)
+        joins_implicitly = kw.pop("joins_implicitly", None)
+        name = kw.pop("name", None)
+
+        if with_ordinality:
+            expr += (with_ordinality,)
+            new_func._with_ordinality = True
+
+        new_func.type = new_func._table_value_type = TableValueType(*expr)
+
+        return new_func.alias(name=name, joins_implicitly=joins_implicitly)
+
+    def column_valued(
+        self, name: Optional[str] = None, joins_implicitly: bool = False
+    ) -> TableValuedColumn[_T]:
+        """Return this :class:`_functions.FunctionElement` as a column expression that
+        selects from itself as a FROM clause.
+
+        E.g.:
+
+        .. sourcecode:: pycon+sql
+
+            >>> from sqlalchemy import select, func
+            >>> gs = func.generate_series(1, 5, -1).column_valued()
+            >>> print(select(gs))
+            {printsql}SELECT anon_1
+            FROM generate_series(:generate_series_1, :generate_series_2, :generate_series_3) AS anon_1
+
+        This is shorthand for::
+
+            gs = func.generate_series(1, 5, -1).alias().column
+
+        :param name: optional name to assign to the alias name that's generated.
+         If omitted, a unique anonymizing name is used.
+
+        :param joins_implicitly: when True, the "table" portion of the column
+         valued function may be a member of the FROM clause without any
+         explicit JOIN to other tables in the SQL query, and no "cartesian
+         product" warning will be generated. May be useful for SQL functions
+         such as ``func.json_array_elements()``.
+
+         .. versionadded:: 1.4.46
+
+        .. seealso::
+
+            :ref:`tutorial_functions_column_valued` - in the :ref:`unified_tutorial`
+
+            :ref:`postgresql_column_valued` - in the :ref:`postgresql_toplevel` documentation
+
+            :meth:`_functions.FunctionElement.table_valued`
+
+        """  # noqa: 501
+
+        return self.alias(name=name, joins_implicitly=joins_implicitly).column
+
+    @util.ro_non_memoized_property
+    def columns(self) -> ColumnCollection[str, KeyedColumnElement[Any]]:  # type: ignore[override]  # noqa: E501
+        r"""The set of columns exported by this :class:`.FunctionElement`.
+
+        This is a placeholder collection that allows the function to be
+        placed in the FROM clause of a statement:
+
+        .. sourcecode:: pycon+sql
+
+            >>> from sqlalchemy import column, select, func
+            >>> stmt = select(column("x"), column("y")).select_from(func.myfunction())
+            >>> print(stmt)
+            {printsql}SELECT x, y FROM myfunction()
+
+        The above form is a legacy feature that is now superseded by the
+        fully capable :meth:`_functions.FunctionElement.table_valued`
+        method; see that method for details.
+
+        .. seealso::
+
+            :meth:`_functions.FunctionElement.table_valued` - generates table-valued
+            SQL function expressions.
+
+        """  # noqa: E501
+        return self.c
+
+    @util.ro_memoized_property
+    def c(self) -> ColumnCollection[str, KeyedColumnElement[Any]]:  # type: ignore[override]  # noqa: E501
+        """synonym for :attr:`.FunctionElement.columns`."""
+
+        return ColumnCollection(
+            columns=[(col.key, col) for col in self._all_selected_columns]
+        )
+
+    @property
+    def _all_selected_columns(self) -> Sequence[KeyedColumnElement[Any]]:
+        if is_table_value_type(self.type):
+            # TODO: this might not be fully accurate
+            cols = cast(
+                "Sequence[KeyedColumnElement[Any]]", self.type._elements
+            )
+        else:
+            cols = [self.label(None)]
+
+        return cols
+
+    @property
+    def exported_columns(  # type: ignore[override]
+        self,
+    ) -> ColumnCollection[str, KeyedColumnElement[Any]]:
+        return self.columns
+
+    @HasMemoized.memoized_attribute
+    def clauses(self) -> ClauseList:
+        """Return the underlying :class:`.ClauseList` which contains
+        the arguments for this :class:`.FunctionElement`.
+
+        """
+        return cast(ClauseList, self.clause_expr.element)
+
+    def over(
+        self,
+        *,
+        partition_by: Optional[_ByArgument] = None,
+        order_by: Optional[_ByArgument] = None,
+        rows: Optional[Tuple[Optional[int], Optional[int]]] = None,
+        range_: Optional[Tuple[Optional[int], Optional[int]]] = None,
+    ) -> Over[_T]:
+        """Produce an OVER clause against this function.
+
+        Used against aggregate or so-called "window" functions,
+        for database backends that support window functions.
+
+        The expression::
+
+            func.row_number().over(order_by="x")
+
+        is shorthand for::
+
+            from sqlalchemy import over
+
+            over(func.row_number(), order_by="x")
+
+        See :func:`_expression.over` for a full description.
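+
+        A sketch combining partitioning with a rows frame of UNBOUNDED
+        PRECEDING through CURRENT ROW (table and column names are
+        illustrative)::
+
+            func.sum(table.c.amount).over(
+                partition_by=table.c.dept,
+                order_by=table.c.posted,
+                rows=(None, 0),
+            )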
+
+        .. seealso::
+
+            :func:`_expression.over`
+
+            :ref:`tutorial_window_functions` - in the :ref:`unified_tutorial`
+
+        """
+        return Over(
+            self,
+            partition_by=partition_by,
+            order_by=order_by,
+            rows=rows,
+            range_=range_,
+        )
+
+    def within_group(
+        self, *order_by: _ColumnExpressionArgument[Any]
+    ) -> WithinGroup[_T]:
+        """Produce a WITHIN GROUP (ORDER BY expr) clause against this function.
+
+        Used against so-called "ordered set aggregate" and "hypothetical
+        set aggregate" functions, including :class:`.percentile_cont`,
+        :class:`.rank`, :class:`.dense_rank`, etc.
+
+        See :func:`_expression.within_group` for a full description.
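+
+        E.g., computing a median with ``percentile_cont`` (the table name
+        is illustrative)::
+
+            func.percentile_cont(0.5).within_group(table.c.value)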
+
+        .. seealso::
+
+            :ref:`tutorial_functions_within_group` -
+            in the :ref:`unified_tutorial`
+
+
+        """
+        return WithinGroup(self, *order_by)
+
+    @overload
+    def filter(self) -> Self: ...
+
+    @overload
+    def filter(
+        self,
+        __criterion0: _ColumnExpressionArgument[bool],
+        *criterion: _ColumnExpressionArgument[bool],
+    ) -> FunctionFilter[_T]: ...
+
+    def filter(
+        self, *criterion: _ColumnExpressionArgument[bool]
+    ) -> Union[Self, FunctionFilter[_T]]:
+        """Produce a FILTER clause against this function.
+
+        Used against aggregate and window functions,
+        for database backends that support the "FILTER" clause.
+
+        The expression::
+
+            func.count(1).filter(True)
+
+        is shorthand for::
+
+            from sqlalchemy import funcfilter
+
+            funcfilter(func.count(1), True)
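+
+        A more typical sketch filters an aggregate on a condition (table
+        and column names are illustrative)::
+
+            func.count(table.c.id).filter(table.c.status == "active")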
+
+        .. seealso::
+
+            :ref:`tutorial_functions_within_group` -
+            in the :ref:`unified_tutorial`
+
+            :class:`.FunctionFilter`
+
+            :func:`.funcfilter`
+
+
+        """
+        if not criterion:
+            return self
+        return FunctionFilter(self, *criterion)
+
+    def as_comparison(
+        self, left_index: int, right_index: int
+    ) -> FunctionAsBinary:
+        """Interpret this expression as a boolean comparison between two
+        values.
+
+        This method is used for an ORM use case described at
+        :ref:`relationship_custom_operator_sql_function`.
+
+        A hypothetical SQL function "is_equal()" which compares two values
+        for equality would be written in the Core expression language as::
+
+            expr = func.is_equal("a", "b")
+
+        If "is_equal()" above is comparing "a" and "b" for equality, the
+        :meth:`.FunctionElement.as_comparison` method would be invoked as::
+
+            expr = func.is_equal("a", "b").as_comparison(1, 2)
+
+        Where above, the integer value "1" refers to the first argument of the
+        "is_equal()" function and the integer value "2" refers to the second.
+
+        This would create a :class:`.BinaryExpression` that is equivalent to::
+
+            BinaryExpression("a", "b", operator=op.eq)
+
+        However, at the SQL level it would still render as
+        "is_equal('a', 'b')".
+
+        The ORM, when it loads a related object or collection, needs to be able
+        to manipulate the "left" and "right" sides of the ON clause of a JOIN
+        expression. The purpose of this method is to provide a SQL function
+        construct that can also supply this information to the ORM, when used
+        with the :paramref:`_orm.relationship.primaryjoin` parameter. The
+        return value is a containment object called :class:`.FunctionAsBinary`.
+
+        An ORM example is as follows::
+
+            class Venue(Base):
+                __tablename__ = "venue"
+                id = Column(Integer, primary_key=True)
+                name = Column(String)
+
+                descendants = relationship(
+                    "Venue",
+                    primaryjoin=func.instr(
+                        remote(foreign(name)), name + "/"
+                    ).as_comparison(1, 2)
+                    == 1,
+                    viewonly=True,
+                    order_by=name,
+                )
+
+        Above, the "Venue" class can load descendant "Venue" objects by
+        determining if the name of the parent Venue is contained within the
+        start of the hypothetical descendant value's name, e.g. "parent1" would
+        match up to "parent1/child1", but not to "parent2/child1".
+
+        Possible use cases include the "materialized path" example given above,
+        as well as making use of special SQL functions such as geometric
+        functions to create join conditions.
+
+        :param left_index: the integer 1-based index of the function argument
+         that serves as the "left" side of the expression.
+        :param right_index: the integer 1-based index of the function argument
+         that serves as the "right" side of the expression.
+
+        .. versionadded:: 1.3
+
+        .. seealso::
+
+            :ref:`relationship_custom_operator_sql_function` -
+            example use within the ORM
+
+        """
+        return FunctionAsBinary(self, left_index, right_index)
+
+    @property
+    def _from_objects(self) -> Any:
+        return self.clauses._from_objects
+
+    def within_group_type(
+        self, within_group: WithinGroup[_S]
+    ) -> Optional[TypeEngine[_S]]:
+        """For types that define their return type as based on the criteria
+        within a WITHIN GROUP (ORDER BY) expression, called by the
+        :class:`.WithinGroup` construct.
+
+        Returns None by default, in which case the function's normal ``.type``
+        is used.
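+
+        A sketch of an override that derives the type from the ORDER BY
+        expression, in the spirit of ordered-set aggregates such as
+        ``percentile_cont`` (assumes a subclass defining this hook)::
+
+            def within_group_type(self, within_group):
+                # take the type of the first ORDER BY expression
+                order_by = sqlutil.unwrap_order_by(within_group.order_by)
+                return order_by[0].type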
+
+        """
+
+        return None
+
+    def alias(
+        self, name: Optional[str] = None, joins_implicitly: bool = False
+    ) -> TableValuedAlias:
+        r"""Produce a :class:`_expression.Alias` construct against this
+        :class:`.FunctionElement`.
+
+        .. tip::
+
+            The :meth:`_functions.FunctionElement.alias` method is part of the
+            mechanism by which "table valued" SQL functions are created.
+            However, most use cases are covered by higher level methods on
+            :class:`_functions.FunctionElement` including
+            :meth:`_functions.FunctionElement.table_valued`, and
+            :meth:`_functions.FunctionElement.column_valued`.
+
+        This construct wraps the function in a named alias which
+        is suitable for the FROM clause, in the style accepted for example
+        by PostgreSQL.  A column expression is also provided using the
+        special ``.column`` attribute, which may
+        be used to refer to the output of the function as a scalar value
+        in the columns or where clause, for a backend such as PostgreSQL.
+
+        For a full table-valued expression, use the
+        :meth:`_functions.FunctionElement.table_valued` method first to
+        establish named columns.
+
+        e.g.:
+
+        .. sourcecode:: pycon+sql
+
+            >>> from sqlalchemy import func, select, column
+            >>> data_view = func.unnest([1, 2, 3]).alias("data_view")
+            >>> print(select(data_view.column))
+            {printsql}SELECT data_view
+            FROM unnest(:unnest_1) AS data_view
+
+        The :meth:`_functions.FunctionElement.column_valued` method provides
+        a shortcut for the above pattern:
+
+        .. sourcecode:: pycon+sql
+
+            >>> data_view = func.unnest([1, 2, 3]).column_valued("data_view")
+            >>> print(select(data_view))
+            {printsql}SELECT data_view
+            FROM unnest(:unnest_1) AS data_view
+
+        .. versionadded:: 1.4.0b2  Added the ``.column`` accessor
+
+        :param name: alias name, will be rendered as ``AS <name>`` in the
+         FROM clause
+
+        :param joins_implicitly: when True, the table valued function may be
+         used in the FROM clause without any explicit JOIN to other tables
+         in the SQL query, and no "cartesian product" warning will be
+         generated.  May be useful for SQL functions such as
+         ``func.json_each()``.
+
+         .. versionadded:: 1.4.33
+
+        .. seealso::
+
+            :ref:`tutorial_functions_table_valued` -
+            in the :ref:`unified_tutorial`
+
+            :meth:`_functions.FunctionElement.table_valued`
+
+            :meth:`_functions.FunctionElement.scalar_table_valued`
+
+            :meth:`_functions.FunctionElement.column_valued`
+
+
+        """
+
+        return TableValuedAlias._construct(
+            self,
+            name=name,
+            table_value_type=self.type,
+            joins_implicitly=joins_implicitly,
+        )
+
+    def select(self) -> Select[Tuple[_T]]:
+        """Produce a :func:`_expression.select` construct
+        against this :class:`.FunctionElement`.
+
+        This is shorthand for::
+
+            s = select(function_element)
+
+        """
+        s: Select[Any] = Select(self)
+        if self._execution_options:
+            s = s.execution_options(**self._execution_options)
+        return s
+
+    def _bind_param(
+        self,
+        operator: OperatorType,
+        obj: Any,
+        type_: Optional[TypeEngine[_T]] = None,
+        expanding: bool = False,
+        **kw: Any,
+    ) -> BindParameter[_T]:
+        return BindParameter(
+            None,
+            obj,
+            _compared_to_operator=operator,
+            _compared_to_type=self.type,
+            unique=True,
+            type_=type_,
+            expanding=expanding,
+            **kw,
+        )
+
+    def self_group(self, against: Optional[OperatorType] = None) -> ClauseElement:  # type: ignore[override]  # noqa: E501
+        # for the moment, we are parenthesizing all array-returning
+        # expressions against getitem.  This may need to be made
+        # more portable if in the future we support other DBs
+        # besides postgresql.
+        if against is operators.getitem and isinstance(
+            self.type, sqltypes.ARRAY
+        ):
+            return Grouping(self)
+        else:
+            return super().self_group(against=against)
+
+    @property
+    def entity_namespace(self) -> _EntityNamespace:
+        """overrides FromClause.entity_namespace as functions are generally
+        column expressions and not FromClauses.
+
+        """
+        # ideally functions would not be fromclauses but we failed to make
+        # this adjustment in 1.4
+        return _entity_namespace(self.clause_expr)
+
+
+class FunctionAsBinary(BinaryExpression[Any]):
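+    """Wrap a :class:`.FunctionElement` so that two of its arguments act
+    as the "left" and "right" sides of a binary expression; produced by
+    :meth:`.FunctionElement.as_comparison`.
+
+    """
+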
+    _traverse_internals = [
+        ("sql_function", InternalTraversal.dp_clauseelement),
+        ("left_index", InternalTraversal.dp_plain_obj),
+        ("right_index", InternalTraversal.dp_plain_obj),
+        ("modifiers", InternalTraversal.dp_plain_dict),
+    ]
+
+    sql_function: FunctionElement[Any]
+    left_index: int
+    right_index: int
+
+    def _gen_cache_key(self, anon_map: Any, bindparams: Any) -> Any:
+        return ColumnElement._gen_cache_key(self, anon_map, bindparams)
+
+    def __init__(
+        self, fn: FunctionElement[Any], left_index: int, right_index: int
+    ):
+        self.sql_function = fn
+        self.left_index = left_index
+        self.right_index = right_index
+
+        self.operator = operators.function_as_comparison_op
+        self.type = sqltypes.BOOLEANTYPE
+        self.negate = None
+        self._is_implicitly_boolean = True
+        self.modifiers = {}
+
+    @property
+    def left_expr(self) -> ColumnElement[Any]:
+        return self.sql_function.clauses.clauses[self.left_index - 1]
+
+    @left_expr.setter
+    def left_expr(self, value: ColumnElement[Any]) -> None:
+        self.sql_function.clauses.clauses[self.left_index - 1] = value
+
+    @property
+    def right_expr(self) -> ColumnElement[Any]:
+        return self.sql_function.clauses.clauses[self.right_index - 1]
+
+    @right_expr.setter
+    def right_expr(self, value: ColumnElement[Any]) -> None:
+        self.sql_function.clauses.clauses[self.right_index - 1] = value
+
+    if not TYPE_CHECKING:
+        # mypy can't accommodate @property to replace an instance
+        # variable
+
+        left = left_expr
+        right = right_expr
+
+
+class ScalarFunctionColumn(NamedColumn[_T]):
+    __visit_name__ = "scalar_function_column"
+
+    _traverse_internals = [
+        ("name", InternalTraversal.dp_anon_name),
+        ("type", InternalTraversal.dp_type),
+        ("fn", InternalTraversal.dp_clauseelement),
+    ]
+
+    is_literal = False
+    table = None
+
+    def __init__(
+        self,
+        fn: FunctionElement[_T],
+        name: str,
+        type_: Optional[_TypeEngineArgument[_T]] = None,
+    ):
+        self.fn = fn
+        self.name = name
+
+        # if type is None, we get NULLTYPE, which is our _T.  But I don't
+        # know how to get the overloads to express that correctly
+        self.type = type_api.to_instance(type_)  # type: ignore
+
+
+class _FunctionGenerator:
+    """Generate SQL function expressions.
+
+    :data:`.func` is a special object instance which generates SQL
+    functions based on name-based attributes, e.g.:
+
+    .. sourcecode:: pycon+sql
+
+        >>> print(func.count(1))
+        {printsql}count(:param_1)
+
+    The returned object is an instance of :class:`.Function`, and is a
+    column-oriented SQL element like any other, and is used in that way:
+
+    .. sourcecode:: pycon+sql
+
+        >>> print(select(func.count(table.c.id)))
+        {printsql}SELECT count(sometable.id) FROM sometable
+
+    Any name can be given to :data:`.func`. If the function name is unknown to
+    SQLAlchemy, it will be rendered exactly as is. For common SQL functions
+    which SQLAlchemy is aware of, the name may be interpreted as a *generic
+    function* which will be compiled appropriately to the target database:
+
+    .. sourcecode:: pycon+sql
+
+        >>> print(func.current_timestamp())
+        {printsql}CURRENT_TIMESTAMP
+
+    To call functions which are present in dot-separated packages,
+    specify them in the same manner:
+
+    .. sourcecode:: pycon+sql
+
+        >>> print(func.stats.yield_curve(5, 10))
+        {printsql}stats.yield_curve(:yield_curve_1, :yield_curve_2)
+
+    SQLAlchemy can be made aware of the return type of functions to enable
+    type-specific lexical and result-based behavior. For example, to ensure
+    that a string-based function returns a Unicode value and is similarly
+    treated as a string in expressions, specify
+    :class:`~sqlalchemy.types.Unicode` as the type:
+
+    .. sourcecode:: pycon+sql
+
+        >>> print(
+        ...     func.my_string("hi", type_=Unicode)
+        ...     + " "
+        ...     + func.my_string("there", type_=Unicode)
+        ... )
+        {printsql}my_string(:my_string_1) || :my_string_2 || my_string(:my_string_3)
+
+    The object returned by a :data:`.func` call is usually an instance of
+    :class:`.Function`.
+    This object meets the "column" interface, including comparison and labeling
+    functions.  The object can also be passed to the
+    :meth:`~.Connectable.execute` method of a :class:`_engine.Connection`
+    or :class:`_engine.Engine`, where it will be wrapped inside a SELECT
+    statement first::
+
+        print(connection.execute(func.current_timestamp()).scalar())
+
+    In a few exception cases, the :data:`.func` accessor
+    will redirect a name to a built-in expression such as :func:`.cast`
+    or :func:`.extract`, as these names have well-known meaning
+    but are not exactly the same as "functions" from a SQLAlchemy
+    perspective.
+
+    Functions which are interpreted as "generic" functions know how to
+    calculate their return type automatically. For a listing of known generic
+    functions, see :ref:`generic_functions`.
+
+    .. note::
+
+        The :data:`.func` construct has only limited support for calling
+        standalone "stored procedures", especially those with special
+        parameterization concerns.
+
+        See the section :ref:`stored_procedures` for details on how to use
+        the DBAPI-level ``callproc()`` method for fully traditional stored
+        procedures.
+
+    .. seealso::
+
+        :ref:`tutorial_functions` - in the :ref:`unified_tutorial`
+
+        :class:`.Function`
+
+    """  # noqa
+
+    def __init__(self, **opts: Any):
+        self.__names: List[str] = []
+        self.opts = opts
+
+    def __getattr__(self, name: str) -> _FunctionGenerator:
+        # passthru __ attributes; fixes pydoc
+        if name.startswith("__"):
+            try:
+                return self.__dict__[name]  # type: ignore
+            except KeyError:
+                raise AttributeError(name)
+
+        elif name.endswith("_"):
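+            # a single trailing underscore is stripped, so that names which
+            # collide with Python keywords or builtins may be spelled with
+            # a trailing "_"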
+            name = name[0:-1]
+        f = _FunctionGenerator(**self.opts)
+        f.__names = list(self.__names) + [name]
+        return f
+
+    @overload
+    def __call__(
+        self, *c: Any, type_: _TypeEngineArgument[_T], **kwargs: Any
+    ) -> Function[_T]: ...
+
+    @overload
+    def __call__(self, *c: Any, **kwargs: Any) -> Function[Any]: ...
+
+    def __call__(self, *c: Any, **kwargs: Any) -> Function[Any]:
+        o = self.opts.copy()
+        o.update(kwargs)
+
+        tokens = len(self.__names)
+
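+        # a single name resolves against the "_default" registry package;
+        # a two-token name resolves as "package.function"; longer dotted
+        # paths bypass the registry and render verbatim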
+        if tokens == 2:
+            package, fname = self.__names
+        elif tokens == 1:
+            package, fname = "_default", self.__names[0]
+        else:
+            package = None
+
+        if package is not None:
+            func = _registry[package].get(fname.lower())
+            if func is not None:
+                return func(*c, **o)
+
+        return Function(
+            self.__names[-1], packagenames=tuple(self.__names[0:-1]), *c, **o
+        )
+
+    if TYPE_CHECKING:
+        # START GENERATED FUNCTION ACCESSORS
+
+        # code within this block is **programmatically,
+        # statically generated** by tools/generate_sql_functions.py
+
+        @property
+        def aggregate_strings(self) -> Type[aggregate_strings]: ...
+
+        @property
+        def ansifunction(self) -> Type[AnsiFunction[Any]]: ...
+
+        @property
+        def array_agg(self) -> Type[array_agg[Any]]: ...
+
+        @property
+        def cast(self) -> Type[Cast[Any]]: ...
+
+        @property
+        def char_length(self) -> Type[char_length]: ...
+
+        # set ColumnElement[_T] as a separate overload, to appease mypy
+        # which seems to not want to accept _T from _ColumnExpressionArgument.
+        # this is even if all non-generic types are removed from it, so
+        # reasons remain unclear for why this does not work
+
+        @overload
+        def coalesce(
+            self,
+            col: ColumnElement[_T],
+            *args: _ColumnExpressionOrLiteralArgument[Any],
+            **kwargs: Any,
+        ) -> coalesce[_T]: ...
+
+        @overload
+        def coalesce(
+            self,
+            col: _ColumnExpressionArgument[_T],
+            *args: _ColumnExpressionOrLiteralArgument[Any],
+            **kwargs: Any,
+        ) -> coalesce[_T]: ...
+
+        @overload
+        def coalesce(
+            self,
+            col: _ColumnExpressionOrLiteralArgument[_T],
+            *args: _ColumnExpressionOrLiteralArgument[Any],
+            **kwargs: Any,
+        ) -> coalesce[_T]: ...
+
+        def coalesce(
+            self,
+            col: _ColumnExpressionOrLiteralArgument[_T],
+            *args: _ColumnExpressionOrLiteralArgument[Any],
+            **kwargs: Any,
+        ) -> coalesce[_T]: ...
+
+        @property
+        def concat(self) -> Type[concat]: ...
+
+        @property
+        def count(self) -> Type[count]: ...
+
+        @property
+        def cube(self) -> Type[cube[Any]]: ...
+
+        @property
+        def cume_dist(self) -> Type[cume_dist]: ...
+
+        @property
+        def current_date(self) -> Type[current_date]: ...
+
+        @property
+        def current_time(self) -> Type[current_time]: ...
+
+        @property
+        def current_timestamp(self) -> Type[current_timestamp]: ...
+
+        @property
+        def current_user(self) -> Type[current_user]: ...
+
+        @property
+        def dense_rank(self) -> Type[dense_rank]: ...
+
+        @property
+        def extract(self) -> Type[Extract]: ...
+
+        @property
+        def grouping_sets(self) -> Type[grouping_sets[Any]]: ...
+
+        @property
+        def localtime(self) -> Type[localtime]: ...
+
+        @property
+        def localtimestamp(self) -> Type[localtimestamp]: ...
+
+        # set ColumnElement[_T] as a separate overload, to appease mypy
+        # which seems to not want to accept _T from _ColumnExpressionArgument.
+        # this is even if all non-generic types are removed from it, so
+        # reasons remain unclear for why this does not work
+
+        @overload
+        def max(  # noqa: A001
+            self,
+            col: ColumnElement[_T],
+            *args: _ColumnExpressionOrLiteralArgument[Any],
+            **kwargs: Any,
+        ) -> max[_T]: ...
+
+        @overload
+        def max(  # noqa: A001
+            self,
+            col: _ColumnExpressionArgument[_T],
+            *args: _ColumnExpressionOrLiteralArgument[Any],
+            **kwargs: Any,
+        ) -> max[_T]: ...
+
+        @overload
+        def max(  # noqa: A001
+            self,
+            col: _ColumnExpressionOrLiteralArgument[_T],
+            *args: _ColumnExpressionOrLiteralArgument[Any],
+            **kwargs: Any,
+        ) -> max[_T]: ...
+
+        def max(  # noqa: A001
+            self,
+            col: _ColumnExpressionOrLiteralArgument[_T],
+            *args: _ColumnExpressionOrLiteralArgument[Any],
+            **kwargs: Any,
+        ) -> max[_T]: ...
+
+        # set ColumnElement[_T] as a separate overload, to appease mypy
+        # which seems to not want to accept _T from _ColumnExpressionArgument.
+        # this is even if all non-generic types are removed from it, so
+        # reasons remain unclear for why this does not work
+
+        @overload
+        def min(  # noqa: A001
+            self,
+            col: ColumnElement[_T],
+            *args: _ColumnExpressionOrLiteralArgument[Any],
+            **kwargs: Any,
+        ) -> min[_T]: ...
+
+        @overload
+        def min(  # noqa: A001
+            self,
+            col: _ColumnExpressionArgument[_T],
+            *args: _ColumnExpressionOrLiteralArgument[Any],
+            **kwargs: Any,
+        ) -> min[_T]: ...
+
+        @overload
+        def min(  # noqa: A001
+            self,
+            col: _ColumnExpressionOrLiteralArgument[_T],
+            *args: _ColumnExpressionOrLiteralArgument[Any],
+            **kwargs: Any,
+        ) -> min[_T]: ...
+
+        def min(  # noqa: A001
+            self,
+            col: _ColumnExpressionOrLiteralArgument[_T],
+            *args: _ColumnExpressionOrLiteralArgument[Any],
+            **kwargs: Any,
+        ) -> min[_T]: ...
+
+        @property
+        def mode(self) -> Type[mode[Any]]: ...
+
+        @property
+        def next_value(self) -> Type[next_value]: ...
+
+        @property
+        def now(self) -> Type[now]: ...
+
+        @property
+        def orderedsetagg(self) -> Type[OrderedSetAgg[Any]]: ...
+
+        @property
+        def percent_rank(self) -> Type[percent_rank]: ...
+
+        @property
+        def percentile_cont(self) -> Type[percentile_cont[Any]]: ...
+
+        @property
+        def percentile_disc(self) -> Type[percentile_disc[Any]]: ...
+
+        @property
+        def random(self) -> Type[random]: ...
+
+        @property
+        def rank(self) -> Type[rank]: ...
+
+        @property
+        def rollup(self) -> Type[rollup[Any]]: ...
+
+        @property
+        def session_user(self) -> Type[session_user]: ...
+
+        # set ColumnElement[_T] as a separate overload, to appease mypy
+        # which seems to not want to accept _T from _ColumnExpressionArgument.
+        # this is even if all non-generic types are removed from it, so
+        # reasons remain unclear for why this does not work
+
+        @overload
+        def sum(  # noqa: A001
+            self,
+            col: ColumnElement[_T],
+            *args: _ColumnExpressionOrLiteralArgument[Any],
+            **kwargs: Any,
+        ) -> sum[_T]: ...
+
+        @overload
+        def sum(  # noqa: A001
+            self,
+            col: _ColumnExpressionArgument[_T],
+            *args: _ColumnExpressionOrLiteralArgument[Any],
+            **kwargs: Any,
+        ) -> sum[_T]: ...
+
+        @overload
+        def sum(  # noqa: A001
+            self,
+            col: _ColumnExpressionOrLiteralArgument[_T],
+            *args: _ColumnExpressionOrLiteralArgument[Any],
+            **kwargs: Any,
+        ) -> sum[_T]: ...
+
+        def sum(  # noqa: A001
+            self,
+            col: _ColumnExpressionOrLiteralArgument[_T],
+            *args: _ColumnExpressionOrLiteralArgument[Any],
+            **kwargs: Any,
+        ) -> sum[_T]: ...
+
+        @property
+        def sysdate(self) -> Type[sysdate]: ...
+
+        @property
+        def user(self) -> Type[user]: ...
+
+        # END GENERATED FUNCTION ACCESSORS
+
+
+func = _FunctionGenerator()
+func.__doc__ = _FunctionGenerator.__doc__
+
+modifier = _FunctionGenerator(group=False)
+
+
+class Function(FunctionElement[_T]):
+    r"""Describe a named SQL function.
+
+    The :class:`.Function` object is typically generated from the
+    :data:`.func` generation object.
+
+
+    :param \*clauses: list of column expressions that form the arguments
+     of the SQL function call.
+
+    :param type\_: optional :class:`.TypeEngine` datatype object that will be
+     used as the return value of the column expression generated by this
+     function call.
+
+    :param packagenames: a string which indicates package prefix names
+     to be prepended to the function name when the SQL is generated.
+     The :data:`.func` generator creates these when it is called using
+     dotted format, e.g.::
+
+        func.mypackage.some_function(col1, col2)
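+
+    A :class:`.Function` may also be constructed directly; a minimal sketch
+    (column and type names are illustrative)::
+
+        from sqlalchemy import Integer, column
+        from sqlalchemy.sql.functions import Function
+
+        fn = Function("my_func", column("x"), type_=Integer)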
+
+    .. seealso::
+
+        :ref:`tutorial_functions` - in the :ref:`unified_tutorial`
+
+        :data:`.func` - namespace which produces registered or ad-hoc
+        :class:`.Function` instances.
+
+        :class:`.GenericFunction` - allows creation of registered function
+        types.
+
+    """
+
+    __visit_name__ = "function"
+
+    _traverse_internals = FunctionElement._traverse_internals + [
+        ("packagenames", InternalTraversal.dp_plain_obj),
+        ("name", InternalTraversal.dp_string),
+        ("type", InternalTraversal.dp_type),
+    ]
+
+    name: str
+
+    identifier: str
+
+    type: TypeEngine[_T]
+    """A :class:`_types.TypeEngine` object which refers to the SQL return
+    type represented by this SQL function.
+
+    This datatype may be configured when generating a
+    :class:`_functions.Function` object by passing the
+    :paramref:`_functions.Function.type_` parameter, e.g.::
+
+        >>> select(func.lower("some VALUE", type_=String))
+
+    The small number of built-in classes of :class:`_functions.Function` come
+    with a built-in datatype that's appropriate to the class of function and
+    its arguments. For functions that aren't known, the type defaults to the
+    "null type".
+
+    """
+
+    @overload
+    def __init__(
+        self,
+        name: str,
+        *clauses: _ColumnExpressionOrLiteralArgument[_T],
+        type_: None = ...,
+        packagenames: Optional[Tuple[str, ...]] = ...,
+    ): ...
+
+    @overload
+    def __init__(
+        self,
+        name: str,
+        *clauses: _ColumnExpressionOrLiteralArgument[Any],
+        type_: _TypeEngineArgument[_T] = ...,
+        packagenames: Optional[Tuple[str, ...]] = ...,
+    ): ...
+
+    def __init__(
+        self,
+        name: str,
+        *clauses: _ColumnExpressionOrLiteralArgument[Any],
+        type_: Optional[_TypeEngineArgument[_T]] = None,
+        packagenames: Optional[Tuple[str, ...]] = None,
+    ):
+        """Construct a :class:`.Function`.
+
+        The :data:`.func` construct is normally used to construct
+        new :class:`.Function` instances.
+
+        """
+        self.packagenames = packagenames or ()
+        self.name = name
+
+        # if type is None, we get NULLTYPE, which is our _T.  But I don't
+        # know how to get the overloads to express that correctly
+        self.type = type_api.to_instance(type_)  # type: ignore
+
+        FunctionElement.__init__(self, *clauses)
+
+    def _bind_param(
+        self,
+        operator: OperatorType,
+        obj: Any,
+        type_: Optional[TypeEngine[_T]] = None,
+        expanding: bool = False,
+        **kw: Any,
+    ) -> BindParameter[_T]:
+        return BindParameter(
+            self.name,
+            obj,
+            _compared_to_operator=operator,
+            _compared_to_type=self.type,
+            type_=type_,
+            unique=True,
+            expanding=expanding,
+            **kw,
+        )
+
+
+class GenericFunction(Function[_T]):
+    """Define a 'generic' function.
+
+    A generic function is a pre-established :class:`.Function`
+    class that is instantiated automatically when called
+    by name from the :data:`.func` attribute.    Note that
+    calling any name from :data:`.func` has the effect that
+    a new :class:`.Function` instance is created automatically,
+    given that name.  The primary use case for defining
+    a :class:`.GenericFunction` class is so that a function
+    of a particular name may be given a fixed return type.
+    It can also include custom argument parsing schemes as well
+    as additional methods.
+
+    Subclasses of :class:`.GenericFunction` are automatically
+    registered under the name of the class.  For
+    example, a user-defined function ``as_utc()`` would
+    be available immediately::
+
+        from sqlalchemy.sql.functions import GenericFunction
+        from sqlalchemy.types import DateTime
+
+
+        class as_utc(GenericFunction):
+            type = DateTime()
+            inherit_cache = True
+
+
+        print(select(func.as_utc()))
+
+    User-defined generic functions can be organized into
+    packages by specifying the "package" attribute when defining
+    :class:`.GenericFunction`.   Third party libraries
+    containing many functions may want to use this in order
+    to avoid name conflicts with other systems.   For example,
+    if our ``as_utc()`` function were part of a package
+    "time"::
+
+        class as_utc(GenericFunction):
+            type = DateTime()
+            package = "time"
+            inherit_cache = True
+
+    The above function would be available from :data:`.func`
+    using the package name ``time``::
+
+        print(select(func.time.as_utc()))
+
+    A final option is to allow the function to be accessed
+    from one name in :data:`.func` but to render as a different name.
+    The ``identifier`` attribute will override the name used to
+    access the function as loaded from :data:`.func`, but will retain
+    the usage of ``name`` as the rendered name::
+
+        class GeoBuffer(GenericFunction):
+            type = Geometry()
+            package = "geo"
+            name = "ST_Buffer"
+            identifier = "buffer"
+            inherit_cache = True
+
+    The above function will render as follows:
+
+    .. sourcecode:: pycon+sql
+
+        >>> print(func.geo.buffer())
+        {printsql}ST_Buffer()
+
+    The name is rendered as given, without quoting, unless it contains
+    special characters that require quoting.  To force quoting
+    on or off for the name, use the :class:`.sqlalchemy.sql.quoted_name`
+    construct::
+
+        from sqlalchemy.sql import quoted_name
+
+
+        class GeoBuffer(GenericFunction):
+            type = Geometry()
+            package = "geo"
+            name = quoted_name("ST_Buffer", True)
+            identifier = "buffer"
+            inherit_cache = True
+
+    The above function will render as:
+
+    .. sourcecode:: pycon+sql
+
+        >>> print(func.geo.buffer())
+        {printsql}"ST_Buffer"()
+
+    Type parameters for this class as a
+    `generic type <https://peps.python.org/pep-0484/#generics>`_ can be passed
+    and should match the type seen in a :class:`_engine.Result`. For example::
+
+        class as_utc(GenericFunction[datetime.datetime]):
+            type = DateTime()
+            inherit_cache = True
+
+    The above indicates that the following expression returns a ``datetime``
+    object::
+
+        connection.scalar(select(func.as_utc()))
+
+    .. versionadded:: 1.3.13  The :class:`.quoted_name` construct is now
+       recognized for quoting when used with the "name" attribute of the
+       object, so that quoting can be forced on or off for the function
+       name.
+
+
+    """
+
+    coerce_arguments = True
+    inherit_cache = True
+
+    _register: bool
+
+    name = "GenericFunction"
+
+    def __init_subclass__(cls) -> None:
+        if annotation.Annotated not in cls.__mro__:
+            cls._register_generic_function(cls.__name__, cls.__dict__)
+        super().__init_subclass__()
+
+    @classmethod
+    def _register_generic_function(
+        cls, clsname: str, clsdict: Mapping[str, Any]
+    ) -> None:
+        cls.name = name = clsdict.get("name", clsname)
+        cls.identifier = identifier = clsdict.get("identifier", name)
+        package = clsdict.get("package", "_default")
+        # legacy
+        if "__return_type__" in clsdict:
+            cls.type = clsdict["__return_type__"]
+
+        # Check _register attribute status
+        cls._register = getattr(cls, "_register", True)
+
+        # Register the function if required
+        if cls._register:
+            register_function(identifier, cls, package)
+        else:
+            # Set _register to True to register child classes by default
+            cls._register = True
+
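+    # A usage sketch (illustrative, not part of the library source): based on
+    # the logic above, a base class may opt out of :data:`.func` registration
+    # by setting ``_register = False`` in its body; subclasses are then
+    # registered again by default::
+    #
+    #     class _utility_base(GenericFunction):  # hypothetical base
+    #         _register = False  # the base itself is not placed under func
+    #
+    #     class my_func(_utility_base):
+    #         inherit_cache = True  # registered as func.my_func
+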
+    def __init__(
+        self, *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any
+    ):
+        parsed_args = kwargs.pop("_parsed_args", None)
+        if parsed_args is None:
+            parsed_args = [
+                coercions.expect(
+                    roles.ExpressionElementRole,
+                    c,
+                    name=self.name,
+                    apply_propagate_attrs=self,
+                )
+                for c in args
+            ]
+        self._has_args = self._has_args or bool(parsed_args)
+        self.packagenames = ()
+
+        self.clause_expr = Grouping(
+            ClauseList(
+                operator=operators.comma_op, group_contents=True, *parsed_args
+            )
+        )
+
+        self.type = type_api.to_instance(  # type: ignore
+            kwargs.pop("type_", None) or getattr(self, "type", None)
+        )
+
+
+register_function("cast", Cast)  # type: ignore
+register_function("extract", Extract)  # type: ignore
+
+
+class next_value(GenericFunction[int]):
+    """Represent the 'next value', given a :class:`.Sequence`
+    as its single argument.
+
+    Compiles into the appropriate function on each backend,
+    or will raise NotImplementedError if used on a backend
+    that does not provide support for sequences.
+
+    """
+
+    type = sqltypes.Integer()
+    name = "next_value"
+
+    _traverse_internals = [
+        ("sequence", InternalTraversal.dp_named_ddl_element)
+    ]
+
+    def __init__(self, seq: schema.Sequence, **kw: Any):
+        assert isinstance(
+            seq, schema.Sequence
+        ), "next_value() accepts a Sequence object as input."
+        self.sequence = seq
+        self.type = sqltypes.to_instance(  # type: ignore
+            seq.data_type or getattr(self, "type", None)
+        )
+
+    def compare(self, other: Any, **kw: Any) -> bool:
+        return (
+            isinstance(other, next_value)
+            and self.sequence.name == other.sequence.name
+        )
+
+    @property
+    def _from_objects(self) -> Any:
+        return []
+
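+# A hedged usage sketch (assumption, not part of the library source):
+# next_value is normally obtained via :meth:`.Sequence.next_value`::
+#
+#     from sqlalchemy import Sequence, select
+#     from sqlalchemy.dialects import postgresql
+#
+#     seq = Sequence("order_id_seq")
+#     stmt = select(seq.next_value())
+#     print(stmt.compile(dialect=postgresql.dialect()))
+#     # e.g. SELECT nextval('order_id_seq') AS next_value_1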
+
+class AnsiFunction(GenericFunction[_T]):
+    """Define a function in "ansi" format, which doesn't render parenthesis."""
+
+    inherit_cache = True
+
+    def __init__(self, *args: _ColumnExpressionArgument[Any], **kwargs: Any):
+        GenericFunction.__init__(self, *args, **kwargs)
+
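+# A hedged illustration (assumption, not part of the library source):
+# AnsiFunction subclasses such as current_timestamp, defined later in this
+# module, render without parentheses::
+#
+#     from sqlalchemy import func, select
+#
+#     print(select(func.current_timestamp()))
+#     # SELECT CURRENT_TIMESTAMP AS current_timestamp_1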
+
+class ReturnTypeFromArgs(GenericFunction[_T]):
+    """Define a function whose return type is the same as its arguments."""
+
+    inherit_cache = True
+
+    # set ColumnElement[_T] as a separate overload, to appease mypy, which
+    # does not want to accept _T from _ColumnExpressionArgument; this holds
+    # even when all non-generic types are removed from it, so the reason it
+    # does not work remains unclear
+
+    @overload
+    def __init__(
+        self,
+        col: ColumnElement[_T],
+        *args: _ColumnExpressionOrLiteralArgument[Any],
+        **kwargs: Any,
+    ): ...
+
+    @overload
+    def __init__(
+        self,
+        col: _ColumnExpressionArgument[_T],
+        *args: _ColumnExpressionOrLiteralArgument[Any],
+        **kwargs: Any,
+    ): ...
+
+    @overload
+    def __init__(
+        self,
+        col: _ColumnExpressionOrLiteralArgument[_T],
+        *args: _ColumnExpressionOrLiteralArgument[Any],
+        **kwargs: Any,
+    ): ...
+
+    def __init__(
+        self, *args: _ColumnExpressionOrLiteralArgument[Any], **kwargs: Any
+    ):
+        fn_args: Sequence[ColumnElement[Any]] = [
+            coercions.expect(
+                roles.ExpressionElementRole,
+                c,
+                name=self.name,
+                apply_propagate_attrs=self,
+            )
+            for c in args
+        ]
+        kwargs.setdefault("type_", _type_from_args(fn_args))
+        kwargs["_parsed_args"] = fn_args
+        super().__init__(*fn_args, **kwargs)
+
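+# A hedged illustration (assumption, not part of the library source): the
+# derived return type follows the first typed argument, so coalesce() below
+# adopts the type of the column it wraps::
+#
+#     from sqlalchemy import Column, MetaData, Numeric, Table, func
+#
+#     t = Table("t", MetaData(), Column("amount", Numeric(10, 2)))
+#     expr = func.coalesce(t.c.amount, 0)
+#     assert isinstance(expr.type, Numeric)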
+
+class coalesce(ReturnTypeFromArgs[_T]):
+    _has_args = True
+    inherit_cache = True
+
+
+class max(ReturnTypeFromArgs[_T]):  # noqa:  A001
+    """The SQL MAX() aggregate function."""
+
+    inherit_cache = True
+
+
+class min(ReturnTypeFromArgs[_T]):  # noqa: A001
+    """The SQL MIN() aggregate function."""
+
+    inherit_cache = True
+
+
+class sum(ReturnTypeFromArgs[_T]):  # noqa: A001
+    """The SQL SUM() aggregate function."""
+
+    inherit_cache = True
+
+
+class now(GenericFunction[datetime.datetime]):
+    """The SQL now() datetime function.
+
+    SQLAlchemy dialects will usually render this particular function
+    in a backend-specific way, such as rendering it as ``CURRENT_TIMESTAMP``.
+
+    """
+
+    type = sqltypes.DateTime()
+    inherit_cache = True
+
+
+class concat(GenericFunction[str]):
+    """The SQL CONCAT() function, which concatenates strings.
+
+    E.g.:
+
+    .. sourcecode:: pycon+sql
+
+        >>> print(select(func.concat("a", "b")))
+        {printsql}SELECT concat(:concat_2, :concat_3) AS concat_1
+
+    String concatenation in SQLAlchemy is more commonly available using the
+    Python ``+`` operator with string datatypes, which will render a
+    backend-specific concatenation operator, such as:
+
+    .. sourcecode:: pycon+sql
+
+        >>> print(select(literal("a") + "b"))
+        {printsql}SELECT :param_1 || :param_2 AS anon_1
+
+
+    """
+
+    type = sqltypes.String()
+    inherit_cache = True
+
+
+class char_length(GenericFunction[int]):
+    """The CHAR_LENGTH() SQL function."""
+
+    type = sqltypes.Integer()
+    inherit_cache = True
+
+    def __init__(self, arg: _ColumnExpressionArgument[str], **kw: Any):
+        # slight hack to limit to just one positional argument;
+        # it is unclear why this one function receives this special treatment
+        super().__init__(arg, **kw)
+
+
+class random(GenericFunction[float]):
+    """The RANDOM() SQL function."""
+
+    _has_args = True
+    inherit_cache = True
+
+
+class count(GenericFunction[int]):
+    r"""The ANSI COUNT aggregate function.  With no arguments,
+    emits COUNT \*.
+
+    E.g.::
+
+        from sqlalchemy import func
+        from sqlalchemy import select
+        from sqlalchemy import table, column
+
+        my_table = table("some_table", column("id"))
+
+        stmt = select(func.count()).select_from(my_table)
+
+    Executing ``stmt`` would emit:
+
+    .. sourcecode:: sql
+
+        SELECT count(*) AS count_1
+        FROM some_table
+
+
+    """
+
+    type = sqltypes.Integer()
+    inherit_cache = True
+
+    def __init__(
+        self,
+        expression: Union[
+            _ColumnExpressionArgument[Any], _StarOrOne, None
+        ] = None,
+        **kwargs: Any,
+    ):
+        if expression is None:
+            expression = literal_column("*")
+        super().__init__(expression, **kwargs)
+
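+# A hedged usage sketch (assumption, not part of the library source),
+# continuing the docstring example above: passing a column counts non-NULL
+# values of that column rather than all rows::
+#
+#     stmt = select(func.count(my_table.c.id))
+#     # SELECT count(some_table.id) AS count_1 FROM some_table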
+
+class current_date(AnsiFunction[datetime.date]):
+    """The CURRENT_DATE() SQL function."""
+
+    type = sqltypes.Date()
+    inherit_cache = True
+
+
+class current_time(AnsiFunction[datetime.time]):
+    """The CURRENT_TIME() SQL function."""
+
+    type = sqltypes.Time()
+    inherit_cache = True
+
+
+class current_timestamp(AnsiFunction[datetime.datetime]):
+    """The CURRENT_TIMESTAMP() SQL function."""
+
+    type = sqltypes.DateTime()
+    inherit_cache = True
+
+
+class current_user(AnsiFunction[str]):
+    """The CURRENT_USER() SQL function."""
+
+    type = sqltypes.String()
+    inherit_cache = True
+
+
+class localtime(AnsiFunction[datetime.datetime]):
+    """The localtime() SQL function."""
+
+    type = sqltypes.DateTime()
+    inherit_cache = True
+
+
+class localtimestamp(AnsiFunction[datetime.datetime]):
+    """The localtimestamp() SQL function."""
+
+    type = sqltypes.DateTime()
+    inherit_cache = True
+
+
+class session_user(AnsiFunction[str]):
+    """The SESSION_USER() SQL function."""
+
+    type = sqltypes.String()
+    inherit_cache = True
+
+
+class sysdate(AnsiFunction[datetime.datetime]):
+    """The SYSDATE() SQL function."""
+
+    type = sqltypes.DateTime()
+    inherit_cache = True
+
+
+class user(AnsiFunction[str]):
+    """The USER() SQL function."""
+
+    type = sqltypes.String()
+    inherit_cache = True
+
+
+class array_agg(GenericFunction[_T]):
+    """Support for the ARRAY_AGG function.
+
+    The ``func.array_agg(expr)`` construct returns an expression of
+    type :class:`_types.ARRAY`.
+
+    e.g.::
+
+        stmt = select(func.array_agg(table.c.values)[2:5])
+
+    .. seealso::
+
+        :func:`_postgresql.array_agg` - PostgreSQL-specific version that
+        returns :class:`_postgresql.ARRAY`, which has PG-specific operators
+        added.
+
+    """
+
+    inherit_cache = True
+
+    def __init__(self, *args: _ColumnExpressionArgument[Any], **kwargs: Any):
+        fn_args: Sequence[ColumnElement[Any]] = [
+            coercions.expect(
+                roles.ExpressionElementRole, c, apply_propagate_attrs=self
+            )
+            for c in args
+        ]
+
+        default_array_type = kwargs.pop("_default_array_type", sqltypes.ARRAY)
+        if "type_" not in kwargs:
+            type_from_args = _type_from_args(fn_args)
+            if isinstance(type_from_args, sqltypes.ARRAY):
+                kwargs["type_"] = type_from_args
+            else:
+                kwargs["type_"] = default_array_type(
+                    type_from_args, dimensions=1
+                )
+        kwargs["_parsed_args"] = fn_args
+        super().__init__(*fn_args, **kwargs)
+
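+# A hedged illustration (assumption, not part of the library source): per
+# the logic above, a non-ARRAY argument type is wrapped in a one-dimensional
+# ARRAY by default::
+#
+#     from sqlalchemy import Column, Integer, MetaData, Table, func
+#     from sqlalchemy.types import ARRAY
+#
+#     t = Table("t", MetaData(), Column("v", Integer))
+#     expr = func.array_agg(t.c.v)
+#     assert isinstance(expr.type, ARRAY)
+#     assert isinstance(expr.type.item_type, Integer)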
+
+class OrderedSetAgg(GenericFunction[_T]):
+    """Define a function where the return type is based on the sort
+    expression type as defined by the expression passed to the
+    :meth:`.FunctionElement.within_group` method."""
+
+    array_for_multi_clause = False
+    inherit_cache = True
+
+    def within_group_type(
+        self, within_group: WithinGroup[Any]
+    ) -> TypeEngine[Any]:
+        func_clauses = cast(ClauseList, self.clause_expr.element)
+        order_by: Sequence[ColumnElement[Any]] = sqlutil.unwrap_order_by(
+            within_group.order_by
+        )
+        if self.array_for_multi_clause and len(func_clauses.clauses) > 1:
+            return sqltypes.ARRAY(order_by[0].type)
+        else:
+            return order_by[0].type
+
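+# A hedged usage sketch (assumption, not part of the library source):
+# ordered-set aggregates take their type from the WITHIN GROUP sort
+# expression resolved above::
+#
+#     from sqlalchemy import Column, MetaData, Numeric, Table, func
+#
+#     t = Table("t", MetaData(), Column("amount", Numeric))
+#     expr = func.percentile_cont(0.5).within_group(t.c.amount)
+#     assert isinstance(expr.type, Numeric)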
+
+class mode(OrderedSetAgg[_T]):
+    """Implement the ``mode`` ordered-set aggregate function.
+
+    This function must be used with the :meth:`.FunctionElement.within_group`
+    modifier to supply a sort expression to operate upon.
+
+    The return type of this function is the same as the sort expression.
+
+    """
+
+    inherit_cache = True
+
+
+class percentile_cont(OrderedSetAgg[_T]):
+    """Implement the ``percentile_cont`` ordered-set aggregate function.
+
+    This function must be used with the :meth:`.FunctionElement.within_group`
+    modifier to supply a sort expression to operate upon.
+
+    The return type of this function is the same as the sort expression,
+    or if the arguments are an array, an :class:`_types.ARRAY` of the sort
+    expression's type.
+
+    """
+
+    array_for_multi_clause = True
+    inherit_cache = True
+
+
+class percentile_disc(OrderedSetAgg[_T]):
+    """Implement the ``percentile_disc`` ordered-set aggregate function.
+
+    This function must be used with the :meth:`.FunctionElement.within_group`
+    modifier to supply a sort expression to operate upon.
+
+    The return type of this function is the same as the sort expression,
+    or if the arguments are an array, an :class:`_types.ARRAY` of the sort
+    expression's type.
+
+    """
+
+    array_for_multi_clause = True
+    inherit_cache = True
+
+
+class rank(GenericFunction[int]):
+    """Implement the ``rank`` hypothetical-set aggregate function.
+
+    This function must be used with the :meth:`.FunctionElement.within_group`
+    modifier to supply a sort expression to operate upon.
+
+    The return type of this function is :class:`.Integer`.
+
+    """
+
+    type = sqltypes.Integer()
+    inherit_cache = True
+
+
+class dense_rank(GenericFunction[int]):
+    """Implement the ``dense_rank`` hypothetical-set aggregate function.
+
+    This function must be used with the :meth:`.FunctionElement.within_group`
+    modifier to supply a sort expression to operate upon.
+
+    The return type of this function is :class:`.Integer`.
+
+    """
+
+    type = sqltypes.Integer()
+    inherit_cache = True
+
+
+class percent_rank(GenericFunction[decimal.Decimal]):
+    """Implement the ``percent_rank`` hypothetical-set aggregate function.
+
+    This function must be used with the :meth:`.FunctionElement.within_group`
+    modifier to supply a sort expression to operate upon.
+
+    The return type of this function is :class:`.Numeric`.
+
+    """
+
+    type: sqltypes.Numeric[decimal.Decimal] = sqltypes.Numeric()
+    inherit_cache = True
+
+
+class cume_dist(GenericFunction[decimal.Decimal]):
+    """Implement the ``cume_dist`` hypothetical-set aggregate function.
+
+    This function must be used with the :meth:`.FunctionElement.within_group`
+    modifier to supply a sort expression to operate upon.
+
+    The return type of this function is :class:`.Numeric`.
+
+    """
+
+    type: sqltypes.Numeric[decimal.Decimal] = sqltypes.Numeric()
+    inherit_cache = True
+
+
+class cube(GenericFunction[_T]):
+    r"""Implement the ``CUBE`` grouping operation.
+
+    This function is used as part of the GROUP BY of a statement,
+    e.g. :meth:`_expression.Select.group_by`::
+
+        stmt = select(
+            func.sum(table.c.value), table.c.col_1, table.c.col_2
+        ).group_by(func.cube(table.c.col_1, table.c.col_2))
+
+    .. versionadded:: 1.2
+
+    """
+
+    _has_args = True
+    inherit_cache = True
+
+
+class rollup(GenericFunction[_T]):
+    r"""Implement the ``ROLLUP`` grouping operation.
+
+    This function is used as part of the GROUP BY of a statement,
+    e.g. :meth:`_expression.Select.group_by`::
+
+        stmt = select(
+            func.sum(table.c.value), table.c.col_1, table.c.col_2
+        ).group_by(func.rollup(table.c.col_1, table.c.col_2))
+
+    .. versionadded:: 1.2
+
+    """
+
+    _has_args = True
+    inherit_cache = True
+
+
+class grouping_sets(GenericFunction[_T]):
+    r"""Implement the ``GROUPING SETS`` grouping operation.
+
+    This function is used as part of the GROUP BY of a statement,
+    e.g. :meth:`_expression.Select.group_by`::
+
+        stmt = select(
+            func.sum(table.c.value), table.c.col_1, table.c.col_2
+        ).group_by(func.grouping_sets(table.c.col_1, table.c.col_2))
+
+    In order to group by multiple sets, use the :func:`.tuple_` construct::
+
+        from sqlalchemy import tuple_
+
+        stmt = select(
+            func.sum(table.c.value), table.c.col_1, table.c.col_2, table.c.col_3
+        ).group_by(
+            func.grouping_sets(
+                tuple_(table.c.col_1, table.c.col_2),
+                tuple_(table.c.value, table.c.col_3),
+            )
+        )
+
+    .. versionadded:: 1.2
+
+    """  # noqa: E501
+
+    _has_args = True
+    inherit_cache = True
+
+
+class aggregate_strings(GenericFunction[str]):
+    """Implement a generic string aggregation function.
+
+    This function will concatenate non-null values into a string and
+    separate the values by a delimiter.
+
+    This function is compiled on a per-backend basis, into functions
+    such as ``group_concat()``, ``string_agg()``, or ``LISTAGG()``.
+
+    e.g. Example usage with delimiter '.'::
+
+        stmt = select(func.aggregate_strings(table.c.str_col, "."))
+
+    The return type of this function is :class:`.String`.
+
+    .. versionadded:: 2.0.21
+
+    """
+
+    type = sqltypes.String()
+    _has_args = True
+    inherit_cache = True
+
+    def __init__(self, clause: _ColumnExpressionArgument[Any], separator: str):
+        super().__init__(clause, separator)
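+
+
+# A hedged usage sketch (assumption, not part of the library source), where
+# ``users`` is a hypothetical table with a string ``name`` column::
+#
+#     stmt = select(func.aggregate_strings(users.c.name, ", "))
+#     # compiles to group_concat() on MySQL, string_agg() on PostgreSQL,
+#     # LISTAGG() on Oracle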
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/lambdas.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/lambdas.py
new file mode 100644
index 00000000..8d70f800
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/lambdas.py
@@ -0,0 +1,1443 @@
+# sql/lambdas.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+from __future__ import annotations
+
+import collections.abc as collections_abc
+import inspect
+import itertools
+import operator
+import threading
+import types
+from types import CodeType
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import List
+from typing import MutableMapping
+from typing import Optional
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+import weakref
+
+from . import cache_key as _cache_key
+from . import coercions
+from . import elements
+from . import roles
+from . import schema
+from . import visitors
+from .base import _clone
+from .base import Executable
+from .base import Options
+from .cache_key import CacheConst
+from .operators import ColumnOperators
+from .. import exc
+from .. import inspection
+from .. import util
+from ..util.typing import Literal
+
+
+if TYPE_CHECKING:
+    from .elements import BindParameter
+    from .elements import ClauseElement
+    from .roles import SQLRole
+    from .visitors import _CloneCallableType
+
+_LambdaCacheType = MutableMapping[
+    Tuple[Any, ...], Union["NonAnalyzedFunction", "AnalyzedFunction"]
+]
+_BoundParameterGetter = Callable[..., Any]
+
+_closure_per_cache_key: _LambdaCacheType = util.LRUCache(1000)
+
+
+_LambdaType = Callable[[], Any]
+
+_AnyLambdaType = Callable[..., Any]
+
+_StmtLambdaType = Callable[[], Any]
+
+_E = TypeVar("_E", bound=Executable)
+_StmtLambdaElementType = Callable[[_E], Any]
+
+
+class LambdaOptions(Options):
+    enable_tracking = True
+    track_closure_variables = True
+    track_on: Optional[object] = None
+    global_track_bound_values = True
+    track_bound_values = True
+    lambda_cache: Optional[_LambdaCacheType] = None
+
+
+def lambda_stmt(
+    lmb: _StmtLambdaType,
+    enable_tracking: bool = True,
+    track_closure_variables: bool = True,
+    track_on: Optional[object] = None,
+    global_track_bound_values: bool = True,
+    track_bound_values: bool = True,
+    lambda_cache: Optional[_LambdaCacheType] = None,
+) -> StatementLambdaElement:
+    """Produce a SQL statement that is cached as a lambda.
+
+    The Python code object within the lambda is scanned for both Python
+    literals that will become bound parameters as well as closure variables
+    that refer to Core or ORM constructs that may vary.   The lambda itself
+    will be invoked only once per particular set of constructs detected.
+
+    E.g.::
+
+        from sqlalchemy import lambda_stmt
+
+        stmt = lambda_stmt(lambda: table.select())
+        stmt += lambda s: s.where(table.c.id == 5)
+
+        result = connection.execute(stmt)
+
+    The object returned is an instance of :class:`_sql.StatementLambdaElement`.
+
+    .. versionadded:: 1.4
+
+    :param lmb: a Python function, typically a lambda, which takes no arguments
+     and returns a SQL expression construct
+    :param enable_tracking: when False, all scanning of the given lambda for
+     changes in closure variables or bound parameters is disabled.  Use for
+     a lambda that produces the identical results in all cases with no
+     parameterization.
+    :param track_closure_variables: when False, changes in closure variables
+     within the lambda will not be scanned.   Use for a lambda where the
+     state of its closure variables will never change the SQL structure
+     returned by the lambda.
+    :param track_bound_values: when False, bound parameter tracking will
+     be disabled for the given lambda.  Use for a lambda that either does
+     not produce any bound values, or where the initial bound values never
+     change.
+    :param global_track_bound_values: when False, bound parameter tracking
+     will be disabled for the entire statement including additional links
+     added via the :meth:`_sql.StatementLambdaElement.add_criteria` method.
+    :param lambda_cache: a dictionary or other mapping-like object where
+     information about the lambda's Python code as well as the tracked closure
+     variables in the lambda itself will be stored.   Defaults
+     to a global LRU cache.  This cache is independent of the "compiled_cache"
+     used by the :class:`_engine.Connection` object.
+
+    .. seealso::
+
+        :ref:`engine_lambda_caching`
+
+
+    """
+
+    return StatementLambdaElement(
+        lmb,
+        roles.StatementRole,
+        LambdaOptions(
+            enable_tracking=enable_tracking,
+            track_on=track_on,
+            track_closure_variables=track_closure_variables,
+            global_track_bound_values=global_track_bound_values,
+            track_bound_values=track_bound_values,
+            lambda_cache=lambda_cache,
+        ),
+    )
+
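+# A hedged illustration (assumption, not part of the library source), where
+# ``user_table`` and ``connection`` are hypothetical: the lambda bodies run
+# only once per distinct code object; later calls reuse the cached analysis
+# and extract only the new value of ``id_`` as a bound parameter::
+#
+#     def run(connection, id_):
+#         stmt = lambda_stmt(lambda: select(user_table))
+#         stmt += lambda s: s.where(user_table.c.id == id_)
+#         return connection.execute(stmt)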
+
+class LambdaElement(elements.ClauseElement):
+    """A SQL construct where the state is stored as an un-invoked lambda.
+
+    The :class:`_sql.LambdaElement` is produced transparently whenever
+    passing lambda expressions into SQL constructs, such as::
+
+        stmt = select(table).where(lambda: table.c.col == parameter)
+
+    The :class:`_sql.LambdaElement` is the base of the
+    :class:`_sql.StatementLambdaElement` which represents a full statement
+    within a lambda.
+
+    .. versionadded:: 1.4
+
+    .. seealso::
+
+        :ref:`engine_lambda_caching`
+
+    """
+
+    __visit_name__ = "lambda_element"
+
+    _is_lambda_element = True
+
+    _traverse_internals = [
+        ("_resolved", visitors.InternalTraversal.dp_clauseelement)
+    ]
+
+    _transforms: Tuple[_CloneCallableType, ...] = ()
+
+    _resolved_bindparams: List[BindParameter[Any]]
+    parent_lambda: Optional[StatementLambdaElement] = None
+    closure_cache_key: Union[Tuple[Any, ...], Literal[CacheConst.NO_CACHE]]
+    role: Type[SQLRole]
+    _rec: Union[AnalyzedFunction, NonAnalyzedFunction]
+    fn: _AnyLambdaType
+    tracker_key: Tuple[CodeType, ...]
+
+    def __repr__(self):
+        return "%s(%r)" % (
+            self.__class__.__name__,
+            self.fn.__code__,
+        )
+
+    def __init__(
+        self,
+        fn: _LambdaType,
+        role: Type[SQLRole],
+        opts: Union[Type[LambdaOptions], LambdaOptions] = LambdaOptions,
+        apply_propagate_attrs: Optional[ClauseElement] = None,
+    ):
+        self.fn = fn
+        self.role = role
+        self.tracker_key = (fn.__code__,)
+        self.opts = opts
+
+        if apply_propagate_attrs is None and (role is roles.StatementRole):
+            apply_propagate_attrs = self
+
+        rec = self._retrieve_tracker_rec(fn, apply_propagate_attrs, opts)
+
+        if apply_propagate_attrs is not None:
+            propagate_attrs = rec.propagate_attrs
+            if propagate_attrs:
+                apply_propagate_attrs._propagate_attrs = propagate_attrs
+
+    def _retrieve_tracker_rec(self, fn, apply_propagate_attrs, opts):
+        lambda_cache = opts.lambda_cache
+        if lambda_cache is None:
+            lambda_cache = _closure_per_cache_key
+
+        tracker_key = self.tracker_key
+
+        fn = self.fn
+        closure = fn.__closure__
+        tracker = AnalyzedCode.get(
+            fn,
+            self,
+            opts,
+        )
+
+        bindparams: List[BindParameter[Any]]
+        self._resolved_bindparams = bindparams = []
+
+        if self.parent_lambda is not None:
+            parent_closure_cache_key = self.parent_lambda.closure_cache_key
+        else:
+            parent_closure_cache_key = ()
+
+        cache_key: Union[Tuple[Any, ...], Literal[CacheConst.NO_CACHE]]
+
+        if parent_closure_cache_key is not _cache_key.NO_CACHE:
+            anon_map = visitors.anon_map()
+            cache_key = tuple(
+                [
+                    getter(closure, opts, anon_map, bindparams)
+                    for getter in tracker.closure_trackers
+                ]
+            )
+
+            if _cache_key.NO_CACHE not in anon_map:
+                cache_key = parent_closure_cache_key + cache_key
+
+                self.closure_cache_key = cache_key
+
+                try:
+                    rec = lambda_cache[tracker_key + cache_key]
+                except KeyError:
+                    rec = None
+            else:
+                cache_key = _cache_key.NO_CACHE
+                rec = None
+
+        else:
+            cache_key = _cache_key.NO_CACHE
+            rec = None
+
+        self.closure_cache_key = cache_key
+
+        if rec is None:
+            if cache_key is not _cache_key.NO_CACHE:
+                with AnalyzedCode._generation_mutex:
+                    key = tracker_key + cache_key
+                    if key not in lambda_cache:
+                        rec = AnalyzedFunction(
+                            tracker, self, apply_propagate_attrs, fn
+                        )
+                        rec.closure_bindparams = list(bindparams)
+                        lambda_cache[key] = rec
+                    else:
+                        rec = lambda_cache[key]
+            else:
+                rec = NonAnalyzedFunction(self._invoke_user_fn(fn))
+
+        else:
+            bindparams[:] = [
+                orig_bind._with_value(new_bind.value, maintain_key=True)
+                for orig_bind, new_bind in zip(
+                    rec.closure_bindparams, bindparams
+                )
+            ]
+
+        self._rec = rec
+
+        if cache_key is not _cache_key.NO_CACHE:
+            if self.parent_lambda is not None:
+                bindparams[:0] = self.parent_lambda._resolved_bindparams
+
+            lambda_element: Optional[LambdaElement] = self
+            while lambda_element is not None:
+                rec = lambda_element._rec
+                if rec.bindparam_trackers:
+                    tracker_instrumented_fn = rec.tracker_instrumented_fn
+                    for tracker in rec.bindparam_trackers:
+                        tracker(
+                            lambda_element.fn,
+                            tracker_instrumented_fn,
+                            bindparams,
+                        )
+                lambda_element = lambda_element.parent_lambda
+
+        return rec
+
+    def __getattr__(self, key):
+        return getattr(self._rec.expected_expr, key)
+
+    @property
+    def _is_sequence(self):
+        return self._rec.is_sequence
+
+    @property
+    def _select_iterable(self):
+        if self._is_sequence:
+            return itertools.chain.from_iterable(
+                [element._select_iterable for element in self._resolved]
+            )
+
+        else:
+            return self._resolved._select_iterable
+
+    @property
+    def _from_objects(self):
+        if self._is_sequence:
+            return itertools.chain.from_iterable(
+                [element._from_objects for element in self._resolved]
+            )
+
+        else:
+            return self._resolved._from_objects
+
+    def _param_dict(self):
+        return {b.key: b.value for b in self._resolved_bindparams}
+
+    def _setup_binds_for_tracked_expr(self, expr):
+        bindparam_lookup = {b.key: b for b in self._resolved_bindparams}
+
+        def replace(
+            element: Optional[visitors.ExternallyTraversible], **kw: Any
+        ) -> Optional[visitors.ExternallyTraversible]:
+            if isinstance(element, elements.BindParameter):
+                if element.key in bindparam_lookup:
+                    bind = bindparam_lookup[element.key]
+                    if element.expanding:
+                        bind.expanding = True
+                        bind.expand_op = element.expand_op
+                        bind.type = element.type
+                    return bind
+
+            return None
+
+        if self._rec.is_sequence:
+            expr = [
+                visitors.replacement_traverse(sub_expr, {}, replace)
+                for sub_expr in expr
+            ]
+        elif getattr(expr, "is_clause_element", False):
+            expr = visitors.replacement_traverse(expr, {}, replace)
+
+        return expr
+
+    def _copy_internals(
+        self,
+        clone: _CloneCallableType = _clone,
+        deferred_copy_internals: Optional[_CloneCallableType] = None,
+        **kw: Any,
+    ) -> None:
+        # TODO: this needs A LOT of tests
+        self._resolved = clone(
+            self._resolved,
+            deferred_copy_internals=deferred_copy_internals,
+            **kw,
+        )
+
+    @util.memoized_property
+    def _resolved(self):
+        expr = self._rec.expected_expr
+
+        if self._resolved_bindparams:
+            expr = self._setup_binds_for_tracked_expr(expr)
+
+        return expr
+
+    def _gen_cache_key(self, anon_map, bindparams):
+        if self.closure_cache_key is _cache_key.NO_CACHE:
+            anon_map[_cache_key.NO_CACHE] = True
+            return None
+
+        cache_key = (
+            self.fn.__code__,
+            self.__class__,
+        ) + self.closure_cache_key
+
+        parent = self.parent_lambda
+
+        while parent is not None:
+            assert parent.closure_cache_key is not CacheConst.NO_CACHE
+            parent_closure_cache_key: Tuple[Any, ...] = (
+                parent.closure_cache_key
+            )
+
+            cache_key = (
+                (parent.fn.__code__,) + parent_closure_cache_key + cache_key
+            )
+
+            parent = parent.parent_lambda
+
+        if self._resolved_bindparams:
+            bindparams.extend(self._resolved_bindparams)
+        return cache_key
+
+    def _invoke_user_fn(self, fn: _AnyLambdaType, *arg: Any) -> ClauseElement:
+        return fn()  # type: ignore[no-any-return]
+
+
+class DeferredLambdaElement(LambdaElement):
+    """A LambdaElement where the lambda accepts arguments and is
+    invoked within the compile phase with special context.
+
+    This lambda doesn't normally produce its real SQL expression outside of the
+    compile phase.  It is passed a fixed set of initial arguments
+    so that it can generate a sample expression.
+
+    """
+
+    def __init__(
+        self,
+        fn: _AnyLambdaType,
+        role: Type[roles.SQLRole],
+        opts: Union[Type[LambdaOptions], LambdaOptions] = LambdaOptions,
+        lambda_args: Tuple[Any, ...] = (),
+    ):
+        self.lambda_args = lambda_args
+        super().__init__(fn, role, opts)
+
+    def _invoke_user_fn(self, fn, *arg):
+        return fn(*self.lambda_args)
+
+    def _resolve_with_args(self, *lambda_args: Any) -> ClauseElement:
+        assert isinstance(self._rec, AnalyzedFunction)
+        tracker_fn = self._rec.tracker_instrumented_fn
+        expr = tracker_fn(*lambda_args)
+
+        expr = coercions.expect(self.role, expr)
+
+        expr = self._setup_binds_for_tracked_expr(expr)
+
+        # this validation is getting very close, but not quite, to achieving
+        # #5767.  The problem is if the base lambda uses an unnamed column
+        # as is very common with mixins, the parameter name is different
+        # and it produces a false positive; that is, for the documented case
+        # that is exactly what people will be doing, it doesn't work, so
+        # I'm not really sure how to handle this right now.
+        # expected_binds = [
+        #    b._orig_key
+        #    for b in self._rec.expr._generate_cache_key()[1]
+        #    if b.required
+        # ]
+        # got_binds = [
+        #    b._orig_key for b in expr._generate_cache_key()[1] if b.required
+        # ]
+        # if expected_binds != got_binds:
+        #    raise exc.InvalidRequestError(
+        #        "Lambda callable at %s produced a different set of bound "
+        #        "parameters than its original run: %s"
+        #        % (self.fn.__code__, ", ".join(got_binds))
+        #    )
+
+        # TODO: TEST TEST TEST, this is very out there
+        for deferred_copy_internals in self._transforms:
+            expr = deferred_copy_internals(expr)
+
+        return expr  # type: ignore
+
+    def _copy_internals(
+        self, clone=_clone, deferred_copy_internals=None, **kw
+    ):
+        super()._copy_internals(
+            clone=clone,
+            deferred_copy_internals=deferred_copy_internals,  # **kw
+            opts=kw,
+        )
+
+        # TODO: A LOT A LOT of tests.   for _resolve_with_args, we don't know
+        # our expression yet.   so hold onto the replacement
+        if deferred_copy_internals:
+            self._transforms += (deferred_copy_internals,)
+
+
+class StatementLambdaElement(
+    roles.AllowsLambdaRole, LambdaElement, Executable
+):
+    """Represent a composable SQL statement as a :class:`_sql.LambdaElement`.
+
+    The :class:`_sql.StatementLambdaElement` is constructed using the
+    :func:`_sql.lambda_stmt` function::
+
+
+        from sqlalchemy import lambda_stmt
+
+        stmt = lambda_stmt(lambda: select(table))
+
+    Once constructed, additional criteria can be built onto the statement
+    by adding subsequent lambdas, which accept the existing statement
+    object as a single parameter::
+
+        stmt += lambda s: s.where(table.c.col == parameter)
+
+    .. versionadded:: 1.4
+
+    .. seealso::
+
+        :ref:`engine_lambda_caching`
+
+    """
+
+    if TYPE_CHECKING:
+
+        def __init__(
+            self,
+            fn: _StmtLambdaType,
+            role: Type[SQLRole],
+            opts: Union[Type[LambdaOptions], LambdaOptions] = LambdaOptions,
+            apply_propagate_attrs: Optional[ClauseElement] = None,
+        ): ...
+
+    def __add__(
+        self, other: _StmtLambdaElementType[Any]
+    ) -> StatementLambdaElement:
+        return self.add_criteria(other)
+
+    def add_criteria(
+        self,
+        other: _StmtLambdaElementType[Any],
+        enable_tracking: bool = True,
+        track_on: Optional[Any] = None,
+        track_closure_variables: bool = True,
+        track_bound_values: bool = True,
+    ) -> StatementLambdaElement:
+        """Add new criteria to this :class:`_sql.StatementLambdaElement`.
+
+        E.g.::
+
+            >>> def my_stmt(parameter):
+            ...     stmt = lambda_stmt(
+            ...         lambda: select(table.c.x, table.c.y),
+            ...     )
+            ...     stmt = stmt.add_criteria(lambda: table.c.x > parameter)
+            ...     return stmt
+
+        The :meth:`_sql.StatementLambdaElement.add_criteria` method is
+        equivalent to using the Python addition operator to add a new
+        lambda, except that additional arguments may be added including
+        ``track_closure_variables`` and ``track_on``::
+
+            >>> def my_stmt(self, foo):
+            ...     stmt = lambda_stmt(
+            ...         lambda: select(func.max(foo.x, foo.y)),
+            ...         track_closure_variables=False,
+            ...     )
+            ...     stmt = stmt.add_criteria(lambda: self.where_criteria, track_on=[self])
+            ...     return stmt
+
+        See :func:`_sql.lambda_stmt` for a description of the parameters
+        accepted.
+
+        """  # noqa: E501
+
+        opts = self.opts + dict(
+            enable_tracking=enable_tracking,
+            track_closure_variables=track_closure_variables,
+            global_track_bound_values=self.opts.global_track_bound_values,
+            track_on=track_on,
+            track_bound_values=track_bound_values,
+        )
+
+        return LinkedLambdaElement(other, parent_lambda=self, opts=opts)
+
+    def _execute_on_connection(
+        self, connection, distilled_params, execution_options
+    ):
+        if TYPE_CHECKING:
+            assert isinstance(self._rec.expected_expr, ClauseElement)
+        if self._rec.expected_expr.supports_execution:
+            return connection._execute_clauseelement(
+                self, distilled_params, execution_options
+            )
+        else:
+            raise exc.ObjectNotExecutableError(self)
+
+    @property
+    def _proxied(self) -> Any:
+        return self._rec.expected_expr
+
+    @property
+    def _with_options(self):
+        return self._proxied._with_options
+
+    @property
+    def _effective_plugin_target(self):
+        return self._proxied._effective_plugin_target
+
+    @property
+    def _execution_options(self):
+        return self._proxied._execution_options
+
+    @property
+    def _all_selected_columns(self):
+        return self._proxied._all_selected_columns
+
+    @property
+    def is_select(self):
+        return self._proxied.is_select
+
+    @property
+    def is_update(self):
+        return self._proxied.is_update
+
+    @property
+    def is_insert(self):
+        return self._proxied.is_insert
+
+    @property
+    def is_text(self):
+        return self._proxied.is_text
+
+    @property
+    def is_delete(self):
+        return self._proxied.is_delete
+
+    @property
+    def is_dml(self):
+        return self._proxied.is_dml
+
+    def spoil(self) -> NullLambdaStatement:
+        """Return a new :class:`.StatementLambdaElement` that will run
+        all lambdas unconditionally each time.
+
+        """
+        return NullLambdaStatement(self.fn())
+
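+# A hedged debugging sketch (assumption, not part of the library source),
+# where ``some_table`` and ``some_id`` are hypothetical: spoil() helps rule
+# the lambda caching layer in or out when diagnosing unexpected statements::
+#
+#     from sqlalchemy import lambda_stmt, select
+#
+#     stmt = lambda_stmt(lambda: select(some_table)).spoil()
+#     stmt += lambda s: s.where(some_table.c.id == some_id)  # runs each time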
+
+class NullLambdaStatement(roles.AllowsLambdaRole, elements.ClauseElement):
+    """Provides the :class:`.StatementLambdaElement` API but does not
+    cache or analyze lambdas.
+
+    The lambdas are instead invoked immediately.
+
+    The intended use is to isolate issues that may arise when using
+    lambda statements.
+
+    """
+
+    __visit_name__ = "lambda_element"
+
+    _is_lambda_element = True
+
+    _traverse_internals = [
+        ("_resolved", visitors.InternalTraversal.dp_clauseelement)
+    ]
+
+    def __init__(self, statement):
+        self._resolved = statement
+        self._propagate_attrs = statement._propagate_attrs
+
+    def __getattr__(self, key):
+        return getattr(self._resolved, key)
+
+    def __add__(self, other):
+        statement = other(self._resolved)
+
+        return NullLambdaStatement(statement)
+
+    def add_criteria(self, other, **kw):
+        statement = other(self._resolved)
+
+        return NullLambdaStatement(statement)
+
+    def _execute_on_connection(
+        self, connection, distilled_params, execution_options
+    ):
+        if self._resolved.supports_execution:
+            return connection._execute_clauseelement(
+                self, distilled_params, execution_options
+            )
+        else:
+            raise exc.ObjectNotExecutableError(self)
+
+
+class LinkedLambdaElement(StatementLambdaElement):
+    """Represent subsequent links of a :class:`.StatementLambdaElement`."""
+
+    parent_lambda: StatementLambdaElement
+
+    def __init__(
+        self,
+        fn: _StmtLambdaElementType[Any],
+        parent_lambda: StatementLambdaElement,
+        opts: Union[Type[LambdaOptions], LambdaOptions],
+    ):
+        self.opts = opts
+        self.fn = fn
+        self.parent_lambda = parent_lambda
+
+        self.tracker_key = parent_lambda.tracker_key + (fn.__code__,)
+        self._retrieve_tracker_rec(fn, self, opts)
+        self._propagate_attrs = parent_lambda._propagate_attrs
+
+    def _invoke_user_fn(self, fn, *arg):
+        return fn(self.parent_lambda._resolved)
+
+
+class AnalyzedCode:
+    __slots__ = (
+        "track_closure_variables",
+        "track_bound_values",
+        "bindparam_trackers",
+        "closure_trackers",
+        "build_py_wrappers",
+    )
+    _fns: weakref.WeakKeyDictionary[CodeType, AnalyzedCode] = (
+        weakref.WeakKeyDictionary()
+    )
+
+    _generation_mutex = threading.RLock()
+
+    @classmethod
+    def get(cls, fn, lambda_element, lambda_kw, **kw):
+        try:
+            # TODO: validate kw haven't changed?
+            return cls._fns[fn.__code__]
+        except KeyError:
+            pass
+
+        with cls._generation_mutex:
+            # check for other thread already created object
+            if fn.__code__ in cls._fns:
+                return cls._fns[fn.__code__]
+
+            analyzed: AnalyzedCode
+            cls._fns[fn.__code__] = analyzed = AnalyzedCode(
+                fn, lambda_element, lambda_kw, **kw
+            )
+            return analyzed
+
+    def __init__(self, fn, lambda_element, opts):
+        if inspect.ismethod(fn):
+            raise exc.ArgumentError(
+                "Method %s may not be passed as a SQL expression" % fn
+            )
+        closure = fn.__closure__
+
+        self.track_bound_values = (
+            opts.track_bound_values and opts.global_track_bound_values
+        )
+        enable_tracking = opts.enable_tracking
+        track_on = opts.track_on
+        track_closure_variables = opts.track_closure_variables
+
+        self.track_closure_variables = track_closure_variables and not track_on
+
+        # a list of callables generated from _bound_parameter_getter_*
+        # functions.  Each of these uses a PyWrapper object to retrieve
+        # a parameter value
+        self.bindparam_trackers = []
+
+        # a list of callables generated from _cache_key_getter_* functions
+        # these callables work to generate a cache key for the lambda
+        # based on what's inside its closure variables.
+        self.closure_trackers = []
+
+        self.build_py_wrappers = []
+
+        if enable_tracking:
+            if track_on:
+                self._init_track_on(track_on)
+
+            self._init_globals(fn)
+
+            if closure:
+                self._init_closure(fn)
+
+        self._setup_additional_closure_trackers(fn, lambda_element, opts)
+
+    def _init_track_on(self, track_on):
+        self.closure_trackers.extend(
+            self._cache_key_getter_track_on(idx, elem)
+            for idx, elem in enumerate(track_on)
+        )
+
+    def _init_globals(self, fn):
+        build_py_wrappers = self.build_py_wrappers
+        bindparam_trackers = self.bindparam_trackers
+        track_bound_values = self.track_bound_values
+
+        for name in fn.__code__.co_names:
+            if name not in fn.__globals__:
+                continue
+
+            _bound_value = self._roll_down_to_literal(fn.__globals__[name])
+
+            if coercions._deep_is_literal(_bound_value):
+                build_py_wrappers.append((name, None))
+                if track_bound_values:
+                    bindparam_trackers.append(
+                        self._bound_parameter_getter_func_globals(name)
+                    )
+
+    def _init_closure(self, fn):
+        build_py_wrappers = self.build_py_wrappers
+        closure = fn.__closure__
+
+        track_bound_values = self.track_bound_values
+        track_closure_variables = self.track_closure_variables
+        bindparam_trackers = self.bindparam_trackers
+        closure_trackers = self.closure_trackers
+
+        for closure_index, (fv, cell) in enumerate(
+            zip(fn.__code__.co_freevars, closure)
+        ):
+            _bound_value = self._roll_down_to_literal(cell.cell_contents)
+
+            if coercions._deep_is_literal(_bound_value):
+                build_py_wrappers.append((fv, closure_index))
+                if track_bound_values:
+                    bindparam_trackers.append(
+                        self._bound_parameter_getter_func_closure(
+                            fv, closure_index
+                        )
+                    )
+            else:
+                # for normal cell contents, add them to a list that
+                # we can compare later when we get new lambdas.  if
+                # any identities have changed, then we will
+                # recalculate the whole lambda and run it again.
+
+                if track_closure_variables:
+                    closure_trackers.append(
+                        self._cache_key_getter_closure_variable(
+                            fn, fv, closure_index, cell.cell_contents
+                        )
+                    )
+
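+    # A hedged illustration (assumption, not part of the library source) of
+    # the branch above, with a hypothetical table ``t``: in
+    #
+    #     def make_stmt(y):
+    #         return lambda_stmt(lambda: select(t).where(t.c.x == y))
+    #
+    # the closure cell for ``y`` rolls down to a literal and is wrapped as a
+    # tracked bound parameter, while a closure cell holding a SQL construct
+    # would instead be tracked by identity via closure_trackers.
+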
+    def _setup_additional_closure_trackers(self, fn, lambda_element, opts):
+        # an additional step is to actually run the function, then
+        # go through the PyWrapper objects that were set up to catch a bound
+        # parameter.   then if they *didn't* make a param, oh they're another
+        # object in the closure we have to track for our cache key.  so
+        # create trackers to catch those.
+
+        analyzed_function = AnalyzedFunction(
+            self,
+            lambda_element,
+            None,
+            fn,
+        )
+
+        closure_trackers = self.closure_trackers
+
+        for pywrapper in analyzed_function.closure_pywrappers:
+            if not pywrapper._sa__has_param:
+                closure_trackers.append(
+                    self._cache_key_getter_tracked_literal(fn, pywrapper)
+                )
+
+    @classmethod
+    def _roll_down_to_literal(cls, element):
+        is_clause_element = hasattr(element, "__clause_element__")
+
+        if is_clause_element:
+            while not isinstance(
+                element, (elements.ClauseElement, schema.SchemaItem, type)
+            ):
+                try:
+                    element = element.__clause_element__()
+                except AttributeError:
+                    break
+
+        if not is_clause_element:
+            insp = inspection.inspect(element, raiseerr=False)
+            if insp is not None:
+                try:
+                    return insp.__clause_element__()
+                except AttributeError:
+                    return insp
+
+            # TODO: should we coerce consts None/True/False here?
+            return element
+        else:
+            return element
+
+    def _bound_parameter_getter_func_globals(self, name):
+        """Return a getter that will extend a list of bound parameters
+        with new entries from the ``__globals__`` collection of a particular
+        lambda.
+
+        """
+
+        def extract_parameter_value(
+            current_fn, tracker_instrumented_fn, result
+        ):
+            wrapper = tracker_instrumented_fn.__globals__[name]
+            object.__getattribute__(wrapper, "_extract_bound_parameters")(
+                current_fn.__globals__[name], result
+            )
+
+        return extract_parameter_value
+
+    def _bound_parameter_getter_func_closure(self, name, closure_index):
+        """Return a getter that will extend a list of bound parameters
+        with new entries from the ``__closure__`` collection of a particular
+        lambda.
+
+        """
+
+        def extract_parameter_value(
+            current_fn, tracker_instrumented_fn, result
+        ):
+            wrapper = tracker_instrumented_fn.__closure__[
+                closure_index
+            ].cell_contents
+            object.__getattribute__(wrapper, "_extract_bound_parameters")(
+                current_fn.__closure__[closure_index].cell_contents, result
+            )
+
+        return extract_parameter_value
+
+    def _cache_key_getter_track_on(self, idx, elem):
+        """Return a getter that will extend a cache key with new entries
+        from the "track_on" parameter passed to a :class:`.LambdaElement`.
+
+        """
+
+        if isinstance(elem, tuple):
+            # tuple must contain HasCacheKey elements
+            def get(closure, opts, anon_map, bindparams):
+                return tuple(
+                    tup_elem._gen_cache_key(anon_map, bindparams)
+                    for tup_elem in opts.track_on[idx]
+                )
+
+        elif isinstance(elem, _cache_key.HasCacheKey):
+
+            def get(closure, opts, anon_map, bindparams):
+                return opts.track_on[idx]._gen_cache_key(anon_map, bindparams)
+
+        else:
+
+            def get(closure, opts, anon_map, bindparams):
+                return opts.track_on[idx]
+
+        return get
+
+    def _cache_key_getter_closure_variable(
+        self,
+        fn,
+        variable_name,
+        idx,
+        cell_contents,
+        use_clause_element=False,
+        use_inspect=False,
+    ):
+        """Return a getter that will extend a cache key with new entries
+        from the ``__closure__`` collection of a particular lambda.
+
+        """
+
+        if isinstance(cell_contents, _cache_key.HasCacheKey):
+
+            def get(closure, opts, anon_map, bindparams):
+                obj = closure[idx].cell_contents
+                if use_inspect:
+                    obj = inspection.inspect(obj)
+                elif use_clause_element:
+                    while hasattr(obj, "__clause_element__"):
+                        if not getattr(obj, "is_clause_element", False):
+                            obj = obj.__clause_element__()
+
+                return obj._gen_cache_key(anon_map, bindparams)
+
+        elif isinstance(cell_contents, types.FunctionType):
+
+            def get(closure, opts, anon_map, bindparams):
+                return closure[idx].cell_contents.__code__
+
+        elif isinstance(cell_contents, collections_abc.Sequence):
+
+            def get(closure, opts, anon_map, bindparams):
+                contents = closure[idx].cell_contents
+
+                try:
+                    return tuple(
+                        elem._gen_cache_key(anon_map, bindparams)
+                        for elem in contents
+                    )
+                except AttributeError as ae:
+                    self._raise_for_uncacheable_closure_variable(
+                        variable_name, fn, from_=ae
+                    )
+
+        else:
+            # if the object is a mapped class or aliased class, or some
+            # other object in the ORM realm of things like that, imitate
+            # the logic used in coercions.expect() to roll it down to the
+            # SQL element
+            element = cell_contents
+            is_clause_element = False
+            while hasattr(element, "__clause_element__"):
+                is_clause_element = True
+                if not getattr(element, "is_clause_element", False):
+                    element = element.__clause_element__()
+                else:
+                    break
+
+            if not is_clause_element:
+                insp = inspection.inspect(element, raiseerr=False)
+                if insp is not None:
+                    return self._cache_key_getter_closure_variable(
+                        fn, variable_name, idx, insp, use_inspect=True
+                    )
+            else:
+                return self._cache_key_getter_closure_variable(
+                    fn, variable_name, idx, element, use_clause_element=True
+                )
+
+            self._raise_for_uncacheable_closure_variable(variable_name, fn)
+
+        return get
+
+    def _raise_for_uncacheable_closure_variable(
+        self, variable_name, fn, from_=None
+    ):
+        raise exc.InvalidRequestError(
+            "Closure variable named '%s' inside of lambda callable %s "
+            "does not refer to a cacheable SQL element, and also does not "
+            "appear to be serving as a SQL literal bound value based on "
+            "the default "
+            "SQL expression returned by the function.   This variable "
+            "needs to remain outside the scope of a SQL-generating lambda "
+            "so that a proper cache key may be generated from the "
+            "lambda's state.  Evaluate this variable outside of the "
+            "lambda, set track_on=[<elements>] to explicitly select "
+            "closure elements to track, or set "
+            "track_closure_variables=False to exclude "
+            "closure variables from being part of the cache key."
+            % (variable_name, fn.__code__),
+        ) from from_
+
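+    # A hedged illustration (assumption, not part of the library source) of
+    # this failure mode, where ``crit`` is a hypothetical plain Python object
+    # that is neither a cacheable SQL element nor a literal bound value::
+    #
+    #     stmt = lambda_stmt(
+    #         lambda: select(t).where(crit.make_clause())
+    #     )  # may raise InvalidRequestError as described above
+    #
+    #     clause = crit.make_clause()  # evaluate outside the lambda instead
+    #     stmt = lambda_stmt(lambda: select(t).where(clause))
+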
+    def _cache_key_getter_tracked_literal(self, fn, pytracker):
+        """Return a getter that will extend a cache key with new entries
+        from the ``__closure__`` collection of a particular lambda.
+
+        this getter differs from _cache_key_getter_closure_variable
+        in that these are detected after the function is run, and PyWrapper
+        objects have recorded that a particular literal value is in fact
+        not being interpreted as a bound parameter.
+
+        """
+
+        elem = pytracker._sa__to_evaluate
+        closure_index = pytracker._sa__closure_index
+        variable_name = pytracker._sa__name
+
+        return self._cache_key_getter_closure_variable(
+            fn, variable_name, closure_index, elem
+        )
+
+
+class NonAnalyzedFunction:
+    __slots__ = ("expr",)
+
+    closure_bindparams: Optional[List[BindParameter[Any]]] = None
+    bindparam_trackers: Optional[List[_BoundParameterGetter]] = None
+
+    is_sequence = False
+
+    expr: ClauseElement
+
+    def __init__(self, expr: ClauseElement):
+        self.expr = expr
+
+    @property
+    def expected_expr(self) -> ClauseElement:
+        return self.expr
+
+
+class AnalyzedFunction:
+    __slots__ = (
+        "analyzed_code",
+        "fn",
+        "closure_pywrappers",
+        "tracker_instrumented_fn",
+        "expr",
+        "bindparam_trackers",
+        "expected_expr",
+        "is_sequence",
+        "propagate_attrs",
+        "closure_bindparams",
+    )
+
+    closure_bindparams: Optional[List[BindParameter[Any]]]
+    expected_expr: Union[ClauseElement, List[ClauseElement]]
+    bindparam_trackers: Optional[List[_BoundParameterGetter]]
+
+    def __init__(
+        self,
+        analyzed_code,
+        lambda_element,
+        apply_propagate_attrs,
+        fn,
+    ):
+        self.analyzed_code = analyzed_code
+        self.fn = fn
+
+        self.bindparam_trackers = analyzed_code.bindparam_trackers
+
+        self._instrument_and_run_function(lambda_element)
+
+        self._coerce_expression(lambda_element, apply_propagate_attrs)
+
+    def _instrument_and_run_function(self, lambda_element):
+        analyzed_code = self.analyzed_code
+
+        fn = self.fn
+        self.closure_pywrappers = closure_pywrappers = []
+
+        build_py_wrappers = analyzed_code.build_py_wrappers
+
+        if not build_py_wrappers:
+            self.tracker_instrumented_fn = tracker_instrumented_fn = fn
+            self.expr = lambda_element._invoke_user_fn(tracker_instrumented_fn)
+        else:
+            track_closure_variables = analyzed_code.track_closure_variables
+            closure = fn.__closure__
+
+            # will form the __closure__ of the function when we rebuild it
+            if closure:
+                new_closure = {
+                    fv: cell.cell_contents
+                    for fv, cell in zip(fn.__code__.co_freevars, closure)
+                }
+            else:
+                new_closure = {}
+
+            # will form the __globals__ of the function when we rebuild it
+            new_globals = fn.__globals__.copy()
+
+            for name, closure_index in build_py_wrappers:
+                if closure_index is not None:
+                    value = closure[closure_index].cell_contents
+                    new_closure[name] = bind = PyWrapper(
+                        fn,
+                        name,
+                        value,
+                        closure_index=closure_index,
+                        track_bound_values=(
+                            self.analyzed_code.track_bound_values
+                        ),
+                    )
+                    if track_closure_variables:
+                        closure_pywrappers.append(bind)
+                else:
+                    value = fn.__globals__[name]
+                    new_globals[name] = bind = PyWrapper(fn, name, value)
+
+            # rewrite the original fn.   things that look like they will
+            # become bound parameters are wrapped in a PyWrapper.
+            self.tracker_instrumented_fn = tracker_instrumented_fn = (
+                self._rewrite_code_obj(
+                    fn,
+                    [new_closure[name] for name in fn.__code__.co_freevars],
+                    new_globals,
+                )
+            )
+
+            # now invoke the function.  This will give us a new SQL
+            # expression, but all the places that there would be a bound
+            # parameter, the PyWrapper in its place will give us a bind
+            # with a predictable name we can match up later.
+
+            # additionally, each PyWrapper will log that it did in fact
+            # create a parameter; otherwise, it's some kind of Python
+            # object in the closure and we want to track that, to make
+            # sure it doesn't change to something else, or if it does,
+            # that we create a different tracked function with that
+            # variable.
+            self.expr = lambda_element._invoke_user_fn(tracker_instrumented_fn)
+
+    def _coerce_expression(self, lambda_element, apply_propagate_attrs):
+        """Run the tracker-generated expression through coercion rules.
+
+        After the user-defined lambda has been invoked to produce a statement
+        for re-use, run it through coercion rules to both check that it's the
+        correct type of object and also to coerce it to its useful form.
+
+        """
+
+        parent_lambda = lambda_element.parent_lambda
+        expr = self.expr
+
+        if parent_lambda is None:
+            if isinstance(expr, collections_abc.Sequence):
+                self.expected_expr = [
+                    cast(
+                        "ClauseElement",
+                        coercions.expect(
+                            lambda_element.role,
+                            sub_expr,
+                            apply_propagate_attrs=apply_propagate_attrs,
+                        ),
+                    )
+                    for sub_expr in expr
+                ]
+                self.is_sequence = True
+            else:
+                self.expected_expr = cast(
+                    "ClauseElement",
+                    coercions.expect(
+                        lambda_element.role,
+                        expr,
+                        apply_propagate_attrs=apply_propagate_attrs,
+                    ),
+                )
+                self.is_sequence = False
+        else:
+            self.expected_expr = expr
+            self.is_sequence = False
+
+        if apply_propagate_attrs is not None:
+            self.propagate_attrs = apply_propagate_attrs._propagate_attrs
+        else:
+            self.propagate_attrs = util.EMPTY_DICT
+
+    def _rewrite_code_obj(self, f, cell_values, globals_):
+        """Return a copy of f, with a new closure and new globals
+
+        yes it works in pypy :P
+
+        """
+
+        argrange = range(len(cell_values))
+
+        code = "def make_cells():\n"
+        if cell_values:
+            code += "    (%s) = (%s)\n" % (
+                ", ".join("i%d" % i for i in argrange),
+                ", ".join("o%d" % i for i in argrange),
+            )
+        code += "    def closure():\n"
+        code += "        return %s\n" % ", ".join("i%d" % i for i in argrange)
+        code += "    return closure.__closure__"
+        vars_ = {"o%d" % i: cell_values[i] for i in argrange}
+        exec(code, vars_, vars_)
+        closure = vars_["make_cells"]()
+
+        func = type(f)(
+            f.__code__, globals_, f.__name__, f.__defaults__, closure
+        )
+        func.__annotations__ = f.__annotations__
+        func.__kwdefaults__ = f.__kwdefaults__
+        func.__doc__ = f.__doc__
+        func.__module__ = f.__module__
+
+        return func
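+
+# Illustrative sketch (not part of the library): the same function-rebuilding
+# technique used by _rewrite_code_obj above can be reproduced standalone,
+# pairing an existing __code__ object with a freshly manufactured
+# __closure__:
+#
+#     def outer(x):
+#         def inner():
+#             return x
+#
+#         return inner
+#
+#     fn = outer(5)
+#     rebuilt = type(fn)(
+#         fn.__code__,
+#         fn.__globals__,
+#         fn.__name__,
+#         fn.__defaults__,
+#         outer(10).__closure__,
+#     )
+#     assert rebuilt() == 10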
+
+
+class PyWrapper(ColumnOperators):
+    """A wrapper object that is injected into the ``__globals__`` and
+    ``__closure__`` of a Python function.
+
+    When the function is instrumented with :class:`.PyWrapper` objects, it is
+    then invoked just once in order to set up the wrappers.  We look through
+    all the :class:`.PyWrapper` objects we made to find the ones that generated
+    a :class:`.BindParameter` object, e.g. the expression system interpreted
+    something as a literal.   Those positions in the globals/closure are then
+    ones that we will look at, each time a new lambda comes in that refers to
+    the same ``__code__`` object.   In this way, we keep a single version of
+    the SQL expression that this lambda produced, without calling upon the
+    Python function that created it more than once, unless its other closure
+    variables have changed.   The expression is then transformed to have the
+    new bound values embedded into it.
+
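+    E.g., given a lambda statement along the lines of::
+
+        from sqlalchemy import lambda_stmt, select
+
+        x = 5
+        stmt = lambda_stmt(lambda: select(tbl).where(tbl.c.q == x))
+
+    (where ``tbl`` stands in for an arbitrary :class:`.Table`), the
+    closure variable ``x`` is replaced by a :class:`.PyWrapper`; the
+    ``==`` comparison against the wrapper produces a
+    :class:`.BindParameter`, so subsequent invocations with new values
+    of ``x`` reuse the cached SQL construct with only the new bound
+    value applied.
+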
+    """
+
+    def __init__(
+        self,
+        fn,
+        name,
+        to_evaluate,
+        closure_index=None,
+        getter=None,
+        track_bound_values=True,
+    ):
+        self.fn = fn
+        self._name = name
+        self._to_evaluate = to_evaluate
+        self._param = None
+        self._has_param = False
+        self._bind_paths = {}
+        self._getter = getter
+        self._closure_index = closure_index
+        self.track_bound_values = track_bound_values
+
+    def __call__(self, *arg, **kw):
+        elem = object.__getattribute__(self, "_to_evaluate")
+        value = elem(*arg, **kw)
+        if (
+            self._sa_track_bound_values
+            and coercions._deep_is_literal(value)
+            and not isinstance(
+                # TODO: coverage where an ORM option or similar is here
+                value,
+                _cache_key.HasCacheKey,
+            )
+        ):
+            name = object.__getattribute__(self, "_name")
+            raise exc.InvalidRequestError(
+                "Can't invoke Python callable %s() inside of lambda "
+                "expression argument at %s; lambda SQL constructs should "
+                "not invoke functions from closure variables to produce "
+                "literal values since the "
+                "lambda SQL system normally extracts bound values without "
+                "actually "
+                "invoking the lambda or any functions within it.  Call the "
+                "function outside of the "
+                "lambda and assign to a local variable that is used in the "
+                "lambda as a closure variable, or set "
+                "track_bound_values=False if the return value of this "
+                "function is used in some other way other than a SQL bound "
+                "value." % (name, self._sa_fn.__code__)
+            )
+        else:
+            return value
+
+    def operate(self, op, *other, **kwargs):
+        elem = object.__getattribute__(self, "_py_wrapper_literal")()
+        return op(elem, *other, **kwargs)
+
+    def reverse_operate(self, op, other, **kwargs):
+        elem = object.__getattribute__(self, "_py_wrapper_literal")()
+        return op(other, elem, **kwargs)
+
+    def _extract_bound_parameters(self, starting_point, result_list):
+        param = object.__getattribute__(self, "_param")
+        if param is not None:
+            param = param._with_value(starting_point, maintain_key=True)
+            result_list.append(param)
+        for pywrapper in object.__getattribute__(self, "_bind_paths").values():
+            getter = object.__getattribute__(pywrapper, "_getter")
+            element = getter(starting_point)
+            pywrapper._sa__extract_bound_parameters(element, result_list)
+
+    def _py_wrapper_literal(self, expr=None, operator=None, **kw):
+        param = object.__getattribute__(self, "_param")
+        to_evaluate = object.__getattribute__(self, "_to_evaluate")
+        if param is None:
+            name = object.__getattribute__(self, "_name")
+            self._param = param = elements.BindParameter(
+                name,
+                required=False,
+                unique=True,
+                _compared_to_operator=operator,
+                _compared_to_type=expr.type if expr is not None else None,
+            )
+            self._has_param = True
+        return param._with_value(to_evaluate, maintain_key=True)
+
+    def __bool__(self):
+        to_evaluate = object.__getattribute__(self, "_to_evaluate")
+        return bool(to_evaluate)
+
+    def __getattribute__(self, key):
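+        # names prefixed with "_sa_" bypass the proxying below and access
+        # the wrapper's own attributes, e.g. self._sa_fn -> self.fn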
+        if key.startswith("_sa_"):
+            return object.__getattribute__(self, key[4:])
+        elif key in (
+            "__clause_element__",
+            "operate",
+            "reverse_operate",
+            "_py_wrapper_literal",
+            "__class__",
+            "__dict__",
+        ):
+            return object.__getattribute__(self, key)
+
+        if key.startswith("__"):
+            elem = object.__getattribute__(self, "_to_evaluate")
+            return getattr(elem, key)
+        else:
+            return self._sa__add_getter(key, operator.attrgetter)
+
+    def __iter__(self):
+        elem = object.__getattribute__(self, "_to_evaluate")
+        return iter(elem)
+
+    def __getitem__(self, key):
+        elem = object.__getattribute__(self, "_to_evaluate")
+        if not hasattr(elem, "__getitem__"):
+            raise AttributeError("__getitem__")
+
+        if isinstance(key, PyWrapper):
+            # TODO: coverage
+            raise exc.InvalidRequestError(
+                "Dictionary keys / list indexes inside of a cached "
+                "lambda must be Python literals only"
+            )
+        return self._sa__add_getter(key, operator.itemgetter)
+
+    def _add_getter(self, key, getter_fn):
+        bind_paths = object.__getattribute__(self, "_bind_paths")
+
+        bind_path_key = (key, getter_fn)
+        if bind_path_key in bind_paths:
+            return bind_paths[bind_path_key]
+
+        getter = getter_fn(key)
+        elem = object.__getattribute__(self, "_to_evaluate")
+        value = getter(elem)
+
+        rolled_down_value = AnalyzedCode._roll_down_to_literal(value)
+
+        if coercions._deep_is_literal(rolled_down_value):
+            wrapper = PyWrapper(self._sa_fn, key, value, getter=getter)
+            bind_paths[bind_path_key] = wrapper
+            return wrapper
+        else:
+            return value
+
+
+@inspection._inspects(LambdaElement)
+def insp(lmb):
+    return inspection.inspect(lmb._resolved)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/naming.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/naming.py
new file mode 100644
index 00000000..58203e4b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/naming.py
@@ -0,0 +1,212 @@
+# sql/naming.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+"""Establish constraint and index naming conventions.
+
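+E.g., a convention for unnamed :class:`.UniqueConstraint` objects might
+be configured as::
+
+    metadata = MetaData(
+        naming_convention={"uq": "uq_%(table_name)s_%(column_0_name)s"}
+    )
+
+with which an unnamed unique constraint on table ``user``, first column
+``name``, receives the name ``uq_user_name``.
+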
+"""
+
+from __future__ import annotations
+
+import re
+
+from . import events  # noqa
+from .base import _NONE_NAME
+from .elements import conv as conv
+from .schema import CheckConstraint
+from .schema import Column
+from .schema import Constraint
+from .schema import ForeignKeyConstraint
+from .schema import Index
+from .schema import PrimaryKeyConstraint
+from .schema import Table
+from .schema import UniqueConstraint
+from .. import event
+from .. import exc
+
+
+class ConventionDict:
+    def __init__(self, const, table, convention):
+        self.const = const
+        self._is_fk = isinstance(const, ForeignKeyConstraint)
+        self.table = table
+        self.convention = convention
+        self._const_name = const.name
+
+    def _key_table_name(self):
+        return self.table.name
+
+    def _column_X(self, idx, attrname):
+        if self._is_fk:
+            try:
+                fk = self.const.elements[idx]
+            except IndexError:
+                return ""
+            else:
+                return getattr(fk.parent, attrname)
+        else:
+            cols = list(self.const.columns)
+            try:
+                col = cols[idx]
+            except IndexError:
+                return ""
+            else:
+                return getattr(col, attrname)
+
+    def _key_constraint_name(self):
+        if self._const_name in (None, _NONE_NAME):
+            raise exc.InvalidRequestError(
+                "Naming convention including "
+                "%(constraint_name)s token requires that "
+                "constraint is explicitly named."
+            )
+        if not isinstance(self._const_name, conv):
+            self.const.name = None
+        return self._const_name
+
+    def _key_column_X_key(self, idx):
+        # note this method was missing before
+        # [ticket:3989], meaning tokens like ``%(column_0_key)s`` weren't
+        # working even though documented.
+        return self._column_X(idx, "key")
+
+    def _key_column_X_name(self, idx):
+        return self._column_X(idx, "name")
+
+    def _key_column_X_label(self, idx):
+        return self._column_X(idx, "_ddl_label")
+
+    def _key_referred_table_name(self):
+        fk = self.const.elements[0]
+        refs = fk.target_fullname.split(".")
+        if len(refs) == 3:
+            refschema, reftable, refcol = refs
+        else:
+            reftable, refcol = refs
+        return reftable
+
+    def _key_referred_column_X_name(self, idx):
+        fk = self.const.elements[idx]
+        # note that before [ticket:3989], this method was returning
+        # the specification for the :class:`.ForeignKey` itself, which normally
+        # would be using the ``.key`` of the column, not the name.
+        return fk.column.name
+
+    def __getitem__(self, key):
+        if key in self.convention:
+            return self.convention[key](self.const, self.table)
+        elif hasattr(self, "_key_%s" % key):
+            return getattr(self, "_key_%s" % key)()
+        else:
+            col_template = re.match(r".*_?column_(\d+)(_?N)?_.+", key)
+            if col_template:
+                idx = col_template.group(1)
+                multiples = col_template.group(2)
+
+                if multiples:
+                    if self._is_fk:
+                        elems = self.const.elements
+                    else:
+                        elems = list(self.const.columns)
+                    tokens = []
+                    for idx, elem in enumerate(elems):
+                        attr = "_key_" + key.replace("0" + multiples, "X")
+                        try:
+                            tokens.append(getattr(self, attr)(idx))
+                        except AttributeError:
+                            raise KeyError(key)
+                    sep = "_" if multiples.startswith("_") else ""
+                    return sep.join(tokens)
+                else:
+                    attr = "_key_" + key.replace(idx, "X")
+                    idx = int(idx)
+                    if hasattr(self, attr):
+                        return getattr(self, attr)(idx)
+        raise KeyError(key)
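+
+# Illustrative note on the token resolution above: a convention token such
+# as "column_0N_name" matches the regex with idx="0" and multiples="N", so
+# every column in the constraint renders through _key_column_X_name() and
+# the results are joined with no separator; "column_0_N_name" captures
+# multiples="_N" and joins with "_".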
+
+
+_prefix_dict = {
+    Index: "ix",
+    PrimaryKeyConstraint: "pk",
+    CheckConstraint: "ck",
+    UniqueConstraint: "uq",
+    ForeignKeyConstraint: "fk",
+}
+
+
+def _get_convention(dict_, key):
+    for super_ in key.__mro__:
+        if super_ in _prefix_dict and _prefix_dict[super_] in dict_:
+            return dict_[_prefix_dict[super_]]
+        elif super_ in dict_:
+            return dict_[super_]
+    else:
+        return None
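+
+# e.g. (illustrative): with MetaData(naming_convention={"uq": "..."}), a
+# UniqueConstraint resolves through _prefix_dict to the "uq" entry; a
+# convention may also be keyed directly to a constraint class, with both
+# forms checked along the class's __mro__, prefix string first.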
+
+
+def _constraint_name_for_table(const, table):
+    metadata = table.metadata
+    convention = _get_convention(metadata.naming_convention, type(const))
+
+    if isinstance(const.name, conv):
+        return const.name
+    elif (
+        convention is not None
+        and not isinstance(const.name, conv)
+        and (
+            const.name is None
+            or "constraint_name" in convention
+            or const.name is _NONE_NAME
+        )
+    ):
+        return conv(
+            convention
+            % ConventionDict(const, table, metadata.naming_convention)
+        )
+    elif convention is _NONE_NAME:
+        return None
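+
+# note (descriptive): a name already wrapped in conv() is treated above as
+# explicitly converted and passed through unchanged, which is what lets a
+# user-supplied conv("my_name") bypass the convention machinery.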
+
+
+@event.listens_for(
+    PrimaryKeyConstraint, "_sa_event_column_added_to_pk_constraint"
+)
+def _column_added_to_pk_constraint(pk_constraint, col):
+    if pk_constraint._implicit_generated:
+        # only operate upon the "implicit" pk constraint for now,
+        # as we have to force the name to None to reset it.  the
+        # "implicit" constraint will only have a naming convention name
+        # if at all.
+        table = pk_constraint.table
+        pk_constraint.name = None
+        newname = _constraint_name_for_table(pk_constraint, table)
+        if newname:
+            pk_constraint.name = newname
+
+
+@event.listens_for(Constraint, "after_parent_attach")
+@event.listens_for(Index, "after_parent_attach")
+def _constraint_name(const, table):
+    if isinstance(table, Column):
+        # this path occurs for a CheckConstraint linked to a Column
+
+        # for column-attached constraint, set another event
+        # to link the column attached to the table as this constraint
+        # associated with the table.
+        event.listen(
+            table,
+            "after_parent_attach",
+            lambda col, table: _constraint_name(const, table),
+        )
+
+    elif isinstance(table, Table):
+        if isinstance(const.name, conv) or const.name is _NONE_NAME:
+            return
+
+        newname = _constraint_name_for_table(const, table)
+        if newname:
+            const.name = newname
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/operators.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/operators.py
new file mode 100644
index 00000000..d5f876cb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/operators.py
@@ -0,0 +1,2623 @@
+# sql/operators.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Defines operators used in SQL expressions."""
+
+from __future__ import annotations
+
+from enum import IntEnum
+from operator import add as _uncast_add
+from operator import and_ as _uncast_and_
+from operator import contains as _uncast_contains
+from operator import eq as _uncast_eq
+from operator import floordiv as _uncast_floordiv
+from operator import ge as _uncast_ge
+from operator import getitem as _uncast_getitem
+from operator import gt as _uncast_gt
+from operator import inv as _uncast_inv
+from operator import le as _uncast_le
+from operator import lshift as _uncast_lshift
+from operator import lt as _uncast_lt
+from operator import mod as _uncast_mod
+from operator import mul as _uncast_mul
+from operator import ne as _uncast_ne
+from operator import neg as _uncast_neg
+from operator import or_ as _uncast_or_
+from operator import rshift as _uncast_rshift
+from operator import sub as _uncast_sub
+from operator import truediv as _uncast_truediv
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import Generic
+from typing import Optional
+from typing import overload
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from .. import exc
+from .. import util
+from ..util.typing import Literal
+from ..util.typing import Protocol
+
+if typing.TYPE_CHECKING:
+    from ._typing import ColumnExpressionArgument
+    from .cache_key import CacheConst
+    from .elements import ColumnElement
+    from .type_api import TypeEngine
+
+_T = TypeVar("_T", bound=Any)
+_FN = TypeVar("_FN", bound=Callable[..., Any])
+
+
+class OperatorType(Protocol):
+    """describe an op() function."""
+
+    __slots__ = ()
+
+    __name__: str
+
+    @overload
+    def __call__(
+        self,
+        left: ColumnExpressionArgument[Any],
+        right: Optional[Any] = None,
+        *other: Any,
+        **kwargs: Any,
+    ) -> ColumnElement[Any]: ...
+
+    @overload
+    def __call__(
+        self,
+        left: Operators,
+        right: Optional[Any] = None,
+        *other: Any,
+        **kwargs: Any,
+    ) -> Operators: ...
+
+    def __call__(
+        self,
+        left: Any,
+        right: Optional[Any] = None,
+        *other: Any,
+        **kwargs: Any,
+    ) -> Operators: ...
+
+
+add = cast(OperatorType, _uncast_add)
+and_ = cast(OperatorType, _uncast_and_)
+contains = cast(OperatorType, _uncast_contains)
+eq = cast(OperatorType, _uncast_eq)
+floordiv = cast(OperatorType, _uncast_floordiv)
+ge = cast(OperatorType, _uncast_ge)
+getitem = cast(OperatorType, _uncast_getitem)
+gt = cast(OperatorType, _uncast_gt)
+inv = cast(OperatorType, _uncast_inv)
+le = cast(OperatorType, _uncast_le)
+lshift = cast(OperatorType, _uncast_lshift)
+lt = cast(OperatorType, _uncast_lt)
+mod = cast(OperatorType, _uncast_mod)
+mul = cast(OperatorType, _uncast_mul)
+ne = cast(OperatorType, _uncast_ne)
+neg = cast(OperatorType, _uncast_neg)
+or_ = cast(OperatorType, _uncast_or_)
+rshift = cast(OperatorType, _uncast_rshift)
+sub = cast(OperatorType, _uncast_sub)
+truediv = cast(OperatorType, _uncast_truediv)
+
+
+class Operators:
+    """Base of comparison and logical operators.
+
+    Implements base methods
+    :meth:`~sqlalchemy.sql.operators.Operators.operate` and
+    :meth:`~sqlalchemy.sql.operators.Operators.reverse_operate`, as well as
+    :meth:`~sqlalchemy.sql.operators.Operators.__and__`,
+    :meth:`~sqlalchemy.sql.operators.Operators.__or__`,
+    :meth:`~sqlalchemy.sql.operators.Operators.__invert__`.
+
+    It is usually used via its most common subclass
+    :class:`.ColumnOperators`.
+
+    """
+
+    __slots__ = ()
+
+    def __and__(self, other: Any) -> Operators:
+        """Implement the ``&`` operator.
+
+        When used with SQL expressions, results in an
+        AND operation, equivalent to
+        :func:`_expression.and_`, that is::
+
+            a & b
+
+        is equivalent to::
+
+            from sqlalchemy import and_
+
+            and_(a, b)
+
+        Care should be taken when using ``&`` regarding
+        operator precedence; the ``&`` operator binds more tightly than
+        comparison operators such as ``==``.  The operands should be
+        enclosed in parentheses if they contain further sub-expressions::
+
+            (a == 2) & (b == 4)
+
+        """
+        return self.operate(and_, other)
+
+    def __or__(self, other: Any) -> Operators:
+        """Implement the ``|`` operator.
+
+        When used with SQL expressions, results in an
+        OR operation, equivalent to
+        :func:`_expression.or_`, that is::
+
+            a | b
+
+        is equivalent to::
+
+            from sqlalchemy import or_
+
+            or_(a, b)
+
+        Care should be taken when using ``|`` regarding
+        operator precedence; the ``|`` operator binds more tightly than
+        comparison operators such as ``==``.  The operands should be
+        enclosed in parentheses if they contain further sub-expressions::
+
+            (a == 2) | (b == 4)
+
+        """
+        return self.operate(or_, other)
+
+    def __invert__(self) -> Operators:
+        """Implement the ``~`` operator.
+
+        When used with SQL expressions, results in a
+        NOT operation, equivalent to
+        :func:`_expression.not_`, that is::
+
+            ~a
+
+        is equivalent to::
+
+            from sqlalchemy import not_
+
+            not_(a)
+
+        """
+        return self.operate(inv)
+
+    def op(
+        self,
+        opstring: str,
+        precedence: int = 0,
+        is_comparison: bool = False,
+        return_type: Optional[
+            Union[Type[TypeEngine[Any]], TypeEngine[Any]]
+        ] = None,
+        python_impl: Optional[Callable[..., Any]] = None,
+    ) -> Callable[[Any], Operators]:
+        """Produce a generic operator function.
+
+        e.g.::
+
+          somecolumn.op("*")(5)
+
+        produces::
+
+          somecolumn * 5
+
+        This function can also be used to make bitwise operators explicit. For
+        example::
+
+          somecolumn.op("&")(0xFF)
+
+        is a bitwise AND of the value in ``somecolumn``.
+
+        :param opstring: a string which will be output as the infix operator
+          between this element and the expression passed to the
+          generated function.
+
+        :param precedence: precedence which the database is expected to apply
+         to the operator in SQL expressions. This integer value acts as a hint
+         for the SQL compiler to know when explicit parenthesis should be
+         rendered around a particular operation. A lower number will cause the
+         expression to be parenthesized when applied against another operator
+         with higher precedence. The default value of ``0`` is lower than all
+         operators except for the comma (``,``) and ``AS`` operators. A value
+         of 100 will be higher or equal to all operators, and -100 will be
+         lower than or equal to all operators.
+
+         .. seealso::
+
+            :ref:`faq_sql_expression_op_parenthesis` - detailed description
+            of how the SQLAlchemy SQL compiler renders parenthesis
+
+        :param is_comparison: legacy; if True, the operator will be considered
+         as a "comparison" operator, that is which evaluates to a boolean
+         true/false value, like ``==``, ``>``, etc.  This flag is provided
+         so that ORM relationships can establish that the operator is a
+         comparison operator when used in a custom join condition.
+
+         Using the ``is_comparison`` parameter is superseded by using the
+         :meth:`.Operators.bool_op` method instead;  this more succinct
+         operator sets this parameter automatically, but also provides
+         correct :pep:`484` typing support as the returned object will
+         express a "boolean" datatype, i.e. ``BinaryExpression[bool]``.
+
+        :param return_type: a :class:`.TypeEngine` class or object that will
+          force the return type of an expression produced by this operator
+          to be of that type.   By default, operators that specify
+          :paramref:`.Operators.op.is_comparison` will resolve to
+          :class:`.Boolean`, and those that do not will be of the same
+          type as the left-hand operand.
+
+        :param python_impl: an optional Python function that can evaluate
+         two Python values in the same way as this operator works when
+         run on the database server.  Useful for in-Python SQL expression
+         evaluation functions, such as for ORM hybrid attributes, and the
+         ORM "evaluator" used to match objects in a session after a multi-row
+         update or delete.
+
+         e.g.::
+
+            >>> expr = column("x").op("+", python_impl=lambda a, b: a + b)("y")
+
+         The operator for the above expression will also work for non-SQL
+         left and right objects::
+
+            >>> expr.operator(5, 10)
+            15
+
+         .. versionadded:: 2.0
+
+
+        .. seealso::
+
+            :meth:`.Operators.bool_op`
+
+            :ref:`types_operators`
+
+            :ref:`relationship_custom_operator`
+
+        """
+        operator = custom_op(
+            opstring,
+            precedence,
+            is_comparison,
+            return_type,
+            python_impl=python_impl,
+        )
+
+        def against(other: Any) -> Operators:
+            return operator(self, other)
+
+        return against
+
+    def bool_op(
+        self,
+        opstring: str,
+        precedence: int = 0,
+        python_impl: Optional[Callable[..., Any]] = None,
+    ) -> Callable[[Any], Operators]:
+        """Return a custom boolean operator.
+
+        This method is shorthand for calling
+        :meth:`.Operators.op` and passing the
+        :paramref:`.Operators.op.is_comparison`
+        flag with True.    A key advantage to using :meth:`.Operators.bool_op`
+        is that when using column constructs, the "boolean" nature of the
+        returned expression will be present for :pep:`484` purposes.
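+
+        e.g., applying a backend-specific boolean operator (an
+        illustrative sketch; ``somecolumn`` stands in for any column
+        expression)::
+
+            somecolumn.bool_op("@>")(other_expression)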
+
+        .. seealso::
+
+            :meth:`.Operators.op`
+
+        """
+        return self.op(
+            opstring,
+            precedence=precedence,
+            is_comparison=True,
+            python_impl=python_impl,
+        )
+
+    def operate(
+        self, op: OperatorType, *other: Any, **kwargs: Any
+    ) -> Operators:
+        r"""Operate on an argument.
+
+        This is the lowest level of operation, raises
+        :class:`NotImplementedError` by default.
+
+        Overriding this on a subclass can allow common
+        behavior to be applied to all operations.
+        For example, overriding :class:`.ColumnOperators`
+        to apply ``func.lower()`` to the left and right
+        side::
+
+            class MyComparator(ColumnOperators):
+                def operate(self, op, other, **kwargs):
+                    return op(func.lower(self), func.lower(other), **kwargs)
+
+        :param op:  Operator callable.
+        :param \*other: the 'other' side of the operation. Will
+         be a single scalar for most operations.
+        :param \**kwargs: modifiers.  These may be passed by special
+         operators such as :meth:`ColumnOperators.contains`.
+
+
+        """
+        raise NotImplementedError(str(op))
+
+    __sa_operate__ = operate
+
+    def reverse_operate(
+        self, op: OperatorType, other: Any, **kwargs: Any
+    ) -> Operators:
+        """Reverse operate on an argument.
+
+        Usage is the same as :meth:`operate`.
+
+        """
+        raise NotImplementedError(str(op))
+
+
+class custom_op(OperatorType, Generic[_T]):
+    """Represent a 'custom' operator.
+
+    :class:`.custom_op` is normally instantiated when the
+    :meth:`.Operators.op` or :meth:`.Operators.bool_op` methods
+    are used to create a custom operator callable.  The class can also be
+    used directly when programmatically constructing expressions.   E.g.
+    to represent the "factorial" operation::
+
+        from sqlalchemy.sql import UnaryExpression
+        from sqlalchemy.sql import operators
+        from sqlalchemy import Numeric
+
+        unary = UnaryExpression(
+            table.c.somecolumn, modifier=operators.custom_op("!"), type_=Numeric
+        )
+
+    .. seealso::
+
+        :meth:`.Operators.op`
+
+        :meth:`.Operators.bool_op`
+
+    """  # noqa: E501
+
+    __name__ = "custom_op"
+
+    __slots__ = (
+        "opstring",
+        "precedence",
+        "is_comparison",
+        "natural_self_precedent",
+        "eager_grouping",
+        "return_type",
+        "python_impl",
+    )
+
+    def __init__(
+        self,
+        opstring: str,
+        precedence: int = 0,
+        is_comparison: bool = False,
+        return_type: Optional[
+            Union[Type[TypeEngine[_T]], TypeEngine[_T]]
+        ] = None,
+        natural_self_precedent: bool = False,
+        eager_grouping: bool = False,
+        python_impl: Optional[Callable[..., Any]] = None,
+    ):
+        self.opstring = opstring
+        self.precedence = precedence
+        self.is_comparison = is_comparison
+        self.natural_self_precedent = natural_self_precedent
+        self.eager_grouping = eager_grouping
+        self.return_type = (
+            return_type._to_instance(return_type) if return_type else None
+        )
+        self.python_impl = python_impl
+
+    def __eq__(self, other: Any) -> bool:
+        return (
+            isinstance(other, custom_op)
+            and other._hash_key() == self._hash_key()
+        )
+
+    def __hash__(self) -> int:
+        return hash(self._hash_key())
+
+    def _hash_key(self) -> Union[CacheConst, Tuple[Any, ...]]:
+        return (
+            self.__class__,
+            self.opstring,
+            self.precedence,
+            self.is_comparison,
+            self.natural_self_precedent,
+            self.eager_grouping,
+            self.return_type._static_cache_key if self.return_type else None,
+        )
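+
+    # descriptive note: __eq__ and __hash__ above make two custom_op
+    # instances interchangeable whenever all of their defining attributes
+    # match, so identical operators constructed separately compare and
+    # hash equally for cache-key purposes.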
+
+    @overload
+    def __call__(
+        self,
+        left: ColumnExpressionArgument[Any],
+        right: Optional[Any] = None,
+        *other: Any,
+        **kwargs: Any,
+    ) -> ColumnElement[Any]: ...
+
+    @overload
+    def __call__(
+        self,
+        left: Operators,
+        right: Optional[Any] = None,
+        *other: Any,
+        **kwargs: Any,
+    ) -> Operators: ...
+
+    def __call__(
+        self,
+        left: Any,
+        right: Optional[Any] = None,
+        *other: Any,
+        **kwargs: Any,
+    ) -> Operators:
+        if hasattr(left, "__sa_operate__"):
+            return left.operate(self, right, *other, **kwargs)  # type: ignore
+        elif self.python_impl:
+            return self.python_impl(left, right, *other, **kwargs)  # type: ignore  # noqa: E501
+        else:
+            raise exc.InvalidRequestError(
+                f"Custom operator {self.opstring!r} can't be used with "
+                "plain Python objects unless it includes the "
+                "'python_impl' parameter."
+            )
+
+
+class ColumnOperators(Operators):
+    """Defines boolean, comparison, and other operators for
+    :class:`_expression.ColumnElement` expressions.
+
+    By default, all methods call down to
+    :meth:`.operate` or :meth:`.reverse_operate`,
+    passing in the appropriate operator function from the
+    Python builtin ``operator`` module or
+    a SQLAlchemy-specific operator function from
+    :mod:`sqlalchemy.sql.operators`.   For example
+    the ``__eq__`` function::
+
+        def __eq__(self, other):
+            return self.operate(operators.eq, other)
+
+    Where ``operators.eq`` is essentially::
+
+        def eq(a, b):
+            return a == b
+
+    The core column expression unit :class:`_expression.ColumnElement`
+    overrides :meth:`.Operators.operate` and others
+    to return further :class:`_expression.ColumnElement` constructs,
+    so that the ``==`` operation above is replaced by a clause
+    construct.
+
+    .. seealso::
+
+        :ref:`types_operators`
+
+        :attr:`.TypeEngine.comparator_factory`
+
+        :class:`.ColumnOperators`
+
+        :class:`.PropComparator`
+
+    """
+
+    __slots__ = ()
+
+    timetuple: Literal[None] = None
+    """Hack, allows datetime objects to be compared on the LHS."""
+
+    if typing.TYPE_CHECKING:
+
+        def operate(
+            self, op: OperatorType, *other: Any, **kwargs: Any
+        ) -> ColumnOperators: ...
+
+        def reverse_operate(
+            self, op: OperatorType, other: Any, **kwargs: Any
+        ) -> ColumnOperators: ...
+
+    def __lt__(self, other: Any) -> ColumnOperators:
+        """Implement the ``<`` operator.
+
+        In a column context, produces the clause ``a < b``.
+
+        """
+        return self.operate(lt, other)
+
+    def __le__(self, other: Any) -> ColumnOperators:
+        """Implement the ``<=`` operator.
+
+        In a column context, produces the clause ``a <= b``.
+
+        """
+        return self.operate(le, other)
+
+    # ColumnOperators defines an __eq__, so it must also explicitly
+    # declare a __hash__ or Python sets it to None:
+    # https://docs.python.org/3/reference/datamodel.html#object.__hash__
+    if TYPE_CHECKING:
+
+        def __hash__(self) -> int: ...
+
+    else:
+        __hash__ = Operators.__hash__
+
+    def __eq__(self, other: Any) -> ColumnOperators:  # type: ignore[override]
+        """Implement the ``==`` operator.
+
+        In a column context, produces the clause ``a = b``.
+        If the target is ``None``, produces ``a IS NULL``.
+
+        """
+        return self.operate(eq, other)
+
+    def __ne__(self, other: Any) -> ColumnOperators:  # type: ignore[override]
+        """Implement the ``!=`` operator.
+
+        In a column context, produces the clause ``a != b``.
+        If the target is ``None``, produces ``a IS NOT NULL``.
+
+        """
+        return self.operate(ne, other)
+
+    def is_distinct_from(self, other: Any) -> ColumnOperators:
+        """Implement the ``IS DISTINCT FROM`` operator.
+
+        Renders "a IS DISTINCT FROM b" on most platforms;
+        on some such as SQLite may render "a IS NOT b".
+
+        """
+        return self.operate(is_distinct_from, other)
+
+    def is_not_distinct_from(self, other: Any) -> ColumnOperators:
+        """Implement the ``IS NOT DISTINCT FROM`` operator.
+
+        Renders "a IS NOT DISTINCT FROM b" on most platforms;
+        on some such as SQLite may render "a IS b".
+
+        .. versionchanged:: 1.4 The ``is_not_distinct_from()`` operator is
+           renamed from ``isnot_distinct_from()`` in previous releases.
+           The previous name remains available for backwards compatibility.
+
+        """
+        return self.operate(is_not_distinct_from, other)
+
+    # deprecated 1.4; see #5435
+    if TYPE_CHECKING:
+
+        def isnot_distinct_from(self, other: Any) -> ColumnOperators: ...
+
+    else:
+        isnot_distinct_from = is_not_distinct_from
+
+    def __gt__(self, other: Any) -> ColumnOperators:
+        """Implement the ``>`` operator.
+
+        In a column context, produces the clause ``a > b``.
+
+        """
+        return self.operate(gt, other)
+
+    def __ge__(self, other: Any) -> ColumnOperators:
+        """Implement the ``>=`` operator.
+
+        In a column context, produces the clause ``a >= b``.
+
+        """
+        return self.operate(ge, other)
+
+    def __neg__(self) -> ColumnOperators:
+        """Implement the ``-`` operator.
+
+        In a column context, produces the clause ``-a``.
+
+        """
+        return self.operate(neg)
+
+    def __contains__(self, other: Any) -> ColumnOperators:
+        return self.operate(contains, other)
+
+    def __getitem__(self, index: Any) -> ColumnOperators:
+        """Implement the [] operator.
+
+        This can be used by some database-specific types
+        such as PostgreSQL ARRAY and HSTORE.
+
+        """
+        return self.operate(getitem, index)
+
+    def __lshift__(self, other: Any) -> ColumnOperators:
+        """implement the << operator.
+
+        Not used by SQLAlchemy core, this is provided
+        for custom operator systems which want to use
+        << as an extension point.
+        """
+        return self.operate(lshift, other)
+
+    def __rshift__(self, other: Any) -> ColumnOperators:
+        """implement the >> operator.
+
+        Not used by SQLAlchemy core, this is provided
+        for custom operator systems which want to use
+        >> as an extension point.
+        """
+        return self.operate(rshift, other)
+
+    def concat(self, other: Any) -> ColumnOperators:
+        """Implement the 'concat' operator.
+
+        In a column context, produces the clause ``a || b``,
+        or uses the ``concat()`` operator on MySQL.
+
+        """
+        return self.operate(concat_op, other)
+
+    def _rconcat(self, other: Any) -> ColumnOperators:
+        """Implement an 'rconcat' operator.
+
+        This is for internal use at the moment.
+
+        .. versionadded:: 1.4.40
+
+        """
+        return self.reverse_operate(concat_op, other)
+
+    def like(
+        self, other: Any, escape: Optional[str] = None
+    ) -> ColumnOperators:
+        r"""Implement the ``like`` operator.
+
+        In a column context, produces the expression:
+
+        .. sourcecode:: sql
+
+            a LIKE other
+
+        E.g.::
+
+            stmt = select(sometable).where(sometable.c.column.like("%foobar%"))
+
+        :param other: expression to be compared
+        :param escape: optional escape character, renders the ``ESCAPE``
+          keyword, e.g.::
+
+            somecolumn.like("foo/%bar", escape="/")
+
+        .. seealso::
+
+            :meth:`.ColumnOperators.ilike`
+
+        """
+        return self.operate(like_op, other, escape=escape)
+
+    def ilike(
+        self, other: Any, escape: Optional[str] = None
+    ) -> ColumnOperators:
+        r"""Implement the ``ilike`` operator, e.g. case insensitive LIKE.
+
+        In a column context, produces an expression either of the form:
+
+        .. sourcecode:: sql
+
+            lower(a) LIKE lower(other)
+
+        Or on backends that support the ILIKE operator:
+
+        .. sourcecode:: sql
+
+            a ILIKE other
+
+        E.g.::
+
+            stmt = select(sometable).where(sometable.c.column.ilike("%foobar%"))
+
+        :param other: expression to be compared
+        :param escape: optional escape character, renders the ``ESCAPE``
+          keyword, e.g.::
+
+            somecolumn.ilike("foo/%bar", escape="/")
+
+        .. seealso::
+
+            :meth:`.ColumnOperators.like`
+
+        """  # noqa: E501
+        return self.operate(ilike_op, other, escape=escape)
+
+    def bitwise_xor(self, other: Any) -> ColumnOperators:
+        """Produce a bitwise XOR operation, typically via the ``^``
+        operator, or ``#`` for PostgreSQL.
+
+        .. versionadded:: 2.0.2
+
+        .. seealso::
+
+            :ref:`operators_bitwise`
+
+        """
+
+        return self.operate(bitwise_xor_op, other)
+
+    def bitwise_or(self, other: Any) -> ColumnOperators:
+        """Produce a bitwise OR operation, typically via the ``|``
+        operator.
+
+        .. versionadded:: 2.0.2
+
+        .. seealso::
+
+            :ref:`operators_bitwise`
+
+        """
+
+        return self.operate(bitwise_or_op, other)
+
+    def bitwise_and(self, other: Any) -> ColumnOperators:
+        """Produce a bitwise AND operation, typically via the ``&``
+        operator.
+
+        .. versionadded:: 2.0.2
+
+        .. seealso::
+
+            :ref:`operators_bitwise`
+
+        """
+
+        return self.operate(bitwise_and_op, other)
+
+    def bitwise_not(self) -> ColumnOperators:
+        """Produce a bitwise NOT operation, typically via the ``~``
+        operator.
+
+        .. versionadded:: 2.0.2
+
+        .. seealso::
+
+            :ref:`operators_bitwise`
+
+        """
+
+        return self.operate(bitwise_not_op)
+
+    def bitwise_lshift(self, other: Any) -> ColumnOperators:
+        """Produce a bitwise LSHIFT operation, typically via the ``<<``
+        operator.
+
+        .. versionadded:: 2.0.2
+
+        .. seealso::
+
+            :ref:`operators_bitwise`
+
+        """
+
+        return self.operate(bitwise_lshift_op, other)
+
+    def bitwise_rshift(self, other: Any) -> ColumnOperators:
+        """Produce a bitwise RSHIFT operation, typically via the ``>>``
+        operator.
+
+        .. versionadded:: 2.0.2
+
+        .. seealso::
+
+            :ref:`operators_bitwise`
+
+        """
+
+        return self.operate(bitwise_rshift_op, other)
+
+    def in_(self, other: Any) -> ColumnOperators:
+        """Implement the ``in`` operator.
+
+        In a column context, produces the clause ``column IN <other>``.
+
+        The given parameter ``other`` may be:
+
+        * A list of literal values,
+          e.g.::
+
+            stmt.where(column.in_([1, 2, 3]))
+
+          In this calling form, the list of items is converted to a set of
+          bound parameters the same length as the list given:
+
+          .. sourcecode:: sql
+
+            WHERE COL IN (?, ?, ?)
+
+        * A list of tuples may be provided if the comparison is against a
+          :func:`.tuple_` containing multiple expressions::
+
+            from sqlalchemy import tuple_
+
+            stmt.where(tuple_(col1, col2).in_([(1, 10), (2, 20), (3, 30)]))
+
+        * An empty list,
+          e.g.::
+
+            stmt.where(column.in_([]))
+
+          In this calling form, the expression renders an "empty set"
+          expression.  These expressions are tailored to individual backends
+          and are generally trying to get an empty SELECT statement as a
+          subquery.  For example, on SQLite the expression is:
+
+          .. sourcecode:: sql
+
+            WHERE col IN (SELECT 1 FROM (SELECT 1) WHERE 1!=1)
+
+          .. versionchanged:: 1.4  empty IN expressions now use an
+             execution-time generated SELECT subquery in all cases.
+
+        * A bound parameter, e.g. :func:`.bindparam`, may be used if it
+          includes the :paramref:`.bindparam.expanding` flag::
+
+            stmt.where(column.in_(bindparam("value", expanding=True)))
+
+          In this calling form, the expression renders a special non-SQL
+          placeholder expression that looks like:
+
+          .. sourcecode:: sql
+
+            WHERE COL IN ([EXPANDING_value])
+
+          This placeholder expression is intercepted at statement execution
+          time to be converted into the variable number of bound parameter
+          form illustrated earlier.   If the statement were executed as::
+
+            connection.execute(stmt, {"value": [1, 2, 3]})
+
+          The database would be passed a bound parameter for each value:
+
+          .. sourcecode:: sql
+
+            WHERE COL IN (?, ?, ?)
+
+          .. versionadded:: 1.2 added "expanding" bound parameters
+
+          If an empty list is passed, a special "empty list" expression,
+          which is specific to the database in use, is rendered.  On
+          SQLite this would be:
+
+          .. sourcecode:: sql
+
+            WHERE COL IN (SELECT 1 FROM (SELECT 1) WHERE 1!=1)
+
+          .. versionadded:: 1.3 "expanding" bound parameters now support
+             empty lists
+
+        * a :func:`_expression.select` construct, which is usually a
+          correlated scalar select::
+
+            stmt.where(
+                column.in_(select(othertable.c.y).where(table.c.x == othertable.c.x))
+            )
+
+          In this calling form, :meth:`.ColumnOperators.in_` renders as given:
+
+          .. sourcecode:: sql
+
+            WHERE COL IN (SELECT othertable.y
+            FROM othertable WHERE othertable.x = table.x)
+
+        :param other: a list of literals, a :func:`_expression.select`
+         construct, or a :func:`.bindparam` construct that includes the
+         :paramref:`.bindparam.expanding` flag set to True.
+
+        """  # noqa: E501
+        return self.operate(in_op, other)
+
+    def not_in(self, other: Any) -> ColumnOperators:
+        """implement the ``NOT IN`` operator.
+
+        This is equivalent to using negation with
+        :meth:`.ColumnOperators.in_`, i.e. ``~x.in_(y)``.
+
+        In the case that ``other`` is an empty sequence, the compiler
+        produces an "empty not in" expression.   This defaults to the
+        expression "1 = 1" to produce true in all cases.  The
+        :paramref:`_sa.create_engine.empty_in_strategy` may be used to
+        alter this behavior.
+
+        .. versionchanged:: 1.4 The ``not_in()`` operator is renamed from
+           ``notin_()`` in previous releases.  The previous name remains
+           available for backwards compatibility.
+
+        .. versionchanged:: 1.2  The :meth:`.ColumnOperators.in_` and
+           :meth:`.ColumnOperators.not_in` operators
+           now produce a "static" expression for an empty IN sequence
+           by default.
+
+        .. seealso::
+
+            :meth:`.ColumnOperators.in_`
+
+        """
+        return self.operate(not_in_op, other)
+
+    # deprecated 1.4; see #5429
+    if TYPE_CHECKING:
+
+        def notin_(self, other: Any) -> ColumnOperators: ...
+
+    else:
+        notin_ = not_in
+
+    def not_like(
+        self, other: Any, escape: Optional[str] = None
+    ) -> ColumnOperators:
+        """implement the ``NOT LIKE`` operator.
+
+        This is equivalent to using negation with
+        :meth:`.ColumnOperators.like`, i.e. ``~x.like(y)``.
+
+        .. versionchanged:: 1.4 The ``not_like()`` operator is renamed from
+           ``notlike()`` in previous releases.  The previous name remains
+           available for backwards compatibility.
+
+        .. seealso::
+
+            :meth:`.ColumnOperators.like`
+
+        """
+        return self.operate(not_like_op, other, escape=escape)
+
+    # deprecated 1.4; see #5435
+    if TYPE_CHECKING:
+
+        def notlike(
+            self, other: Any, escape: Optional[str] = None
+        ) -> ColumnOperators: ...
+
+    else:
+        notlike = not_like
+
+    def not_ilike(
+        self, other: Any, escape: Optional[str] = None
+    ) -> ColumnOperators:
+        """implement the ``NOT ILIKE`` operator.
+
+        This is equivalent to using negation with
+        :meth:`.ColumnOperators.ilike`, i.e. ``~x.ilike(y)``.
+
+        .. versionchanged:: 1.4 The ``not_ilike()`` operator is renamed from
+           ``notilike()`` in previous releases.  The previous name remains
+           available for backwards compatibility.
+
+        .. seealso::
+
+            :meth:`.ColumnOperators.ilike`
+
+        """
+        return self.operate(not_ilike_op, other, escape=escape)
+
+    # deprecated 1.4; see #5435
+    if TYPE_CHECKING:
+
+        def notilike(
+            self, other: Any, escape: Optional[str] = None
+        ) -> ColumnOperators: ...
+
+    else:
+        notilike = not_ilike
+
+    def is_(self, other: Any) -> ColumnOperators:
+        """Implement the ``IS`` operator.
+
+        Normally, ``IS`` is generated automatically when comparing to a
+        value of ``None``, which resolves to ``NULL``.  However, explicit
+        usage of ``IS`` may be desirable if comparing to boolean values
+        on certain platforms.
+
+        .. seealso:: :meth:`.ColumnOperators.is_not`
+
+        """
+        return self.operate(is_, other)
+
+    def is_not(self, other: Any) -> ColumnOperators:
+        """Implement the ``IS NOT`` operator.
+
+        Normally, ``IS NOT`` is generated automatically when comparing to a
+        value of ``None``, which resolves to ``NULL``.  However, explicit
+        usage of ``IS NOT`` may be desirable if comparing to boolean values
+        on certain platforms.
+
+        .. versionchanged:: 1.4 The ``is_not()`` operator is renamed from
+           ``isnot()`` in previous releases.  The previous name remains
+           available for backwards compatibility.
+
+        .. seealso:: :meth:`.ColumnOperators.is_`
+
+        """
+        return self.operate(is_not, other)
+
+    # deprecated 1.4; see #5429
+    if TYPE_CHECKING:
+
+        def isnot(self, other: Any) -> ColumnOperators: ...
+
+    else:
+        isnot = is_not
+
+    def startswith(
+        self,
+        other: Any,
+        escape: Optional[str] = None,
+        autoescape: bool = False,
+    ) -> ColumnOperators:
+        r"""Implement the ``startswith`` operator.
+
+        Produces a LIKE expression that tests against a match for the start
+        of a string value:
+
+        .. sourcecode:: sql
+
+            column LIKE <other> || '%'
+
+        E.g.::
+
+            stmt = select(sometable).where(sometable.c.column.startswith("foobar"))
+
+        Since the operator uses ``LIKE``, wildcard characters
+        ``"%"`` and ``"_"`` that are present inside the <other> expression
+        will behave like wildcards as well.   For literal string
+        values, the :paramref:`.ColumnOperators.startswith.autoescape` flag
+        may be set to ``True`` to apply escaping to occurrences of these
+        characters within the string value so that they match as themselves
+        and not as wildcard characters.  Alternatively, the
+        :paramref:`.ColumnOperators.startswith.escape` parameter will establish
+        a given character as an escape character which can be of use when
+        the target expression is not a literal string.
+
+        :param other: expression to be compared.   This is usually a plain
+          string value, but can also be an arbitrary SQL expression.  LIKE
+          wildcard characters ``%`` and ``_`` are not escaped by default unless
+          the :paramref:`.ColumnOperators.startswith.autoescape` flag is
+          set to True.
+
+        :param autoescape: boolean; when True, establishes an escape character
+          within the LIKE expression, then applies it to all occurrences of
+          ``"%"``, ``"_"`` and the escape character itself within the
+          comparison value, which is assumed to be a literal string and not a
+          SQL expression.
+
+          An expression such as::
+
+            somecolumn.startswith("foo%bar", autoescape=True)
+
+          Will render as:
+
+          .. sourcecode:: sql
+
+            somecolumn LIKE :param || '%' ESCAPE '/'
+
+          With the value of ``:param`` as ``"foo/%bar"``.
+
+        :param escape: a character which when given will render with the
+          ``ESCAPE`` keyword to establish that character as the escape
+          character.  This character can then be placed preceding occurrences
+          of ``%`` and ``_`` to allow them to act as themselves and not
+          wildcard characters.
+
+          An expression such as::
+
+            somecolumn.startswith("foo/%bar", escape="^")
+
+          Will render as:
+
+          .. sourcecode:: sql
+
+            somecolumn LIKE :param || '%' ESCAPE '^'
+
+          The parameter may also be combined with
+          :paramref:`.ColumnOperators.startswith.autoescape`::
+
+            somecolumn.startswith("foo%bar^bat", escape="^", autoescape=True)
+
+          Where above, the given literal parameter will be converted to
+          ``"foo^%bar^^bat"`` before being passed to the database.
+
+        .. seealso::
+
+            :meth:`.ColumnOperators.endswith`
+
+            :meth:`.ColumnOperators.contains`
+
+            :meth:`.ColumnOperators.like`
+
+        """  # noqa: E501
+        return self.operate(
+            startswith_op, other, escape=escape, autoescape=autoescape
+        )
+
+    def istartswith(
+        self,
+        other: Any,
+        escape: Optional[str] = None,
+        autoescape: bool = False,
+    ) -> ColumnOperators:
+        r"""Implement the ``istartswith`` operator, e.g. case insensitive
+        version of :meth:`.ColumnOperators.startswith`.
+
+        Produces a LIKE expression that tests against an insensitive
+        match for the start of a string value:
+
+        .. sourcecode:: sql
+
+            lower(column) LIKE lower(<other>) || '%'
+
+        E.g.::
+
+            stmt = select(sometable).where(sometable.c.column.istartswith("foobar"))
+
+        Since the operator uses ``LIKE``, wildcard characters
+        ``"%"`` and ``"_"`` that are present inside the <other> expression
+        will behave like wildcards as well.   For literal string
+        values, the :paramref:`.ColumnOperators.istartswith.autoescape` flag
+        may be set to ``True`` to apply escaping to occurrences of these
+        characters within the string value so that they match as themselves
+        and not as wildcard characters.  Alternatively, the
+        :paramref:`.ColumnOperators.istartswith.escape` parameter will
+        establish a given character as an escape character which can be of
+        use when the target expression is not a literal string.
+
+        :param other: expression to be compared.   This is usually a plain
+          string value, but can also be an arbitrary SQL expression.  LIKE
+          wildcard characters ``%`` and ``_`` are not escaped by default unless
+          the :paramref:`.ColumnOperators.istartswith.autoescape` flag is
+          set to True.
+
+        :param autoescape: boolean; when True, establishes an escape character
+          within the LIKE expression, then applies it to all occurrences of
+          ``"%"``, ``"_"`` and the escape character itself within the
+          comparison value, which is assumed to be a literal string and not a
+          SQL expression.
+
+          An expression such as::
+
+            somecolumn.istartswith("foo%bar", autoescape=True)
+
+          Will render as:
+
+          .. sourcecode:: sql
+
+            lower(somecolumn) LIKE lower(:param) || '%' ESCAPE '/'
+
+          With the value of ``:param`` as ``"foo/%bar"``.
+
+        :param escape: a character which when given will render with the
+          ``ESCAPE`` keyword to establish that character as the escape
+          character.  This character can then be placed preceding occurrences
+          of ``%`` and ``_`` to allow them to act as themselves and not
+          wildcard characters.
+
+          An expression such as::
+
+            somecolumn.istartswith("foo/%bar", escape="^")
+
+          Will render as:
+
+          .. sourcecode:: sql
+
+            lower(somecolumn) LIKE lower(:param) || '%' ESCAPE '^'
+
+          The parameter may also be combined with
+          :paramref:`.ColumnOperators.istartswith.autoescape`::
+
+            somecolumn.istartswith("foo%bar^bat", escape="^", autoescape=True)
+
+          Where above, the given literal parameter will be converted to
+          ``"foo^%bar^^bat"`` before being passed to the database.
+
+        .. seealso::
+
+            :meth:`.ColumnOperators.startswith`
+        """  # noqa: E501
+        return self.operate(
+            istartswith_op, other, escape=escape, autoescape=autoescape
+        )
+
+    def endswith(
+        self,
+        other: Any,
+        escape: Optional[str] = None,
+        autoescape: bool = False,
+    ) -> ColumnOperators:
+        r"""Implement the 'endswith' operator.
+
+        Produces a LIKE expression that tests against a match for the end
+        of a string value:
+
+        .. sourcecode:: sql
+
+            column LIKE '%' || <other>
+
+        E.g.::
+
+            stmt = select(sometable).where(sometable.c.column.endswith("foobar"))
+
+        Since the operator uses ``LIKE``, wildcard characters
+        ``"%"`` and ``"_"`` that are present inside the <other> expression
+        will behave like wildcards as well.   For literal string
+        values, the :paramref:`.ColumnOperators.endswith.autoescape` flag
+        may be set to ``True`` to apply escaping to occurrences of these
+        characters within the string value so that they match as themselves
+        and not as wildcard characters.  Alternatively, the
+        :paramref:`.ColumnOperators.endswith.escape` parameter will establish
+        a given character as an escape character which can be of use when
+        the target expression is not a literal string.
+
+        :param other: expression to be compared.   This is usually a plain
+          string value, but can also be an arbitrary SQL expression.  LIKE
+          wildcard characters ``%`` and ``_`` are not escaped by default unless
+          the :paramref:`.ColumnOperators.endswith.autoescape` flag is
+          set to True.
+
+        :param autoescape: boolean; when True, establishes an escape character
+          within the LIKE expression, then applies it to all occurrences of
+          ``"%"``, ``"_"`` and the escape character itself within the
+          comparison value, which is assumed to be a literal string and not a
+          SQL expression.
+
+          An expression such as::
+
+            somecolumn.endswith("foo%bar", autoescape=True)
+
+          Will render as:
+
+          .. sourcecode:: sql
+
+            somecolumn LIKE '%' || :param ESCAPE '/'
+
+          With the value of ``:param`` as ``"foo/%bar"``.
+
+        :param escape: a character which when given will render with the
+          ``ESCAPE`` keyword to establish that character as the escape
+          character.  This character can then be placed preceding occurrences
+          of ``%`` and ``_`` to allow them to act as themselves and not
+          wildcard characters.
+
+          An expression such as::
+
+            somecolumn.endswith("foo/%bar", escape="^")
+
+          Will render as:
+
+          .. sourcecode:: sql
+
+            somecolumn LIKE '%' || :param ESCAPE '^'
+
+          The parameter may also be combined with
+          :paramref:`.ColumnOperators.endswith.autoescape`::
+
+            somecolumn.endswith("foo%bar^bat", escape="^", autoescape=True)
+
+          Where above, the given literal parameter will be converted to
+          ``"foo^%bar^^bat"`` before being passed to the database.
+
+        .. seealso::
+
+            :meth:`.ColumnOperators.startswith`
+
+            :meth:`.ColumnOperators.contains`
+
+            :meth:`.ColumnOperators.like`
+
+        """  # noqa: E501
+        return self.operate(
+            endswith_op, other, escape=escape, autoescape=autoescape
+        )
+
+    def iendswith(
+        self,
+        other: Any,
+        escape: Optional[str] = None,
+        autoescape: bool = False,
+    ) -> ColumnOperators:
+        r"""Implement the ``iendswith`` operator, e.g. case insensitive
+        version of :meth:`.ColumnOperators.endswith`.
+
+        Produces a LIKE expression that tests against an insensitive match
+        for the end of a string value:
+
+        .. sourcecode:: sql
+
+            lower(column) LIKE '%' || lower(<other>)
+
+        E.g.::
+
+            stmt = select(sometable).where(sometable.c.column.iendswith("foobar"))
+
+        Since the operator uses ``LIKE``, wildcard characters
+        ``"%"`` and ``"_"`` that are present inside the <other> expression
+        will behave like wildcards as well.   For literal string
+        values, the :paramref:`.ColumnOperators.iendswith.autoescape` flag
+        may be set to ``True`` to apply escaping to occurrences of these
+        characters within the string value so that they match as themselves
+        and not as wildcard characters.  Alternatively, the
+        :paramref:`.ColumnOperators.iendswith.escape` parameter will establish
+        a given character as an escape character which can be of use when
+        the target expression is not a literal string.
+
+        :param other: expression to be compared.   This is usually a plain
+          string value, but can also be an arbitrary SQL expression.  LIKE
+          wildcard characters ``%`` and ``_`` are not escaped by default unless
+          the :paramref:`.ColumnOperators.iendswith.autoescape` flag is
+          set to True.
+
+        :param autoescape: boolean; when True, establishes an escape character
+          within the LIKE expression, then applies it to all occurrences of
+          ``"%"``, ``"_"`` and the escape character itself within the
+          comparison value, which is assumed to be a literal string and not a
+          SQL expression.
+
+          An expression such as::
+
+            somecolumn.iendswith("foo%bar", autoescape=True)
+
+          Will render as:
+
+          .. sourcecode:: sql
+
+            lower(somecolumn) LIKE '%' || lower(:param) ESCAPE '/'
+
+          With the value of ``:param`` as ``"foo/%bar"``.
+
+        :param escape: a character which when given will render with the
+          ``ESCAPE`` keyword to establish that character as the escape
+          character.  This character can then be placed preceding occurrences
+          of ``%`` and ``_`` to allow them to act as themselves and not
+          wildcard characters.
+
+          An expression such as::
+
+            somecolumn.iendswith("foo/%bar", escape="^")
+
+          Will render as:
+
+          .. sourcecode:: sql
+
+            lower(somecolumn) LIKE '%' || lower(:param) ESCAPE '^'
+
+          The parameter may also be combined with
+          :paramref:`.ColumnOperators.iendswith.autoescape`::
+
+            somecolumn.iendswith("foo%bar^bat", escape="^", autoescape=True)
+
+          Where above, the given literal parameter will be converted to
+          ``"foo^%bar^^bat"`` before being passed to the database.
+
+        .. seealso::
+
+            :meth:`.ColumnOperators.endswith`
+        """  # noqa: E501
+        return self.operate(
+            iendswith_op, other, escape=escape, autoescape=autoescape
+        )
+
+    def contains(self, other: Any, **kw: Any) -> ColumnOperators:
+        r"""Implement the 'contains' operator.
+
+        Produces a LIKE expression that tests against a match for the middle
+        of a string value:
+
+        .. sourcecode:: sql
+
+            column LIKE '%' || <other> || '%'
+
+        E.g.::
+
+            stmt = select(sometable).where(sometable.c.column.contains("foobar"))
+
+        Since the operator uses ``LIKE``, wildcard characters
+        ``"%"`` and ``"_"`` that are present inside the <other> expression
+        will behave like wildcards as well.   For literal string
+        values, the :paramref:`.ColumnOperators.contains.autoescape` flag
+        may be set to ``True`` to apply escaping to occurrences of these
+        characters within the string value so that they match as themselves
+        and not as wildcard characters.  Alternatively, the
+        :paramref:`.ColumnOperators.contains.escape` parameter will establish
+        a given character as an escape character which can be of use when
+        the target expression is not a literal string.
+
+        :param other: expression to be compared.   This is usually a plain
+          string value, but can also be an arbitrary SQL expression.  LIKE
+          wildcard characters ``%`` and ``_`` are not escaped by default unless
+          the :paramref:`.ColumnOperators.contains.autoescape` flag is
+          set to True.
+
+        :param autoescape: boolean; when True, establishes an escape character
+          within the LIKE expression, then applies it to all occurrences of
+          ``"%"``, ``"_"`` and the escape character itself within the
+          comparison value, which is assumed to be a literal string and not a
+          SQL expression.
+
+          An expression such as::
+
+            somecolumn.contains("foo%bar", autoescape=True)
+
+          Will render as:
+
+          .. sourcecode:: sql
+
+            somecolumn LIKE '%' || :param || '%' ESCAPE '/'
+
+          With the value of ``:param`` as ``"foo/%bar"``.
+
+        :param escape: a character which when given will render with the
+          ``ESCAPE`` keyword to establish that character as the escape
+          character.  This character can then be placed preceding occurrences
+          of ``%`` and ``_`` to allow them to act as themselves and not
+          wildcard characters.
+
+          An expression such as::
+
+            somecolumn.contains("foo/%bar", escape="^")
+
+          Will render as:
+
+          .. sourcecode:: sql
+
+            somecolumn LIKE '%' || :param || '%' ESCAPE '^'
+
+          The parameter may also be combined with
+          :paramref:`.ColumnOperators.contains.autoescape`::
+
+            somecolumn.contains("foo%bar^bat", escape="^", autoescape=True)
+
+          Where above, the given literal parameter will be converted to
+          ``"foo^%bar^^bat"`` before being passed to the database.
+
+        .. seealso::
+
+            :meth:`.ColumnOperators.startswith`
+
+            :meth:`.ColumnOperators.endswith`
+
+            :meth:`.ColumnOperators.like`
+
+        """  # noqa: E501
+        return self.operate(contains_op, other, **kw)
+
+    def icontains(self, other: Any, **kw: Any) -> ColumnOperators:
+        r"""Implement the ``icontains`` operator, e.g. case insensitive
+        version of :meth:`.ColumnOperators.contains`.
+
+        Produces a LIKE expression that tests against an insensitive match
+        for the middle of a string value:
+
+        .. sourcecode:: sql
+
+            lower(column) LIKE '%' || lower(<other>) || '%'
+
+        E.g.::
+
+            stmt = select(sometable).where(sometable.c.column.icontains("foobar"))
+
+        Since the operator uses ``LIKE``, wildcard characters
+        ``"%"`` and ``"_"`` that are present inside the <other> expression
+        will behave like wildcards as well.   For literal string
+        values, the :paramref:`.ColumnOperators.icontains.autoescape` flag
+        may be set to ``True`` to apply escaping to occurrences of these
+        characters within the string value so that they match as themselves
+        and not as wildcard characters.  Alternatively, the
+        :paramref:`.ColumnOperators.icontains.escape` parameter will establish
+        a given character as an escape character which can be of use when
+        the target expression is not a literal string.
+
+        :param other: expression to be compared.   This is usually a plain
+          string value, but can also be an arbitrary SQL expression.  LIKE
+          wildcard characters ``%`` and ``_`` are not escaped by default unless
+          the :paramref:`.ColumnOperators.icontains.autoescape` flag is
+          set to True.
+
+        :param autoescape: boolean; when True, establishes an escape character
+          within the LIKE expression, then applies it to all occurrences of
+          ``"%"``, ``"_"`` and the escape character itself within the
+          comparison value, which is assumed to be a literal string and not a
+          SQL expression.
+
+          An expression such as::
+
+            somecolumn.icontains("foo%bar", autoescape=True)
+
+          Will render as:
+
+          .. sourcecode:: sql
+
+            lower(somecolumn) LIKE '%' || lower(:param) || '%' ESCAPE '/'
+
+          With the value of ``:param`` as ``"foo/%bar"``.
+
+        :param escape: a character which when given will render with the
+          ``ESCAPE`` keyword to establish that character as the escape
+          character.  This character can then be placed preceding occurrences
+          of ``%`` and ``_`` to allow them to act as themselves and not
+          wildcard characters.
+
+          An expression such as::
+
+            somecolumn.icontains("foo/%bar", escape="^")
+
+          Will render as:
+
+          .. sourcecode:: sql
+
+            lower(somecolumn) LIKE '%' || lower(:param) || '%' ESCAPE '^'
+
+          The parameter may also be combined with
+          :paramref:`.ColumnOperators.icontains.autoescape`::
+
+            somecolumn.icontains("foo%bar^bat", escape="^", autoescape=True)
+
+          Where above, the given literal parameter will be converted to
+          ``"foo^%bar^^bat"`` before being passed to the database.
+
+        .. seealso::
+
+            :meth:`.ColumnOperators.contains`
+
+        """  # noqa: E501
+        return self.operate(icontains_op, other, **kw)
+
+    def match(self, other: Any, **kwargs: Any) -> ColumnOperators:
+        """Implements a database-specific 'match' operator.
+
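+        E.g.::
+
+            # an illustrative sketch only; assumes ``sometable`` has a
+            # text column named ``description``
+            stmt = select(sometable).where(
+                sometable.c.description.match("sqlalchemy")
+            )
+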
+        :meth:`_sql.ColumnOperators.match` attempts to resolve to
+        a MATCH-like function or operator provided by the backend.
+        Examples include:
+
+        * PostgreSQL - renders ``x @@ plainto_tsquery(y)``
+
+            .. versionchanged:: 2.0  ``plainto_tsquery()`` is used instead
+               of ``to_tsquery()`` for PostgreSQL now; for compatibility with
+               other forms, see :ref:`postgresql_match`.
+
+        * MySQL - renders ``MATCH (x) AGAINST (y IN BOOLEAN MODE)``
+
+          .. seealso::
+
+                :class:`_mysql.match` - MySQL specific construct with
+                additional features.
+
+        * Oracle Database - renders ``CONTAINS(x, y)``
+        * other backends may provide special implementations.
+        * Backends without any special implementation will emit
+          the operator as "MATCH".  This is compatible with SQLite, for
+          example.
+
+        """
+        return self.operate(match_op, other, **kwargs)
+
+    def regexp_match(
+        self, pattern: Any, flags: Optional[str] = None
+    ) -> ColumnOperators:
+        """Implements a database-specific 'regexp match' operator.
+
+        E.g.::
+
+            stmt = select(table.c.some_column).where(
+                table.c.some_column.regexp_match("^(b|c)")
+            )
+
+        :meth:`_sql.ColumnOperators.regexp_match` attempts to resolve to
+        a REGEXP-like function or operator provided by the backend, however
+        the specific regular expression syntax and flags available are
+        **not backend agnostic**.
+
+        Examples include:
+
+        * PostgreSQL - renders ``x ~ y`` or ``x !~ y`` when negated.
+        * Oracle Database - renders ``REGEXP_LIKE(x, y)``
+        * SQLite - uses SQLite's ``REGEXP`` placeholder operator and calls into
+          the Python ``re.match()`` builtin.
+        * other backends may provide special implementations.
+        * Backends without any special implementation will emit
+          the operator as "REGEXP" or "NOT REGEXP".  This is compatible with
+          SQLite and MySQL, for example.
+
+        Regular expression support is currently implemented for Oracle
+        Database, PostgreSQL, MySQL and MariaDB.  Partial support is available
+        for SQLite.  Support among third-party dialects may vary.
+
+        :param pattern: The regular expression pattern string or column
+          clause.
+        :param flags: Any regular expression string flags to apply, passed as
+          plain Python string only.  These flags are backend specific.
+          Some backends, like PostgreSQL and MariaDB, may alternatively
+          specify the flags as part of the pattern.
+          When using the ignore case flag 'i' in PostgreSQL, the ignore case
+          regexp match operator ``~*`` or ``!~*`` will be used.
+
+        .. versionadded:: 1.4
+
+        .. versionchanged:: 1.4.48, 2.0.18  Note that due to an implementation
+           error, the "flags" parameter previously accepted SQL expression
+           objects such as column expressions in addition to plain Python
+           strings.   This implementation did not work correctly with caching
+           and was removed; strings only should be passed for the "flags"
+           parameter, as these flags are rendered as literal inline values
+           within SQL expressions.
+
+        .. seealso::
+
+            :meth:`_sql.ColumnOperators.regexp_replace`
+
+        """
+        return self.operate(regexp_match_op, pattern, flags=flags)
+
+    def regexp_replace(
+        self, pattern: Any, replacement: Any, flags: Optional[str] = None
+    ) -> ColumnOperators:
+        """Implements a database-specific 'regexp replace' operator.
+
+        E.g.::
+
+            stmt = select(
+                table.c.some_column.regexp_replace("b(..)", "X\1Y", flags="g")
+            )
+
+        :meth:`_sql.ColumnOperators.regexp_replace` attempts to resolve to
+        a REGEXP_REPLACE-like function provided by the backend, which
+        usually emits the function ``REGEXP_REPLACE()``.  However,
+        the specific regular expression syntax and flags available are
+        **not backend agnostic**.
+
+        Regular expression replacement support is currently implemented for
+        Oracle Database, PostgreSQL, MySQL 8 or greater and MariaDB.  Support
+        among third-party dialects may vary.
+
+        :param pattern: The regular expression pattern string or column
+          clause.
+        :param replacement: The replacement string or column clause.
+        :param flags: Any regular expression string flags to apply, passed as
+          plain Python string only.  These flags are backend specific.
+          Some backends, like PostgreSQL and MariaDB, may alternatively
+          specify the flags as part of the pattern.
+
+        .. versionadded:: 1.4
+
+        .. versionchanged:: 1.4.48, 2.0.18  Note that due to an implementation
+           error, the "flags" parameter previously accepted SQL expression
+           objects such as column expressions in addition to plain Python
+           strings.   This implementation did not work correctly with caching
+           and was removed; strings only should be passed for the "flags"
+           parameter, as these flags are rendered as literal inline values
+           within SQL expressions.
+
+        .. seealso::
+
+            :meth:`_sql.ColumnOperators.regexp_match`
+
+        """
+        return self.operate(
+            regexp_replace_op,
+            pattern,
+            replacement=replacement,
+            flags=flags,
+        )
+
+    def desc(self) -> ColumnOperators:
+        """Produce a :func:`_expression.desc` clause against the
+        parent object."""
+        return self.operate(desc_op)
+
+    def asc(self) -> ColumnOperators:
+        """Produce a :func:`_expression.asc` clause against the
+        parent object."""
+        return self.operate(asc_op)
+
+    def nulls_first(self) -> ColumnOperators:
+        """Produce a :func:`_expression.nulls_first` clause against the
+        parent object.
+
+        .. versionchanged:: 1.4 The ``nulls_first()`` operator is
+           renamed from ``nullsfirst()`` in previous releases.
+           The previous name remains available for backwards compatibility.
+        """
+        return self.operate(nulls_first_op)
+
+    # deprecated 1.4; see #5435
+    if TYPE_CHECKING:
+
+        def nullsfirst(self) -> ColumnOperators: ...
+
+    else:
+        nullsfirst = nulls_first
+
+    def nulls_last(self) -> ColumnOperators:
+        """Produce a :func:`_expression.nulls_last` clause against the
+        parent object.
+
+        .. versionchanged:: 1.4 The ``nulls_last()`` operator is
+           renamed from ``nullslast()`` in previous releases.
+           The previous name remains available for backwards compatibility.
+        """
+        return self.operate(nulls_last_op)
+
+    # deprecated 1.4; see #5429
+    if TYPE_CHECKING:
+
+        def nullslast(self) -> ColumnOperators: ...
+
+    else:
+        nullslast = nulls_last
+
+    def collate(self, collation: str) -> ColumnOperators:
+        """Produce a :func:`_expression.collate` clause against
+        the parent object, given the collation string.
+
+        .. seealso::
+
+            :func:`_expression.collate`
+
+        """
+        return self.operate(collate, collation)
+
+    def __radd__(self, other: Any) -> ColumnOperators:
+        """Implement the ``+`` operator in reverse.
+
+        See :meth:`.ColumnOperators.__add__`.
+
+        """
+        return self.reverse_operate(add, other)
+
+    def __rsub__(self, other: Any) -> ColumnOperators:
+        """Implement the ``-`` operator in reverse.
+
+        See :meth:`.ColumnOperators.__sub__`.
+
+        """
+        return self.reverse_operate(sub, other)
+
+    def __rmul__(self, other: Any) -> ColumnOperators:
+        """Implement the ``*`` operator in reverse.
+
+        See :meth:`.ColumnOperators.__mul__`.
+
+        """
+        return self.reverse_operate(mul, other)
+
+    def __rmod__(self, other: Any) -> ColumnOperators:
+        """Implement the ``%`` operator in reverse.
+
+        See :meth:`.ColumnOperators.__mod__`.
+
+        """
+        return self.reverse_operate(mod, other)
+
+    def between(
+        self, cleft: Any, cright: Any, symmetric: bool = False
+    ) -> ColumnOperators:
+        """Produce a :func:`_expression.between` clause against
+        the parent object, given the lower and upper range.
+
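+        E.g.::
+
+            # an illustrative sketch; renders a BETWEEN comparison
+            # against two bound parameter values
+            stmt = select(sometable).where(sometable.c.value.between(5, 10))
+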
+        """
+        return self.operate(between_op, cleft, cright, symmetric=symmetric)
+
+    def distinct(self) -> ColumnOperators:
+        """Produce a :func:`_expression.distinct` clause against the
+        parent object.
+
+        """
+        return self.operate(distinct_op)
+
+    def any_(self) -> ColumnOperators:
+        """Produce an :func:`_expression.any_` clause against the
+        parent object.
+
+        See the documentation for :func:`_sql.any_` for examples.
+
+        .. note:: be sure not to confuse the newer
+            :meth:`_sql.ColumnOperators.any_` method with the **legacy**
+            version of this method, the :meth:`_types.ARRAY.Comparator.any`
+            method that's specific to :class:`_types.ARRAY`, which uses a
+            different calling style.
+
+        """
+        return self.operate(any_op)
+
+    def all_(self) -> ColumnOperators:
+        """Produce an :func:`_expression.all_` clause against the
+        parent object.
+
+        See the documentation for :func:`_sql.all_` for examples.
+
+        .. note:: be sure not to confuse the newer
+            :meth:`_sql.ColumnOperators.all_` method with the **legacy**
+            version of this method, the :meth:`_types.ARRAY.Comparator.all`
+            method that's specific to :class:`_types.ARRAY`, which uses a
+            different calling style.
+
+        """
+        return self.operate(all_op)
+
+    def __add__(self, other: Any) -> ColumnOperators:
+        """Implement the ``+`` operator.
+
+        In a column context, produces the clause ``a + b``
+        if the parent object has non-string affinity.
+        If the parent object has a string affinity,
+        produces the concatenation operator, ``a || b`` -
+        see :meth:`.ColumnOperators.concat`.
+
+        """
+        return self.operate(add, other)
+
+    def __sub__(self, other: Any) -> ColumnOperators:
+        """Implement the ``-`` operator.
+
+        In a column context, produces the clause ``a - b``.
+
+        """
+        return self.operate(sub, other)
+
+    def __mul__(self, other: Any) -> ColumnOperators:
+        """Implement the ``*`` operator.
+
+        In a column context, produces the clause ``a * b``.
+
+        """
+        return self.operate(mul, other)
+
+    def __mod__(self, other: Any) -> ColumnOperators:
+        """Implement the ``%`` operator.
+
+        In a column context, produces the clause ``a % b``.
+
+        """
+        return self.operate(mod, other)
+
+    def __truediv__(self, other: Any) -> ColumnOperators:
+        """Implement the ``/`` operator.
+
+        In a column context, produces the clause ``a / b``, and
+        considers the result type to be numeric.
+
+        .. versionchanged:: 2.0  The truediv operator against two integers
+           is now considered to return a numeric value.    Behavior on specific
+           backends may vary.
+
+        """
+        return self.operate(truediv, other)
+
+    def __rtruediv__(self, other: Any) -> ColumnOperators:
+        """Implement the ``/`` operator in reverse.
+
+        See :meth:`.ColumnOperators.__truediv__`.
+
+        """
+        return self.reverse_operate(truediv, other)
+
+    def __floordiv__(self, other: Any) -> ColumnOperators:
+        """Implement the ``//`` operator.
+
+        In a column context, produces the clause ``a / b``,
+        which is the same as "truediv", but considers the result
+        type to be integer.
+
+        .. versionadded:: 2.0
+
+        """
+        return self.operate(floordiv, other)
+
+    def __rfloordiv__(self, other: Any) -> ColumnOperators:
+        """Implement the ``//`` operator in reverse.
+
+        See :meth:`.ColumnOperators.__floordiv__`.
+
+        """
+        return self.reverse_operate(floordiv, other)
+
+
+_commutative: Set[Any] = {eq, ne, add, mul}
+_comparison: Set[Any] = {eq, ne, lt, gt, ge, le}
+
+
+def _operator_fn(fn: Callable[..., Any]) -> OperatorType:
+    return cast(OperatorType, fn)
+
+
+def commutative_op(fn: _FN) -> _FN:
+    _commutative.add(fn)
+    return fn
+
+
+def comparison_op(fn: _FN) -> _FN:
+    _comparison.add(fn)
+    return fn
+
+
+@_operator_fn
+def from_() -> Any:
+    raise NotImplementedError()
+
+
+@_operator_fn
+@comparison_op
+def function_as_comparison_op() -> Any:
+    raise NotImplementedError()
+
+
+@_operator_fn
+def as_() -> Any:
+    raise NotImplementedError()
+
+
+@_operator_fn
+def exists() -> Any:
+    raise NotImplementedError()
+
+
+@_operator_fn
+def is_true(a: Any) -> Any:
+    raise NotImplementedError()
+
+
+# 1.4 deprecated; see #5435
+if TYPE_CHECKING:
+
+    @_operator_fn
+    def istrue(a: Any) -> Any: ...
+
+else:
+    istrue = is_true
+
+
+@_operator_fn
+def is_false(a: Any) -> Any:
+    raise NotImplementedError()
+
+
+# 1.4 deprecated; see #5435
+if TYPE_CHECKING:
+
+    @_operator_fn
+    def isfalse(a: Any) -> Any: ...
+
+else:
+    isfalse = is_false
+
+
+@comparison_op
+@_operator_fn
+def is_distinct_from(a: Any, b: Any) -> Any:
+    return a.is_distinct_from(b)
+
+
+@comparison_op
+@_operator_fn
+def is_not_distinct_from(a: Any, b: Any) -> Any:
+    return a.is_not_distinct_from(b)
+
+
+# deprecated 1.4; see #5435
+if TYPE_CHECKING:
+
+    @_operator_fn
+    def isnot_distinct_from(a: Any, b: Any) -> Any: ...
+
+else:
+    isnot_distinct_from = is_not_distinct_from
+
+
+@comparison_op
+@_operator_fn
+def is_(a: Any, b: Any) -> Any:
+    return a.is_(b)
+
+
+@comparison_op
+@_operator_fn
+def is_not(a: Any, b: Any) -> Any:
+    return a.is_not(b)
+
+
+# 1.4 deprecated; see #5429
+if TYPE_CHECKING:
+
+    @_operator_fn
+    def isnot(a: Any, b: Any) -> Any: ...
+
+else:
+    isnot = is_not
+
+
+@_operator_fn
+def collate(a: Any, b: Any) -> Any:
+    return a.collate(b)
+
+
+@_operator_fn
+def op(a: Any, opstring: str, b: Any) -> Any:
+    return a.op(opstring)(b)
+
+
+@comparison_op
+@_operator_fn
+def like_op(a: Any, b: Any, escape: Optional[str] = None) -> Any:
+    return a.like(b, escape=escape)
+
+
+@comparison_op
+@_operator_fn
+def not_like_op(a: Any, b: Any, escape: Optional[str] = None) -> Any:
+    return a.notlike(b, escape=escape)
+
+
+# 1.4 deprecated; see #5435
+if TYPE_CHECKING:
+
+    @_operator_fn
+    def notlike_op(a: Any, b: Any, escape: Optional[str] = None) -> Any: ...
+
+else:
+    notlike_op = not_like_op
+
+
+@comparison_op
+@_operator_fn
+def ilike_op(a: Any, b: Any, escape: Optional[str] = None) -> Any:
+    return a.ilike(b, escape=escape)
+
+
+@comparison_op
+@_operator_fn
+def not_ilike_op(a: Any, b: Any, escape: Optional[str] = None) -> Any:
+    return a.not_ilike(b, escape=escape)
+
+
+# 1.4 deprecated; see #5435
+if TYPE_CHECKING:
+
+    @_operator_fn
+    def notilike_op(a: Any, b: Any, escape: Optional[str] = None) -> Any: ...
+
+else:
+    notilike_op = not_ilike_op
+
+
+@comparison_op
+@_operator_fn
+def between_op(a: Any, b: Any, c: Any, symmetric: bool = False) -> Any:
+    return a.between(b, c, symmetric=symmetric)
+
+
+@comparison_op
+@_operator_fn
+def not_between_op(a: Any, b: Any, c: Any, symmetric: bool = False) -> Any:
+    return ~a.between(b, c, symmetric=symmetric)
+
+
+# 1.4 deprecated; see #5435
+if TYPE_CHECKING:
+
+    @_operator_fn
+    def notbetween_op(
+        a: Any, b: Any, c: Any, symmetric: bool = False
+    ) -> Any: ...
+
+else:
+    notbetween_op = not_between_op
+
+
+@comparison_op
+@_operator_fn
+def in_op(a: Any, b: Any) -> Any:
+    return a.in_(b)
+
+
+@comparison_op
+@_operator_fn
+def not_in_op(a: Any, b: Any) -> Any:
+    return a.not_in(b)
+
+
+# 1.4 deprecated; see #5429
+if TYPE_CHECKING:
+
+    @_operator_fn
+    def notin_op(a: Any, b: Any) -> Any: ...
+
+else:
+    notin_op = not_in_op
+
+
+@_operator_fn
+def distinct_op(a: Any) -> Any:
+    return a.distinct()
+
+
+@_operator_fn
+def any_op(a: Any) -> Any:
+    return a.any_()
+
+
+@_operator_fn
+def all_op(a: Any) -> Any:
+    return a.all_()
+
+
+def _escaped_like_impl(
+    fn: Callable[..., Any], other: Any, escape: Optional[str], autoescape: bool
+) -> Any:
+    if autoescape:
+        if autoescape is not True:
+            util.warn(
+                "The autoescape parameter is now a simple boolean True/False"
+            )
+        if escape is None:
+            escape = "/"
+
+        if not isinstance(other, str):
+            raise TypeError("String value expected when autoescape=True")
+
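+        # double up the escape character itself first, so that literal
+        # occurrences of it in the value survive the wildcard escaping below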
+        if escape not in ("%", "_"):
+            other = other.replace(escape, escape + escape)
+
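+        # then prefix each LIKE wildcard so it matches as a literal character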
+        other = other.replace("%", escape + "%").replace("_", escape + "_")
+
+    return fn(other, escape=escape)
+
+
+@comparison_op
+@_operator_fn
+def startswith_op(
+    a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False
+) -> Any:
+    return _escaped_like_impl(a.startswith, b, escape, autoescape)
+
+
+@comparison_op
+@_operator_fn
+def not_startswith_op(
+    a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False
+) -> Any:
+    return ~_escaped_like_impl(a.startswith, b, escape, autoescape)
+
+
+# 1.4 deprecated; see #5435
+if TYPE_CHECKING:
+
+    @_operator_fn
+    def notstartswith_op(
+        a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False
+    ) -> Any: ...
+
+else:
+    notstartswith_op = not_startswith_op
+
+
+@comparison_op
+@_operator_fn
+def istartswith_op(
+    a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False
+) -> Any:
+    return _escaped_like_impl(a.istartswith, b, escape, autoescape)
+
+
+@comparison_op
+@_operator_fn
+def not_istartswith_op(
+    a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False
+) -> Any:
+    return ~_escaped_like_impl(a.istartswith, b, escape, autoescape)
+
+
+@comparison_op
+@_operator_fn
+def endswith_op(
+    a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False
+) -> Any:
+    return _escaped_like_impl(a.endswith, b, escape, autoescape)
+
+
+@comparison_op
+@_operator_fn
+def not_endswith_op(
+    a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False
+) -> Any:
+    return ~_escaped_like_impl(a.endswith, b, escape, autoescape)
+
+
+# 1.4 deprecated; see #5435
+if TYPE_CHECKING:
+
+    @_operator_fn
+    def notendswith_op(
+        a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False
+    ) -> Any: ...
+
+else:
+    notendswith_op = not_endswith_op
+
+
+@comparison_op
+@_operator_fn
+def iendswith_op(
+    a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False
+) -> Any:
+    return _escaped_like_impl(a.iendswith, b, escape, autoescape)
+
+
+@comparison_op
+@_operator_fn
+def not_iendswith_op(
+    a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False
+) -> Any:
+    return ~_escaped_like_impl(a.iendswith, b, escape, autoescape)
+
+
+@comparison_op
+@_operator_fn
+def contains_op(
+    a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False
+) -> Any:
+    return _escaped_like_impl(a.contains, b, escape, autoescape)
+
+
+@comparison_op
+@_operator_fn
+def not_contains_op(
+    a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False
+) -> Any:
+    return ~_escaped_like_impl(a.contains, b, escape, autoescape)
+
+
+# 1.4 deprecated; see #5435
+if TYPE_CHECKING:
+
+    @_operator_fn
+    def notcontains_op(
+        a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False
+    ) -> Any: ...
+
+else:
+    notcontains_op = not_contains_op
+
+
+@comparison_op
+@_operator_fn
+def icontains_op(
+    a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False
+) -> Any:
+    return _escaped_like_impl(a.icontains, b, escape, autoescape)
+
+
+@comparison_op
+@_operator_fn
+def not_icontains_op(
+    a: Any, b: Any, escape: Optional[str] = None, autoescape: bool = False
+) -> Any:
+    return ~_escaped_like_impl(a.icontains, b, escape, autoescape)
+
+
+@comparison_op
+@_operator_fn
+def match_op(a: Any, b: Any, **kw: Any) -> Any:
+    return a.match(b, **kw)
+
+
+@comparison_op
+@_operator_fn
+def regexp_match_op(a: Any, b: Any, flags: Optional[str] = None) -> Any:
+    return a.regexp_match(b, flags=flags)
+
+
+@comparison_op
+@_operator_fn
+def not_regexp_match_op(a: Any, b: Any, flags: Optional[str] = None) -> Any:
+    return ~a.regexp_match(b, flags=flags)
+
+
+@_operator_fn
+def regexp_replace_op(
+    a: Any, b: Any, replacement: Any, flags: Optional[str] = None
+) -> Any:
+    return a.regexp_replace(b, replacement=replacement, flags=flags)
+
+
+@comparison_op
+@_operator_fn
+def not_match_op(a: Any, b: Any, **kw: Any) -> Any:
+    return ~a.match(b, **kw)
+
+
+# 1.4 deprecated; see #5429
+if TYPE_CHECKING:
+
+    @_operator_fn
+    def notmatch_op(a: Any, b: Any, **kw: Any) -> Any: ...
+
+else:
+    notmatch_op = not_match_op
+
+
+@_operator_fn
+def comma_op(a: Any, b: Any) -> Any:
+    raise NotImplementedError()
+
+
+@_operator_fn
+def filter_op(a: Any, b: Any) -> Any:
+    raise NotImplementedError()
+
+
+@_operator_fn
+def concat_op(a: Any, b: Any) -> Any:
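+    # when the left side provides no .concat() (e.g. it is a plain Python
+    # value), delegate to the right side's reverse-concat hook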
+    try:
+        concat = a.concat
+    except AttributeError:
+        return b._rconcat(a)
+    else:
+        return concat(b)
+
+
+@_operator_fn
+def desc_op(a: Any) -> Any:
+    return a.desc()
+
+
+@_operator_fn
+def asc_op(a: Any) -> Any:
+    return a.asc()
+
+
+@_operator_fn
+def nulls_first_op(a: Any) -> Any:
+    return a.nulls_first()
+
+
+# 1.4 deprecated; see #5435
+if TYPE_CHECKING:
+
+    @_operator_fn
+    def nullsfirst_op(a: Any) -> Any: ...
+
+else:
+    nullsfirst_op = nulls_first_op
+
+
+@_operator_fn
+def nulls_last_op(a: Any) -> Any:
+    return a.nulls_last()
+
+
+# 1.4 deprecated; see #5435
+if TYPE_CHECKING:
+
+    @_operator_fn
+    def nullslast_op(a: Any) -> Any: ...
+
+else:
+    nullslast_op = nulls_last_op
+
+
+@_operator_fn
+def json_getitem_op(a: Any, b: Any) -> Any:
+    raise NotImplementedError()
+
+
+@_operator_fn
+def json_path_getitem_op(a: Any, b: Any) -> Any:
+    raise NotImplementedError()
+
+
+@_operator_fn
+def bitwise_xor_op(a: Any, b: Any) -> Any:
+    return a.bitwise_xor(b)
+
+
+@_operator_fn
+def bitwise_or_op(a: Any, b: Any) -> Any:
+    return a.bitwise_or(b)
+
+
+@_operator_fn
+def bitwise_and_op(a: Any, b: Any) -> Any:
+    return a.bitwise_and(b)
+
+
+@_operator_fn
+def bitwise_not_op(a: Any) -> Any:
+    return a.bitwise_not()
+
+
+@_operator_fn
+def bitwise_lshift_op(a: Any, b: Any) -> Any:
+    return a.bitwise_lshift(b)
+
+
+@_operator_fn
+def bitwise_rshift_op(a: Any, b: Any) -> Any:
+    return a.bitwise_rshift(b)
+
+
+def is_comparison(op: OperatorType) -> bool:
+    return op in _comparison or (
+        isinstance(op, custom_op) and op.is_comparison
+    )
+
+
+def is_commutative(op: OperatorType) -> bool:
+    return op in _commutative
+
+
+def is_ordering_modifier(op: OperatorType) -> bool:
+    return op in (asc_op, desc_op, nulls_first_op, nulls_last_op)
+
+
+def is_natural_self_precedent(op: OperatorType) -> bool:
+    return op in _natural_self_precedent or (
+        isinstance(op, custom_op) and op.natural_self_precedent
+    )
+
+
+_booleans = (inv, is_true, is_false, and_, or_)
+
+
+def is_boolean(op: OperatorType) -> bool:
+    return is_comparison(op) or op in _booleans
+
+
+_mirror = {gt: lt, ge: le, lt: gt, le: ge}
+
+
+def mirror(op: OperatorType) -> OperatorType:
+    """rotate a comparison operator 180 degrees.
+
+    Note this is not the same as negation.
+
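+    E.g. ``mirror(gt)`` returns ``lt``, since ``a > b`` is equivalent to
+    ``b < a``; operators without a mirrored form are returned unchanged.
+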
+    """
+    return _mirror.get(op, op)
+
+
+_associative = _commutative.union([concat_op, and_, or_]).difference([eq, ne])
+
+
+def is_associative(op: OperatorType) -> bool:
+    return op in _associative
+
+
+def is_order_by_modifier(op: Optional[OperatorType]) -> bool:
+    return op in _order_by_modifier
+
+
+_order_by_modifier = {desc_op, asc_op, nulls_first_op, nulls_last_op}
+
+_natural_self_precedent = _associative.union(
+    [getitem, json_getitem_op, json_path_getitem_op]
+)
+"""Operators where if we have (a op b) op c, we don't want to
+parenthesize (a op b).
+
+"""
+
+
+@_operator_fn
+def _asbool(a: Any) -> Any:
+    raise NotImplementedError()
+
+
+class _OpLimit(IntEnum):
+    _smallest = -100
+    _largest = 100
+
+
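+# operator precedence map; larger values bind more tightly.  operators not
+# listed here fall back to their own ``precedence`` attribute, if present,
+# within is_precedent() below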
+_PRECEDENCE: Dict[OperatorType, int] = {
+    from_: 15,
+    function_as_comparison_op: 15,
+    any_op: 15,
+    all_op: 15,
+    getitem: 15,
+    json_getitem_op: 15,
+    json_path_getitem_op: 15,
+    mul: 8,
+    truediv: 8,
+    floordiv: 8,
+    mod: 8,
+    neg: 8,
+    bitwise_not_op: 8,
+    add: 7,
+    sub: 7,
+    bitwise_xor_op: 7,
+    bitwise_or_op: 7,
+    bitwise_and_op: 7,
+    bitwise_lshift_op: 7,
+    bitwise_rshift_op: 7,
+    filter_op: 6,
+    concat_op: 5,
+    match_op: 5,
+    not_match_op: 5,
+    regexp_match_op: 5,
+    not_regexp_match_op: 5,
+    regexp_replace_op: 5,
+    ilike_op: 5,
+    not_ilike_op: 5,
+    like_op: 5,
+    not_like_op: 5,
+    in_op: 5,
+    not_in_op: 5,
+    is_: 5,
+    is_not: 5,
+    eq: 5,
+    ne: 5,
+    is_distinct_from: 5,
+    is_not_distinct_from: 5,
+    gt: 5,
+    lt: 5,
+    ge: 5,
+    le: 5,
+    between_op: 5,
+    not_between_op: 5,
+    distinct_op: 5,
+    inv: 5,
+    is_true: 5,
+    is_false: 5,
+    and_: 3,
+    or_: 2,
+    comma_op: -1,
+    desc_op: 3,
+    asc_op: 3,
+    collate: 4,
+    as_: -1,
+    exists: 0,
+    _asbool: -10,
+}
+
+
+def is_precedent(
+    operator: OperatorType, against: Optional[OperatorType]
+) -> bool:
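+    # True means an expression using ``operator`` must be parenthesized when
+    # it appears inside an expression using ``against``, i.e. ``operator``
+    # binds no more tightly than the enclosing operator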
+    if operator is against and is_natural_self_precedent(operator):
+        return False
+    elif against is None:
+        return True
+    else:
+        return bool(
+            _PRECEDENCE.get(
+                operator, getattr(operator, "precedence", _OpLimit._smallest)
+            )
+            <= _PRECEDENCE.get(
+                against, getattr(against, "precedence", _OpLimit._largest)
+            )
+        )
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/roles.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/roles.py
new file mode 100644
index 00000000..da69616d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/roles.py
@@ -0,0 +1,323 @@
+# sql/roles.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+from __future__ import annotations
+
+from typing import Any
+from typing import Generic
+from typing import Optional
+from typing import TYPE_CHECKING
+from typing import TypeVar
+
+from .. import util
+from ..util.typing import Literal
+
+if TYPE_CHECKING:
+    from ._typing import _PropagateAttrsType
+    from .elements import Label
+    from .selectable import _SelectIterable
+    from .selectable import FromClause
+    from .selectable import Subquery
+
+_T = TypeVar("_T", bound=Any)
+_T_co = TypeVar("_T_co", bound=Any, covariant=True)
+
+
+class SQLRole:
+    """Define a "role" within a SQL statement structure.
+
+    Classes within SQL Core participate within SQLRole hierarchies in order
+    to more accurately indicate where they may be used within SQL statements
+    of all types.
+
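+    For example, the coercion system resolves an arbitrary argument against
+    a role; a minimal sketch (``some_argument`` here is illustrative)::
+
+        from sqlalchemy.sql import coercions, roles
+
+        element = coercions.expect(roles.WhereHavingRole, some_argument)
+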
+    .. versionadded:: 1.4
+
+    """
+
+    __slots__ = ()
+    allows_lambda = False
+    uses_inspection = False
+
+
+class UsesInspection:
+    __slots__ = ()
+    _post_inspect: Literal[None] = None
+    uses_inspection = True
+
+
+class AllowsLambdaRole:
+    __slots__ = ()
+    allows_lambda = True
+
+
+class HasCacheKeyRole(SQLRole):
+    __slots__ = ()
+    _role_name = "Cacheable Core or ORM object"
+
+
+class ExecutableOptionRole(SQLRole):
+    __slots__ = ()
+    _role_name = "ExecutionOption Core or ORM object"
+
+
+class LiteralValueRole(SQLRole):
+    __slots__ = ()
+    _role_name = "Literal Python value"
+
+
+class ColumnArgumentRole(SQLRole):
+    __slots__ = ()
+    _role_name = "Column expression"
+
+
+class ColumnArgumentOrKeyRole(ColumnArgumentRole):
+    __slots__ = ()
+    _role_name = "Column expression or string key"
+
+
+class StrAsPlainColumnRole(ColumnArgumentRole):
+    __slots__ = ()
+    _role_name = "Column expression or string key"
+
+
+class ColumnListRole(SQLRole):
+    """Elements suitable for forming comma separated lists of expressions."""
+
+    __slots__ = ()
+
+
+class StringRole(SQLRole):
+    """mixin indicating a role that results in strings"""
+
+    __slots__ = ()
+
+
+class TruncatedLabelRole(StringRole, SQLRole):
+    __slots__ = ()
+    _role_name = "String SQL identifier"
+
+
+class ColumnsClauseRole(AllowsLambdaRole, UsesInspection, ColumnListRole):
+    __slots__ = ()
+    _role_name = (
+        "Column expression, FROM clause, or other columns clause element"
+    )
+
+    @property
+    def _select_iterable(self) -> _SelectIterable:
+        raise NotImplementedError()
+
+
+class TypedColumnsClauseRole(Generic[_T_co], SQLRole):
+    """element-typed form of ColumnsClauseRole"""
+
+    __slots__ = ()
+
+
+class LimitOffsetRole(SQLRole):
+    __slots__ = ()
+    _role_name = "LIMIT / OFFSET expression"
+
+
+class ByOfRole(ColumnListRole):
+    __slots__ = ()
+    _role_name = "GROUP BY / OF / etc. expression"
+
+
+class GroupByRole(AllowsLambdaRole, UsesInspection, ByOfRole):
+    __slots__ = ()
+    # note there's a special case right now where you can pass a whole
+    # ORM entity to group_by() and it splits out.   we may not want to keep
+    # this around
+
+    _role_name = "GROUP BY expression"
+
+
+class OrderByRole(AllowsLambdaRole, ByOfRole):
+    __slots__ = ()
+    _role_name = "ORDER BY expression"
+
+
+class StructuralRole(SQLRole):
+    __slots__ = ()
+
+
+class StatementOptionRole(StructuralRole):
+    __slots__ = ()
+    _role_name = "statement sub-expression element"
+
+
+class OnClauseRole(AllowsLambdaRole, StructuralRole):
+    __slots__ = ()
+    _role_name = (
+        "ON clause, typically a SQL expression or "
+        "ORM relationship attribute"
+    )
+
+
+class WhereHavingRole(OnClauseRole):
+    __slots__ = ()
+    _role_name = "SQL expression for WHERE/HAVING role"
+
+
+class ExpressionElementRole(TypedColumnsClauseRole[_T_co]):
+    # note when using generics for ExpressionElementRole,
+    # the generic type needs to be in
+    # sqlalchemy.sql.coercions._impl_lookup mapping also.
+    # these are set up for basic types like int, bool, str, float
+    # right now
+
+    __slots__ = ()
+    _role_name = "SQL expression element"
+
+    def label(self, name: Optional[str]) -> Label[_T]:
+        raise NotImplementedError()
+
+
+class ConstExprRole(ExpressionElementRole[_T]):
+    __slots__ = ()
+    _role_name = "Constant True/False/None expression"
+
+
+class LabeledColumnExprRole(ExpressionElementRole[_T]):
+    __slots__ = ()
+
+
+class BinaryElementRole(ExpressionElementRole[_T]):
+    __slots__ = ()
+    _role_name = "SQL expression element or literal value"
+
+
+class InElementRole(SQLRole):
+    __slots__ = ()
+    _role_name = (
+        "IN expression list, SELECT construct, or bound parameter object"
+    )
+
+
+class JoinTargetRole(AllowsLambdaRole, UsesInspection, StructuralRole):
+    __slots__ = ()
+    _role_name = (
+        "Join target, typically a FROM expression, or ORM "
+        "relationship attribute"
+    )
+
+
+class FromClauseRole(ColumnsClauseRole, JoinTargetRole):
+    __slots__ = ()
+    _role_name = "FROM expression, such as a Table or alias() object"
+
+    _is_subquery = False
+
+    named_with_column: bool
+
+
+class StrictFromClauseRole(FromClauseRole):
+    __slots__ = ()
+    # does not allow text() or select() objects
+
+
+class AnonymizedFromClauseRole(StrictFromClauseRole):
+    __slots__ = ()
+
+    if TYPE_CHECKING:
+
+        def _anonymous_fromclause(
+            self, *, name: Optional[str] = None, flat: bool = False
+        ) -> FromClause: ...
+
+
+class ReturnsRowsRole(SQLRole):
+    __slots__ = ()
+    _role_name = (
+        "Row returning expression such as a SELECT, a FROM clause, or an "
+        "INSERT/UPDATE/DELETE with RETURNING"
+    )
+
+
+class StatementRole(SQLRole):
+    __slots__ = ()
+    _role_name = "Executable SQL or text() construct"
+
+    if TYPE_CHECKING:
+
+        @util.memoized_property
+        def _propagate_attrs(self) -> _PropagateAttrsType: ...
+
+    else:
+        _propagate_attrs = util.EMPTY_DICT
+
+
+class SelectStatementRole(StatementRole, ReturnsRowsRole):
+    __slots__ = ()
+    _role_name = "SELECT construct or equivalent text() construct"
+
+    def subquery(self) -> Subquery:
+        raise NotImplementedError(
+            "All SelectStatementRole objects should implement a "
+            ".subquery() method."
+        )
+
+
+class HasCTERole(ReturnsRowsRole):
+    __slots__ = ()
+
+
+class IsCTERole(SQLRole):
+    __slots__ = ()
+    _role_name = "CTE object"
+
+
+class CompoundElementRole(AllowsLambdaRole, SQLRole):
+    """SELECT statements inside a CompoundSelect, e.g. UNION, EXTRACT, etc."""
+
+    __slots__ = ()
+    _role_name = (
+        "SELECT construct for inclusion in a UNION or other set construct"
+    )
+
+
+# TODO: are we using this?
+class DMLRole(StatementRole):
+    __slots__ = ()
+
+
+class DMLTableRole(FromClauseRole):
+    __slots__ = ()
+    _role_name = "subject table for an INSERT, UPDATE or DELETE"
+
+
+class DMLColumnRole(SQLRole):
+    __slots__ = ()
+    _role_name = "SET/VALUES column expression or string key"
+
+
+class DMLSelectRole(SQLRole):
+    """A SELECT statement embedded in DML, typically INSERT from SELECT"""
+
+    __slots__ = ()
+    _role_name = "SELECT statement or equivalent textual object"
+
+
+class DDLRole(StatementRole):
+    __slots__ = ()
+
+
+class DDLExpressionRole(StructuralRole):
+    __slots__ = ()
+    _role_name = "SQL expression element for DDL constraint"
+
+
+class DDLConstraintColumnRole(SQLRole):
+    __slots__ = ()
+    _role_name = "String column name or column expression for DDL constraint"
+
+
+class DDLReferredColumnRole(DDLConstraintColumnRole):
+    __slots__ = ()
+    _role_name = (
+        "String column name or Column object for DDL foreign key constraint"
+    )
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/schema.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/schema.py
new file mode 100644
index 00000000..a6c24ce6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/schema.py
@@ -0,0 +1,6201 @@
+# sql/schema.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""The schema module provides the building blocks for database metadata.
+
+Each element within this module describes a database entity which can be
+created and dropped, or is otherwise part of such an entity.  Examples include
+tables, columns, sequences, and indexes.
+
+All entities are subclasses of :class:`~sqlalchemy.schema.SchemaItem`, and as
+defined in this module they are intended to be agnostic of any vendor-specific
+constructs.
+
+A collection of entities is grouped into a unit called
+:class:`~sqlalchemy.schema.MetaData`. MetaData serves as a logical grouping of
+schema elements, and can also be associated with an actual database connection
+such that operations involving the contained elements can contact the database
+as needed.
+
+Two of the elements here also build upon their "syntactic" counterparts, which
+are defined in :mod:`~sqlalchemy.sql.expression`, specifically
+:class:`~sqlalchemy.schema.Table` and :class:`~sqlalchemy.schema.Column`.
+Since these objects are part of the SQL expression language, they are usable
+as components in SQL expressions.
+
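+For example, a minimal sketch of a table definition built from these
+elements (the names used are illustrative)::
+
+    from sqlalchemy import Column, Integer, MetaData, String, Table
+
+    metadata = MetaData()
+    user = Table(
+        "user",
+        metadata,
+        Column("id", Integer, primary_key=True),
+        Column("name", String(50)),
+    )
+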
+"""
+from __future__ import annotations
+
+from abc import ABC
+import collections
+from enum import Enum
+import operator
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Collection
+from typing import Dict
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import NoReturn
+from typing import Optional
+from typing import overload
+from typing import Sequence as _typing_Sequence
+from typing import Set
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import coercions
+from . import ddl
+from . import roles
+from . import type_api
+from . import visitors
+from .base import _DefaultDescriptionTuple
+from .base import _NoArg
+from .base import _NoneName
+from .base import _SentinelColumnCharacterization
+from .base import _SentinelDefaultCharacterization
+from .base import DedupeColumnCollection
+from .base import DialectKWArgs
+from .base import Executable
+from .base import SchemaEventTarget as SchemaEventTarget
+from .coercions import _document_text_coercion
+from .elements import ClauseElement
+from .elements import ColumnClause
+from .elements import ColumnElement
+from .elements import quoted_name
+from .elements import TextClause
+from .selectable import TableClause
+from .type_api import to_instance
+from .visitors import ExternallyTraversible
+from .. import event
+from .. import exc
+from .. import inspection
+from .. import util
+from ..util import HasMemoized
+from ..util.typing import Final
+from ..util.typing import Literal
+from ..util.typing import Protocol
+from ..util.typing import Self
+from ..util.typing import TypedDict
+from ..util.typing import TypeGuard
+
+if typing.TYPE_CHECKING:
+    from ._typing import _AutoIncrementType
+    from ._typing import _DDLColumnArgument
+    from ._typing import _InfoType
+    from ._typing import _TextCoercedExpressionArgument
+    from ._typing import _TypeEngineArgument
+    from .base import ColumnSet
+    from .base import ReadOnlyColumnCollection
+    from .compiler import DDLCompiler
+    from .elements import BindParameter
+    from .elements import KeyedColumnElement
+    from .functions import Function
+    from .type_api import TypeEngine
+    from .visitors import anon_map
+    from ..engine import Connection
+    from ..engine import Engine
+    from ..engine.interfaces import _CoreMultiExecuteParams
+    from ..engine.interfaces import CoreExecuteOptionsParameter
+    from ..engine.interfaces import ExecutionContext
+    from ..engine.mock import MockConnection
+    from ..engine.reflection import _ReflectionInfo
+    from ..sql.selectable import FromClause
+
+_T = TypeVar("_T", bound="Any")
+_SI = TypeVar("_SI", bound="SchemaItem")
+_TAB = TypeVar("_TAB", bound="Table")
+
+
+_CreateDropBind = Union["Engine", "Connection", "MockConnection"]
+
+_ConstraintNameArgument = Optional[Union[str, _NoneName]]
+
+_ServerDefaultArgument = Union[
+    "FetchedValue", str, TextClause, ColumnElement[Any]
+]
+
+_ServerOnUpdateArgument = _ServerDefaultArgument
+
+
+class SchemaConst(Enum):
+    RETAIN_SCHEMA = 1
+    """Symbol indicating that a :class:`_schema.Table`, :class:`.Sequence`
+    or in some cases a :class:`_schema.ForeignKey` object, in situations
+    where the object is being copied for a :meth:`.Table.to_metadata`
+    operation, should retain the schema name that it already has.
+
+    """
+
+    BLANK_SCHEMA = 2
+    """Symbol indicating that a :class:`_schema.Table` or :class:`.Sequence`
+    should have 'None' for its schema, even if the parent
+    :class:`_schema.MetaData` has specified a schema.
+
+    .. seealso::
+
+        :paramref:`_schema.MetaData.schema`
+
+        :paramref:`_schema.Table.schema`
+
+        :paramref:`.Sequence.schema`
+
+    """
+
+    NULL_UNSPECIFIED = 3
+    """Symbol indicating the "nullable" keyword was not passed to a Column.
+
+    This is used to distinguish between the use case of passing
+    ``nullable=None`` to a :class:`.Column`, which has special meaning
+    on some backends such as SQL Server.
+
+    """
+
+
+RETAIN_SCHEMA: Final[Literal[SchemaConst.RETAIN_SCHEMA]] = (
+    SchemaConst.RETAIN_SCHEMA
+)
+BLANK_SCHEMA: Final[Literal[SchemaConst.BLANK_SCHEMA]] = (
+    SchemaConst.BLANK_SCHEMA
+)
+NULL_UNSPECIFIED: Final[Literal[SchemaConst.NULL_UNSPECIFIED]] = (
+    SchemaConst.NULL_UNSPECIFIED
+)
+
+
+def _get_table_key(name: str, schema: Optional[str]) -> str:
+    if schema is None:
+        return name
+    else:
+        return schema + "." + name
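+
+
+# e.g. (a quick illustration):
+#   _get_table_key("user", None)           -> "user"
+#   _get_table_key("user", "remote_banks") -> "remote_banks.user"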
+
+
+# this should really be in sql/util.py but we'd have to
+# break an import cycle
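+#
+# e.g., given an expression referring to columns of ``source_table``, a
+# sketch of the rewrite this performs:
+#
+#   expr_copy = _copy_expression(t1.c.x > 5, t1, t2)  # now refers to t2.c.x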
+def _copy_expression(
+    expression: ColumnElement[Any],
+    source_table: Optional[Table],
+    target_table: Optional[Table],
+) -> ColumnElement[Any]:
+    if source_table is None or target_table is None:
+        return expression
+
+    fixed_source_table = source_table
+    fixed_target_table = target_table
+
+    def replace(
+        element: ExternallyTraversible, **kw: Any
+    ) -> Optional[ExternallyTraversible]:
+        if (
+            isinstance(element, Column)
+            and element.table is fixed_source_table
+            and element.key in fixed_source_table.c
+        ):
+            return fixed_target_table.c[element.key]
+        else:
+            return None
+
+    return cast(
+        ColumnElement[Any],
+        visitors.replacement_traverse(expression, {}, replace),
+    )
+
+
+@inspection._self_inspects
+class SchemaItem(SchemaEventTarget, visitors.Visitable):
+    """Base class for items that define a database schema."""
+
+    __visit_name__ = "schema_item"
+
+    create_drop_stringify_dialect = "default"
+
+    def _init_items(self, *args: SchemaItem, **kw: Any) -> None:
+        """Initialize the list of child items for this SchemaItem."""
+        for item in args:
+            if item is not None:
+                try:
+                    spwd = item._set_parent_with_dispatch
+                except AttributeError as err:
+                    raise exc.ArgumentError(
+                        "'SchemaItem' object, such as a 'Column' or a "
+                        f"'Constraint' expected, got {item!r}"
+                    ) from err
+                else:
+                    spwd(self, **kw)
+
+    def __repr__(self) -> str:
+        return util.generic_repr(self, omit_kwarg=["info"])
+
+    @util.memoized_property
+    def info(self) -> _InfoType:
+        """Info dictionary associated with the object, allowing user-defined
+        data to be associated with this :class:`.SchemaItem`.
+
+        The dictionary is automatically generated when first accessed.
+        It can also be specified in the constructor of some objects,
+        such as :class:`_schema.Table` and :class:`_schema.Column`.
+
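+        E.g., a minimal sketch::
+
+            t = Table("some_table", metadata, info={"owner": "accounting"})
+            t.info["reviewed"] = True
+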
+        """
+        return {}
+
+    def _schema_item_copy(self, schema_item: _SI) -> _SI:
+        if "info" in self.__dict__:
+            schema_item.info = self.info.copy()
+        schema_item.dispatch._update(self.dispatch)
+        return schema_item
+
+    _use_schema_map = True
+
+
+class HasConditionalDDL:
+    """define a class that includes the :meth:`.HasConditionalDDL.ddl_if`
+    method, allowing for conditional rendering of DDL.
+
+    Currently applies to constraints and indexes.
+
+    .. versionadded:: 2.0
+
+
+    """
+
+    _ddl_if: Optional[ddl.DDLIf] = None
+
+    def ddl_if(
+        self,
+        dialect: Optional[str] = None,
+        callable_: Optional[ddl.DDLIfCallable] = None,
+        state: Optional[Any] = None,
+    ) -> Self:
+        r"""apply a conditional DDL rule to this schema item.
+
+        These rules work in a similar manner to the
+        :meth:`.ExecutableDDLElement.execute_if` callable, with the added
+        feature that the criteria may be checked within the DDL compilation
+        phase for a construct such as :class:`.CreateTable`.
+        :meth:`.HasConditionalDDL.ddl_if` currently applies towards the
+        :class:`.Index` construct as well as all :class:`.Constraint`
+        constructs.
+
+        :param dialect: string name of a dialect, or a tuple of string names
+         to indicate multiple dialect types.
+
+        :param callable\_: a callable that is constructed using the same form
+         as that described in
+         :paramref:`.ExecutableDDLElement.execute_if.callable_`.
+
+        :param state: any arbitrary object that will be passed to the
+         callable, if present.
+
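+        E.g., a sketch using a hypothetical ``my_table``, limiting an
+        :class:`.Index` to one backend::
+
+            idx = Index("my_pg_idx", my_table.c.data)
+            idx.ddl_if(dialect="postgresql")
+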
+        .. versionadded:: 2.0
+
+        .. seealso::
+
+            :ref:`schema_ddl_ddl_if` - background and usage examples
+
+
+        """
+        self._ddl_if = ddl.DDLIf(dialect, callable_, state)
+        return self
+
+
+class HasSchemaAttr(SchemaItem):
+    """schema item that includes a top-level schema name"""
+
+    schema: Optional[str]
+
+
+class Table(
+    DialectKWArgs, HasSchemaAttr, TableClause, inspection.Inspectable["Table"]
+):
+    r"""Represent a table in a database.
+
+    e.g.::
+
+        mytable = Table(
+            "mytable",
+            metadata,
+            Column("mytable_id", Integer, primary_key=True),
+            Column("value", String(50)),
+        )
+
+    The :class:`_schema.Table`
+    object constructs a unique instance of itself based
+    on its name and optional schema name within the given
+    :class:`_schema.MetaData` object. Calling the :class:`_schema.Table`
+    constructor with the same name and same :class:`_schema.MetaData` argument
+    a second time will return the *same* :class:`_schema.Table`
+    object - in this way
+    the :class:`_schema.Table` constructor acts as a registry function.
+
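+    E.g., a quick sketch of the registry behavior::
+
+        t1 = Table("mytable", metadata)
+        t2 = Table("mytable", metadata)
+        assert t1 is t2
+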
+    .. seealso::
+
+        :ref:`metadata_describing` - Introduction to database metadata
+
+    """
+
+    __visit_name__ = "table"
+
+    if TYPE_CHECKING:
+
+        @util.ro_non_memoized_property
+        def primary_key(self) -> PrimaryKeyConstraint: ...
+
+        @util.ro_non_memoized_property
+        def foreign_keys(self) -> Set[ForeignKey]: ...
+
+    _columns: DedupeColumnCollection[Column[Any]]
+
+    _sentinel_column: Optional[Column[Any]]
+
+    constraints: Set[Constraint]
+    """A collection of all :class:`_schema.Constraint` objects associated with
+      this :class:`_schema.Table`.
+
+      Includes :class:`_schema.PrimaryKeyConstraint`,
+      :class:`_schema.ForeignKeyConstraint`, :class:`_schema.UniqueConstraint`,
+      :class:`_schema.CheckConstraint`.  A separate collection
+      :attr:`_schema.Table.foreign_key_constraints` refers to the collection
+      of all :class:`_schema.ForeignKeyConstraint` objects, and the
+      :attr:`_schema.Table.primary_key` attribute refers to the single
+      :class:`_schema.PrimaryKeyConstraint` associated with the
+      :class:`_schema.Table`.
+
+      .. seealso::
+
+            :attr:`_schema.Table.constraints`
+
+            :attr:`_schema.Table.primary_key`
+
+            :attr:`_schema.Table.foreign_key_constraints`
+
+            :attr:`_schema.Table.indexes`
+
+            :class:`_reflection.Inspector`
+
+
+    """
+
+    indexes: Set[Index]
+    """A collection of all :class:`_schema.Index` objects associated with this
+      :class:`_schema.Table`.
+
+      .. seealso::
+
+            :meth:`_reflection.Inspector.get_indexes`
+
+    """
+
+    if TYPE_CHECKING:
+
+        @util.ro_non_memoized_property
+        def columns(self) -> ReadOnlyColumnCollection[str, Column[Any]]: ...
+
+        @util.ro_non_memoized_property
+        def exported_columns(
+            self,
+        ) -> ReadOnlyColumnCollection[str, Column[Any]]: ...
+
+        @util.ro_non_memoized_property
+        def c(self) -> ReadOnlyColumnCollection[str, Column[Any]]: ...
+
+    def _gen_cache_key(
+        self, anon_map: anon_map, bindparams: List[BindParameter[Any]]
+    ) -> Tuple[Any, ...]:
+        if self._annotations:
+            return (self,) + self._annotations_cache_key
+        else:
+            return (self,)
+
+    if not typing.TYPE_CHECKING:
+        # typing tools seem to be inconsistent in how they handle
+        # __new__, so suggest this pattern for classes that use
+        # __new__.  apply typing to the __init__ method normally
+        @util.deprecated_params(
+            mustexist=(
+                "1.4",
+                "Deprecated alias of :paramref:`_schema.Table.must_exist`",
+            ),
+        )
+        def __new__(cls, *args: Any, **kw: Any) -> Any:
+            return cls._new(*args, **kw)
+
+    @classmethod
+    def _new(cls, *args: Any, **kw: Any) -> Any:
+        if not args and not kw:
+            # python3k pickle seems to call this
+            return object.__new__(cls)
+
+        try:
+            name, metadata, args = args[0], args[1], args[2:]
+        except IndexError:
+            raise TypeError(
+                "Table() takes at least two positional-only "
+                "arguments 'name' and 'metadata'"
+            )
+
+        schema = kw.get("schema", None)
+        if schema is None:
+            schema = metadata.schema
+        elif schema is BLANK_SCHEMA:
+            schema = None
+        keep_existing = kw.get("keep_existing", False)
+        extend_existing = kw.get("extend_existing", False)
+
+        if keep_existing and extend_existing:
+            msg = "keep_existing and extend_existing are mutually exclusive."
+            raise exc.ArgumentError(msg)
+
+        must_exist = kw.pop("must_exist", kw.pop("mustexist", False))
+        key = _get_table_key(name, schema)
+        if key in metadata.tables:
+            if not keep_existing and not extend_existing and bool(args):
+                raise exc.InvalidRequestError(
+                    f"Table '{key}' is already defined for this MetaData "
+                    "instance.  Specify 'extend_existing=True' "
+                    "to redefine "
+                    "options and columns on an "
+                    "existing Table object."
+                )
+            table = metadata.tables[key]
+            if extend_existing:
+                table._init_existing(*args, **kw)
+            return table
+        else:
+            if must_exist:
+                raise exc.InvalidRequestError(f"Table '{key}' not defined")
+            table = object.__new__(cls)
+            table.dispatch.before_parent_attach(table, metadata)
+            metadata._add_table(name, schema, table)
+            try:
+                table.__init__(name, metadata, *args, _no_init=False, **kw)
+                table.dispatch.after_parent_attach(table, metadata)
+                return table
+            except Exception:
+                with util.safe_reraise():
+                    metadata._remove_table(name, schema)
+
+    def __init__(
+        self,
+        name: str,
+        metadata: MetaData,
+        *args: SchemaItem,
+        schema: Optional[Union[str, Literal[SchemaConst.BLANK_SCHEMA]]] = None,
+        quote: Optional[bool] = None,
+        quote_schema: Optional[bool] = None,
+        autoload_with: Optional[Union[Engine, Connection]] = None,
+        autoload_replace: bool = True,
+        keep_existing: bool = False,
+        extend_existing: bool = False,
+        resolve_fks: bool = True,
+        include_columns: Optional[Collection[str]] = None,
+        implicit_returning: bool = True,
+        comment: Optional[str] = None,
+        info: Optional[Dict[Any, Any]] = None,
+        listeners: Optional[
+            _typing_Sequence[Tuple[str, Callable[..., Any]]]
+        ] = None,
+        prefixes: Optional[_typing_Sequence[str]] = None,
+        # used internally in the metadata.reflect() process
+        _extend_on: Optional[Set[Table]] = None,
+        # used by __new__ to bypass __init__
+        _no_init: bool = True,
+        # dialect-specific keyword args
+        **kw: Any,
+    ) -> None:
+        r"""Constructor for :class:`_schema.Table`.
+
+
+        :param name: The name of this table as represented in the database.
+
+            The table name, along with the value of the ``schema`` parameter,
+            forms a key which uniquely identifies this :class:`_schema.Table`
+            within
+            the owning :class:`_schema.MetaData` collection.
+            Additional calls to :class:`_schema.Table` with the same name,
+            metadata,
+            and schema name will return the same :class:`_schema.Table` object.
+
+            Names which contain no upper case characters
+            will be treated as case insensitive names, and will not be quoted
+            unless they are a reserved word or contain special characters.
+            A name with any number of upper case characters is considered
+            to be case sensitive, and will be sent as quoted.
+
+            To enable unconditional quoting for the table name, specify the flag
+            ``quote=True`` to the constructor, or use the :class:`.quoted_name`
+            construct to specify the name.
+
+        :param metadata: a :class:`_schema.MetaData`
+            object which will contain this
+            table.  The metadata is used as a point of association of this table
+            with other tables which are referenced via foreign key.  It also
+            may be used to associate this table with a particular
+            :class:`.Connection` or :class:`.Engine`.
+
+        :param \*args: Additional positional arguments are used primarily
+            to add the list of :class:`_schema.Column`
+            objects contained within this
+            table. Similar to the style of a CREATE TABLE statement, other
+            :class:`.SchemaItem` constructs may be added here, including
+            :class:`.PrimaryKeyConstraint`, and
+            :class:`_schema.ForeignKeyConstraint`.
+
+        :param autoload_replace: Defaults to ``True``; when using
+            :paramref:`_schema.Table.autoload_with`
+            in conjunction with :paramref:`_schema.Table.extend_existing`,
+            indicates
+            that :class:`_schema.Column` objects present in the already-existing
+            :class:`_schema.Table`
+            object should be replaced with columns of the same
+            name retrieved from the autoload process.   When ``False``, columns
+            already present under existing names will be omitted from the
+            reflection process.
+
+            Note that this setting does not impact :class:`_schema.Column` objects
+            specified programmatically within the call to :class:`_schema.Table`
+            that
+            also is autoloading; those :class:`_schema.Column` objects will always
+            replace existing columns of the same name when
+            :paramref:`_schema.Table.extend_existing` is ``True``.
+
+            .. seealso::
+
+                :paramref:`_schema.Table.autoload_with`
+
+                :paramref:`_schema.Table.extend_existing`
+
+        :param autoload_with: An :class:`_engine.Engine` or
+            :class:`_engine.Connection` object,
+            or a :class:`_reflection.Inspector` object as returned by
+            :func:`_sa.inspect`
+            against one, with which this :class:`_schema.Table`
+            object will be reflected.
+            When set to a non-None value, the autoload process will take place
+            for this table against the given engine or connection.
+
+            .. seealso::
+
+                :ref:`metadata_reflection_toplevel`
+
+                :meth:`_events.DDLEvents.column_reflect`
+
+                :ref:`metadata_reflection_dbagnostic_types`
+
+        :param extend_existing: When ``True``, indicates that if this
+            :class:`_schema.Table` is already present in the given
+            :class:`_schema.MetaData`,
+            apply further arguments within the constructor to the existing
+            :class:`_schema.Table`.
+
+            If :paramref:`_schema.Table.extend_existing` or
+            :paramref:`_schema.Table.keep_existing` are not set,
+            and the given name
+            of the new :class:`_schema.Table` refers to a :class:`_schema.Table`
+            that is
+            already present in the target :class:`_schema.MetaData` collection,
+            and
+            this :class:`_schema.Table`
+            specifies additional columns or other constructs
+            or flags that modify the table's state, an
+            error is raised.  The purpose of these two mutually-exclusive flags
+            is to specify what action should be taken when a
+            :class:`_schema.Table`
+            is specified that matches an existing :class:`_schema.Table`,
+            yet specifies
+            additional constructs.
+
+            :paramref:`_schema.Table.extend_existing`
+            will also work in conjunction
+            with :paramref:`_schema.Table.autoload_with` to run a new reflection
+            operation against the database, even if a :class:`_schema.Table`
+            of the same name is already present in the target
+            :class:`_schema.MetaData`; newly reflected :class:`_schema.Column`
+            objects
+            and other options will be added into the state of the
+            :class:`_schema.Table`, potentially overwriting existing columns
+            and options of the same name.
+
+            As is always the case with :paramref:`_schema.Table.autoload_with`,
+            :class:`_schema.Column` objects can be specified in the same
+            :class:`_schema.Table`
+            constructor, which will take precedence.  Below, the existing
+            table ``mytable`` will be augmented with :class:`_schema.Column`
+            objects
+            both reflected from the database, as well as the given
+            :class:`_schema.Column`
+            named "y"::
+
+                Table(
+                    "mytable",
+                    metadata,
+                    Column("y", Integer),
+                    extend_existing=True,
+                    autoload_with=engine,
+                )
+
+            .. seealso::
+
+                :paramref:`_schema.Table.autoload_with`
+
+                :paramref:`_schema.Table.autoload_replace`
+
+                :paramref:`_schema.Table.keep_existing`
+
+
+        :param implicit_returning: True by default - indicates that
+            RETURNING can be used, typically by the ORM, in order to fetch
+            server-generated values such as primary key values and
+            server side defaults, on those backends which support RETURNING.
+
+            In modern SQLAlchemy there is generally no reason to alter this
+            setting, except for some backend specific cases
+            (see :ref:`mssql_triggers` in the SQL Server dialect documentation
+            for one such example).
+
+        :param include_columns: A list of strings indicating a subset of
+            columns to be loaded via the ``autoload`` operation; table columns that
+            aren't present in this list will not be represented on the resulting
+            ``Table`` object. Defaults to ``None`` which indicates all columns
+            should be reflected.
+
+        :param resolve_fks: Whether or not to reflect :class:`_schema.Table`
+            objects
+            related to this one via :class:`_schema.ForeignKey` objects, when
+            :paramref:`_schema.Table.autoload_with` is
+            specified.   Defaults to True.  Set to False to disable reflection of
+            related tables as :class:`_schema.ForeignKey`
+            objects are encountered; may be
+            used either to save on SQL calls or to avoid issues with related tables
+            that can't be accessed. Note that if a related table is already present
+            in the :class:`_schema.MetaData` collection, or becomes present later,
+            a
+            :class:`_schema.ForeignKey` object associated with this
+            :class:`_schema.Table` will
+            resolve to that table normally.
+
+            .. versionadded:: 1.3
+
+            .. seealso::
+
+                :paramref:`.MetaData.reflect.resolve_fks`
+
+
+        :param info: Optional data dictionary which will be populated into the
+            :attr:`.SchemaItem.info` attribute of this object.
+
+        :param keep_existing: When ``True``, indicates that if this Table
+            is already present in the given :class:`_schema.MetaData`, ignore
+            further arguments within the constructor to the existing
+            :class:`_schema.Table`, and return the :class:`_schema.Table`
+            object as
+            originally created. This is to allow a function that wishes
+            to define a new :class:`_schema.Table` on first call, but on
+            subsequent calls will return the same :class:`_schema.Table`,
+            without any of the declarations (particularly constraints)
+            being applied a second time.
+
+            If :paramref:`_schema.Table.extend_existing` or
+            :paramref:`_schema.Table.keep_existing` are not set,
+            and the given name
+            of the new :class:`_schema.Table` refers to a :class:`_schema.Table`
+            that is
+            already present in the target :class:`_schema.MetaData` collection,
+            and
+            this :class:`_schema.Table`
+            specifies additional columns or other constructs
+            or flags that modify the table's state, an
+            error is raised.  The purpose of these two mutually-exclusive flags
+            is to specify what action should be taken when a
+            :class:`_schema.Table`
+            is specified that matches an existing :class:`_schema.Table`,
+            yet specifies
+            additional constructs.
+
+            .. seealso::
+
+                :paramref:`_schema.Table.extend_existing`
+
+        :param listeners: A list of tuples of the form ``(<eventname>, <fn>)``
+            which will be passed to :func:`.event.listen` upon construction.
+            This alternate hook to :func:`.event.listen` allows the establishment
+            of a listener function specific to this :class:`_schema.Table` before
+            the "autoload" process begins.  Historically this has been intended
+            for use with the :meth:`.DDLEvents.column_reflect` event, however
+            note that this event hook may now be associated with the
+            :class:`_schema.MetaData` object directly::
+
+                def listen_for_reflect(table, column_info):
+                    "handle the column reflection event"
+                    # ...
+
+
+                t = Table(
+                    "sometable",
+                    autoload_with=engine,
+                    listeners=[("column_reflect", listen_for_reflect)],
+                )
+
+            .. seealso::
+
+                :meth:`_events.DDLEvents.column_reflect`
+
+        :param must_exist: When ``True``, indicates that this Table must already
+            be present in the given :class:`_schema.MetaData` collection, else
+            an exception is raised.
+
+        :param prefixes:
+            A list of strings to insert after CREATE in the CREATE TABLE
+            statement.  They will be separated by spaces.
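+
+            E.g., a sketch rendering ``CREATE TEMPORARY TABLE``::
+
+                t = Table(
+                    "t", metadata, Column("x", Integer), prefixes=["TEMPORARY"]
+                )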
+
+        :param quote: Force quoting of this table's name on or off, corresponding
+            to ``True`` or ``False``.  When left at its default of ``None``,
+            the table identifier will be quoted according to whether the name is
+            case sensitive (identifiers with at least one upper case character are
+            treated as case sensitive), or if it's a reserved word.  This flag
+            is only needed to force quoting of a reserved word which is not known
+            by the SQLAlchemy dialect.
+
+            .. note:: setting this flag to ``False`` will not provide
+              case-insensitive behavior for table reflection; table reflection
+              will always search for a mixed-case name in a case sensitive
+              fashion.  Case insensitive names are specified in SQLAlchemy only
+              by stating the name with all lower case characters.
+
+        :param quote_schema: same as 'quote' but applies to the schema identifier.
+
+        :param schema: The schema name for this table, which is required if
+            the table resides in a schema other than the default selected schema
+            for the engine's database connection.  Defaults to ``None``.
+
+            If the owning :class:`_schema.MetaData` of this :class:`_schema.Table`
+            specifies its
+            own :paramref:`_schema.MetaData.schema` parameter,
+            then that schema name will
+            be applied to this :class:`_schema.Table`
+            if the schema parameter here is set
+            to ``None``.  To set a blank schema name on a :class:`_schema.Table`
+            that
+            would otherwise use the schema set on the owning
+            :class:`_schema.MetaData`,
+            specify the special symbol :attr:`.BLANK_SCHEMA`.
+
+            The quoting rules for the schema name are the same as those for the
+            ``name`` parameter, in that quoting is applied for reserved words or
+            case-sensitive names; to enable unconditional quoting for the schema
+            name, specify the flag ``quote_schema=True`` to the constructor, or use
+            the :class:`.quoted_name` construct to specify the name.
+
+        :param comment: Optional string that will render an SQL comment on table
+            creation.
+
+            .. versionadded:: 1.2 Added the :paramref:`_schema.Table.comment`
+                parameter
+                to :class:`_schema.Table`.
+
+        :param \**kw: Additional keyword arguments not mentioned above are
+            dialect specific, and passed in the form ``<dialectname>_<argname>``.
+            See the documentation regarding an individual dialect at
+            :ref:`dialect_toplevel` for detail on documented arguments.
+
+        """  # noqa: E501
+        if _no_init:
+            # don't run __init__ from __new__ by default;
+            # __new__ has a specific place that __init__ is called
+            return
+
+        super().__init__(quoted_name(name, quote))
+        self.metadata = metadata
+
+        if schema is None:
+            self.schema = metadata.schema
+        elif schema is BLANK_SCHEMA:
+            self.schema = None
+        else:
+            assert isinstance(schema, str)
+            self.schema = quoted_name(schema, quote_schema)
+
+        self._sentinel_column = None
+
+        self.indexes = set()
+        self.constraints = set()
+        PrimaryKeyConstraint(
+            _implicit_generated=True
+        )._set_parent_with_dispatch(self)
+        self.foreign_keys = set()  # type: ignore
+        self._extra_dependencies: Set[Table] = set()
+        if self.schema is not None:
+            self.fullname = "%s.%s" % (self.schema, self.name)
+        else:
+            self.fullname = self.name
+
+        self.implicit_returning = implicit_returning
+        _reflect_info = kw.pop("_reflect_info", None)
+
+        self.comment = comment
+
+        if info is not None:
+            self.info = info
+
+        if listeners is not None:
+            for evt, fn in listeners:
+                event.listen(self, evt, fn)
+
+        self._prefixes = prefixes if prefixes else []
+
+        self._extra_kwargs(**kw)
+
+        # load column definitions from the database if 'autoload' is defined
+        # we do it after the table is in the singleton dictionary to support
+        # circular foreign keys
+        if autoload_with is not None:
+            self._autoload(
+                metadata,
+                autoload_with,
+                include_columns,
+                _extend_on=_extend_on,
+                _reflect_info=_reflect_info,
+                resolve_fks=resolve_fks,
+            )
+
+        # initialize all the column, etc. objects.  done after reflection to
+        # allow user-overrides
+
+        self._init_items(
+            *args,
+            allow_replacements=extend_existing
+            or keep_existing
+            or autoload_with,
+            all_names={},
+        )
+
+    def _autoload(
+        self,
+        metadata: MetaData,
+        autoload_with: Union[Engine, Connection],
+        include_columns: Optional[Collection[str]],
+        exclude_columns: Collection[str] = (),
+        resolve_fks: bool = True,
+        _extend_on: Optional[Set[Table]] = None,
+        _reflect_info: Optional[_ReflectionInfo] = None,
+    ) -> None:
+        insp = inspection.inspect(autoload_with)
+        with insp._inspection_context() as conn_insp:
+            conn_insp.reflect_table(
+                self,
+                include_columns,
+                exclude_columns,
+                resolve_fks,
+                _extend_on=_extend_on,
+                _reflect_info=_reflect_info,
+            )
+
+    @property
+    def _sorted_constraints(self) -> List[Constraint]:
+        """Return the set of constraints as a list, sorted by creation
+        order.
+
+        """
+
+        return sorted(self.constraints, key=lambda c: c._creation_order)
+
+    @property
+    def foreign_key_constraints(self) -> Set[ForeignKeyConstraint]:
+        """:class:`_schema.ForeignKeyConstraint` objects referred to by this
+        :class:`_schema.Table`.
+
+        This list is produced from the collection of
+        :class:`_schema.ForeignKey`
+        objects currently associated.
+
+
+        .. seealso::
+
+            :attr:`_schema.Table.constraints`
+
+            :attr:`_schema.Table.foreign_keys`
+
+            :attr:`_schema.Table.indexes`
+
+        """
+        return {
+            fkc.constraint
+            for fkc in self.foreign_keys
+            if fkc.constraint is not None
+        }
+
+    def _init_existing(self, *args: Any, **kwargs: Any) -> None:
+        autoload_with = kwargs.pop("autoload_with", None)
+        autoload = kwargs.pop("autoload", autoload_with is not None)
+        autoload_replace = kwargs.pop("autoload_replace", True)
+        schema = kwargs.pop("schema", None)
+        _extend_on = kwargs.pop("_extend_on", None)
+        _reflect_info = kwargs.pop("_reflect_info", None)
+
+        # these arguments are only used with _init()
+        extend_existing = kwargs.pop("extend_existing", False)
+        keep_existing = kwargs.pop("keep_existing", False)
+
+        assert extend_existing
+        assert not keep_existing
+
+        if schema and schema != self.schema:
+            raise exc.ArgumentError(
+                f"Can't change schema of existing table "
+                f"from '{self.schema}' to '{schema}'",
+            )
+
+        include_columns = kwargs.pop("include_columns", None)
+        if include_columns is not None:
+            for c in self.c:
+                if c.name not in include_columns:
+                    self._columns.remove(c)
+
+        resolve_fks = kwargs.pop("resolve_fks", True)
+
+        for key in ("quote", "quote_schema"):
+            if key in kwargs:
+                raise exc.ArgumentError(
+                    "Can't redefine 'quote' or 'quote_schema' arguments"
+                )
+
+        # update `self` with these kwargs, if provided
+        self.comment = kwargs.pop("comment", self.comment)
+        self.implicit_returning = kwargs.pop(
+            "implicit_returning", self.implicit_returning
+        )
+        self.info = kwargs.pop("info", self.info)
+
+        exclude_columns: _typing_Sequence[str]
+
+        if autoload:
+            if not autoload_replace:
+                # don't replace columns already present.
+                # we'd like to do this for constraints also however we don't
+                # have simple de-duping for unnamed constraints.
+                exclude_columns = [c.name for c in self.c]
+            else:
+                exclude_columns = ()
+            self._autoload(
+                self.metadata,
+                autoload_with,
+                include_columns,
+                exclude_columns,
+                resolve_fks,
+                _extend_on=_extend_on,
+                _reflect_info=_reflect_info,
+            )
+
+        all_names = {c.name: c for c in self.c}
+        self._extra_kwargs(**kwargs)
+        self._init_items(*args, allow_replacements=True, all_names=all_names)
+
+    def _extra_kwargs(self, **kwargs: Any) -> None:
+        self._validate_dialect_kwargs(kwargs)
+
+    def _init_collections(self) -> None:
+        pass
+
+    def _reset_exported(self) -> None:
+        pass
+
+    @util.ro_non_memoized_property
+    def _autoincrement_column(self) -> Optional[Column[int]]:
+        return self.primary_key._autoincrement_column
+
+    @util.ro_memoized_property
+    def _sentinel_column_characteristics(
+        self,
+    ) -> _SentinelColumnCharacterization:
+        """determine a candidate column (or columns, in case of a client
+        generated composite primary key) which can be used as an
+        "insert sentinel" for an INSERT statement.
+
+        The returned structure, :class:`_SentinelColumnCharacterization`,
+        includes all the details needed by :class:`.Dialect` and
+        :class:`.SQLCompiler` to determine if these column(s) can be used
+        as an INSERT..RETURNING sentinel for a particular database
+        dialect.
+
+        .. versionadded:: 2.0.10
+
+        """
+
+        sentinel_is_explicit = False
+        sentinel_is_autoinc = False
+        the_sentinel: Optional[_typing_Sequence[Column[Any]]] = None
+
+        # see if a column was explicitly marked "insert_sentinel=True".
+        explicit_sentinel_col = self._sentinel_column
+
+        if explicit_sentinel_col is not None:
+            the_sentinel = (explicit_sentinel_col,)
+            sentinel_is_explicit = True
+
+        autoinc_col = self._autoincrement_column
+        if sentinel_is_explicit and explicit_sentinel_col is autoinc_col:
+            assert autoinc_col is not None
+            sentinel_is_autoinc = True
+        elif explicit_sentinel_col is None and autoinc_col is not None:
+            the_sentinel = (autoinc_col,)
+            sentinel_is_autoinc = True
+
+        default_characterization = _SentinelDefaultCharacterization.UNKNOWN
+
+        if the_sentinel:
+            the_sentinel_zero = the_sentinel[0]
+            if the_sentinel_zero.identity:
+                if the_sentinel_zero.identity._increment_is_negative:
+                    if sentinel_is_explicit:
+                        raise exc.InvalidRequestError(
+                            "Can't use IDENTITY default with negative "
+                            "increment as an explicit sentinel column"
+                        )
+                    else:
+                        if sentinel_is_autoinc:
+                            autoinc_col = None
+                            sentinel_is_autoinc = False
+                        the_sentinel = None
+                else:
+                    default_characterization = (
+                        _SentinelDefaultCharacterization.IDENTITY
+                    )
+            elif (
+                the_sentinel_zero.default is None
+                and the_sentinel_zero.server_default is None
+            ):
+                if the_sentinel_zero.nullable:
+                    raise exc.InvalidRequestError(
+                        f"Column {the_sentinel_zero} has been marked as a "
+                        "sentinel "
+                        "column with no default generation function; it "
+                        "at least needs to be marked nullable=False assuming "
+                        "user-populated sentinel values will be used."
+                    )
+                default_characterization = (
+                    _SentinelDefaultCharacterization.NONE
+                )
+            elif the_sentinel_zero.default is not None:
+                if the_sentinel_zero.default.is_sentinel:
+                    default_characterization = (
+                        _SentinelDefaultCharacterization.SENTINEL_DEFAULT
+                    )
+                elif default_is_sequence(the_sentinel_zero.default):
+                    if the_sentinel_zero.default._increment_is_negative:
+                        if sentinel_is_explicit:
+                            raise exc.InvalidRequestError(
+                                "Can't use SEQUENCE default with negative "
+                                "increment as an explicit sentinel column"
+                            )
+                        else:
+                            if sentinel_is_autoinc:
+                                autoinc_col = None
+                                sentinel_is_autoinc = False
+                            the_sentinel = None
+
+                    default_characterization = (
+                        _SentinelDefaultCharacterization.SEQUENCE
+                    )
+                elif the_sentinel_zero.default.is_callable:
+                    default_characterization = (
+                        _SentinelDefaultCharacterization.CLIENTSIDE
+                    )
+            elif the_sentinel_zero.server_default is not None:
+                if sentinel_is_explicit:
+                    raise exc.InvalidRequestError(
+                        f"Column {the_sentinel[0]} can't be a sentinel column "
+                        "because it uses an explicit server side default "
+                        "that's not the Identity() default."
+                    )
+
+                default_characterization = (
+                    _SentinelDefaultCharacterization.SERVERSIDE
+                )
+
+        if the_sentinel is None and self.primary_key:
+            assert autoinc_col is None
+
+            # determine for non-autoincrement pk if all elements are
+            # client side
+            for _pkc in self.primary_key:
+                if _pkc.server_default is not None or (
+                    _pkc.default and not _pkc.default.is_callable
+                ):
+                    break
+            else:
+                the_sentinel = tuple(self.primary_key)
+                default_characterization = (
+                    _SentinelDefaultCharacterization.CLIENTSIDE
+                )
+
+        return _SentinelColumnCharacterization(
+            the_sentinel,
+            sentinel_is_explicit,
+            sentinel_is_autoinc,
+            default_characterization,
+        )
+
+    @property
+    def autoincrement_column(self) -> Optional[Column[int]]:
+        """Returns the :class:`.Column` object which currently represents
+        the "auto increment" column, if any, else returns None.
+
+        This is based on the rules for :class:`.Column` as defined by the
+        :paramref:`.Column.autoincrement` parameter, which generally means the
+        column within a single integer column primary key constraint that is
+        not constrained by a foreign key.   If the table does not have such
+        a primary key constraint, then there's no "autoincrement" column.
+        A :class:`.Table` may have only one column defined as the
+        "autoincrement" column.
+
+        .. versionadded:: 2.0.4
+
+        .. seealso::
+
+            :paramref:`.Column.autoincrement`
+
+        """
+        return self._autoincrement_column
+
+    @property
+    def key(self) -> str:
+        """Return the 'key' for this :class:`_schema.Table`.
+
+        This value is used as the dictionary key within the
+        :attr:`_schema.MetaData.tables` collection.   It is typically the same
+        as that of :attr:`_schema.Table.name` for a table with no
+        :attr:`_schema.Table.schema`
+        set; otherwise it is typically of the form
+        ``schemaname.tablename``.
+
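+        E.g., a quick sketch::
+
+            t = Table("user", MetaData(), schema="remote_banks")
+            assert t.key == "remote_banks.user"
+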
+        """
+        return _get_table_key(self.name, self.schema)
+
+    def __repr__(self) -> str:
+        return "Table(%s)" % ", ".join(
+            [repr(self.name)]
+            + [repr(self.metadata)]
+            + [repr(x) for x in self.columns]
+            + ["%s=%s" % (k, repr(getattr(self, k))) for k in ["schema"]]
+        )
+
+    def __str__(self) -> str:
+        return _get_table_key(self.description, self.schema)
+
+    def add_is_dependent_on(self, table: Table) -> None:
+        """Add a 'dependency' for this Table.
+
+        This is another Table object which must be created
+        first before this one can, or dropped after this one.
+
+        Usually, dependencies between tables are determined via
+        ForeignKey objects.   However, for other situations that
+        create dependencies outside of foreign keys (rules, inheriting),
+        this method can manually establish such a link.
+
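+        E.g., a sketch using hypothetical tables, where ``widgets`` must be
+        created after (and dropped before) ``widget_audit``::
+
+            widgets.add_is_dependent_on(widget_audit)
+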
+        """
+        self._extra_dependencies.add(table)
+
+    def append_column(
+        self, column: ColumnClause[Any], replace_existing: bool = False
+    ) -> None:
+        """Append a :class:`_schema.Column` to this :class:`_schema.Table`.
+
+        The "key" of the newly added :class:`_schema.Column`, i.e. the
+        value of its ``.key`` attribute, will then be available
+        in the ``.c`` collection of this :class:`_schema.Table`, and the
+        column definition will be included in any CREATE TABLE, SELECT,
+        UPDATE, etc. statements generated from this :class:`_schema.Table`
+        construct.
+
+        Note that this does **not** change the definition of the table
+        as it exists within any underlying database, assuming that
+        table has already been created in the database.   Relational
+        databases support the addition of columns to existing tables
+        using the SQL ALTER command, which would need to be
+        emitted for an already-existing table that doesn't contain
+        the newly added column.
+
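+        E.g., a minimal sketch::
+
+            t = Table("t", metadata, Column("x", Integer))
+            t.append_column(Column("y", Integer))
+            assert "y" in t.c
+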
+        :param replace_existing: When ``True``, allows replacing existing
+            columns. When ``False``, the default, an error is raised
+            if a column with the same ``.key`` already exists.
+
+            .. versionadded:: 1.4.0
+        """
+
+        try:
+            column._set_parent_with_dispatch(
+                self,
+                allow_replacements=replace_existing,
+                all_names={c.name: c for c in self.c},
+            )
+        except exc.DuplicateColumnError as de:
+            raise exc.DuplicateColumnError(
+                f"{de.args[0]} Specify replace_existing=True to "
+                "Table.append_column() to replace an "
+                "existing column."
+            ) from de
+
+    def append_constraint(self, constraint: Union[Index, Constraint]) -> None:
+        """Append a :class:`_schema.Constraint` to this
+        :class:`_schema.Table`.
+
+        This has the effect of the constraint being included in any
+        future CREATE TABLE statement, assuming specific DDL creation
+        events have not been associated with the given
+        :class:`_schema.Constraint` object.
+
+        Note that this does **not** produce the constraint within the
+        relational database automatically, for a table that already exists
+        in the database.   To add a constraint to an
+        existing relational database table, the SQL ALTER command must
+        be used.  SQLAlchemy also provides the
+        :class:`.AddConstraint` construct which can produce this SQL when
+        invoked as an executable clause.
+
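+        E.g., a minimal sketch, given a :class:`_schema.Table` ``t``::
+
+            t.append_constraint(UniqueConstraint("name", name="uq_t_name"))
+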
+        """
+
+        constraint._set_parent_with_dispatch(self)
+
+    def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None:
+        metadata = parent
+        assert isinstance(metadata, MetaData)
+        metadata._add_table(self.name, self.schema, self)
+        self.metadata = metadata
+
+    def create(self, bind: _CreateDropBind, checkfirst: bool = False) -> None:
+        """Issue a ``CREATE`` statement for this
+        :class:`_schema.Table`, using the given
+        :class:`.Connection` or :class:`.Engine`
+        for connectivity.
+
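+        E.g., a sketch assuming an :class:`_engine.Engine` named ``engine``::
+
+            mytable.create(engine, checkfirst=True)
+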
+        .. seealso::
+
+            :meth:`_schema.MetaData.create_all`.
+
+        """
+
+        bind._run_ddl_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
+
+    def drop(self, bind: _CreateDropBind, checkfirst: bool = False) -> None:
+        """Issue a ``DROP`` statement for this
+        :class:`_schema.Table`, using the given
+        :class:`.Connection` or :class:`.Engine` for connectivity.
+
+        .. seealso::
+
+            :meth:`_schema.MetaData.drop_all`.
+
+        """
+        bind._run_ddl_visitor(ddl.SchemaDropper, self, checkfirst=checkfirst)
+
+    @util.deprecated(
+        "1.4",
+        ":meth:`_schema.Table.tometadata` is renamed to "
+        ":meth:`_schema.Table.to_metadata`",
+    )
+    def tometadata(
+        self,
+        metadata: MetaData,
+        schema: Union[str, Literal[SchemaConst.RETAIN_SCHEMA]] = RETAIN_SCHEMA,
+        referred_schema_fn: Optional[
+            Callable[
+                [Table, Optional[str], ForeignKeyConstraint, Optional[str]],
+                Optional[str],
+            ]
+        ] = None,
+        name: Optional[str] = None,
+    ) -> Table:
+        """Return a copy of this :class:`_schema.Table`
+        associated with a different
+        :class:`_schema.MetaData`.
+
+        See :meth:`_schema.Table.to_metadata` for a full description.
+
+        """
+        return self.to_metadata(
+            metadata,
+            schema=schema,
+            referred_schema_fn=referred_schema_fn,
+            name=name,
+        )
+
+    def to_metadata(
+        self,
+        metadata: MetaData,
+        schema: Union[str, Literal[SchemaConst.RETAIN_SCHEMA]] = RETAIN_SCHEMA,
+        referred_schema_fn: Optional[
+            Callable[
+                [Table, Optional[str], ForeignKeyConstraint, Optional[str]],
+                Optional[str],
+            ]
+        ] = None,
+        name: Optional[str] = None,
+    ) -> Table:
+        """Return a copy of this :class:`_schema.Table` associated with a
+        different :class:`_schema.MetaData`.
+
+        E.g.::
+
+            m1 = MetaData()
+
+            user = Table("user", m1, Column("id", Integer, primary_key=True))
+
+            m2 = MetaData()
+            user_copy = user.to_metadata(m2)
+
+        .. versionchanged:: 1.4  The :meth:`_schema.Table.to_metadata` function
+           was renamed from :meth:`_schema.Table.tometadata`.
+
+
+        :param metadata: Target :class:`_schema.MetaData` object,
+         into which the
+         new :class:`_schema.Table` object will be created.
+
+        :param schema: optional string name indicating the target schema.
+         Defaults to the special symbol :attr:`.RETAIN_SCHEMA` which indicates
+         that no change to the schema name should be made in the new
+         :class:`_schema.Table`.  If set to a string name, the new
+         :class:`_schema.Table`
+         will have this new name as the ``.schema``.  If set to ``None``, the
+         schema will be set to that of the schema set on the target
+         :class:`_schema.MetaData`, which is typically ``None`` as well,
+         unless
+         set explicitly::
+
+            m2 = MetaData(schema="newschema")
+
+            # user_copy_one will have "newschema" as the schema name
+            user_copy_one = user.to_metadata(m2, schema=None)
+
+            m3 = MetaData()  # schema defaults to None
+
+            # user_copy_two will have None as the schema name
+            user_copy_two = user.to_metadata(m3, schema=None)
+
+        :param referred_schema_fn: optional callable which can be supplied
+         in order to provide for the schema name that should be assigned
+         to the referenced table of a :class:`_schema.ForeignKeyConstraint`.
+         The callable accepts this parent :class:`_schema.Table`, the
+         target schema that we are changing to, the
+         :class:`_schema.ForeignKeyConstraint` object, and the existing
+         "target schema" of that constraint.  The function should return the
+         string schema name that should be applied.    To reset the schema
+         to "none", return the symbol :data:`.BLANK_SCHEMA`.  To effect no
+         change, return ``None`` or :data:`.RETAIN_SCHEMA`.
+
+         .. versionchanged:: 1.4.33  The ``referred_schema_fn`` function
+            may return the :data:`.BLANK_SCHEMA` or :data:`.RETAIN_SCHEMA`
+            symbols.
+
+         E.g.::
+
+                def referred_schema_fn(table, to_schema, constraint, referred_schema):
+                    if referred_schema == "base_tables":
+                        return referred_schema
+                    else:
+                        return to_schema
+
+
+                new_table = table.to_metadata(
+                    m2, schema="alt_schema", referred_schema_fn=referred_schema_fn
+                )
+
+        :param name: optional string name indicating the target table name.
+         If not specified or None, the table name is retained.  This allows
+         a :class:`_schema.Table` to be copied to the same
+         :class:`_schema.MetaData` target
+         with a new name.
+
+        """  # noqa: E501
+        if name is None:
+            name = self.name
+
+        actual_schema: Optional[str]
+
+        if schema is RETAIN_SCHEMA:
+            actual_schema = self.schema
+        elif schema is None:
+            actual_schema = metadata.schema
+        else:
+            actual_schema = schema
+        key = _get_table_key(name, actual_schema)
+        if key in metadata.tables:
+            util.warn(
+                f"Table '{self.description}' already exists within the given "
+                "MetaData - not copying."
+            )
+            return metadata.tables[key]
+
+        args = []
+        for col in self.columns:
+            args.append(col._copy(schema=actual_schema, _to_metadata=metadata))
+        table = Table(
+            name,
+            metadata,
+            schema=actual_schema,
+            comment=self.comment,
+            *args,
+            **self.kwargs,
+        )
+        for const in self.constraints:
+            if isinstance(const, ForeignKeyConstraint):
+                referred_schema = const._referred_schema
+                if referred_schema_fn:
+                    fk_constraint_schema = referred_schema_fn(
+                        self, actual_schema, const, referred_schema
+                    )
+                else:
+                    fk_constraint_schema = (
+                        actual_schema
+                        if referred_schema == self.schema
+                        else None
+                    )
+                table.append_constraint(
+                    const._copy(
+                        schema=fk_constraint_schema, target_table=table
+                    )
+                )
+            elif not const._type_bound:
+                # skip unique constraints that would be generated
+                # by the 'unique' flag on Column
+                if const._column_flag:
+                    continue
+
+                table.append_constraint(
+                    const._copy(schema=actual_schema, target_table=table)
+                )
+        for index in self.indexes:
+            # skip indexes that would be generated
+            # by the 'index' flag on Column
+            if index._column_flag:
+                continue
+            Index(
+                index.name,
+                unique=index.unique,
+                *[
+                    _copy_expression(expr, self, table)
+                    for expr in index._table_bound_expressions
+                ],
+                _table=table,
+                **index.kwargs,
+            )
+        return self._schema_item_copy(table)
+
+
+class Column(DialectKWArgs, SchemaItem, ColumnClause[_T]):
+    """Represents a column in a database table."""
+
+    __visit_name__ = "column"
+
+    inherit_cache = True
+    key: str
+
+    server_default: Optional[FetchedValue]
+
+    def __init__(
+        self,
+        __name_pos: Optional[
+            Union[str, _TypeEngineArgument[_T], SchemaEventTarget]
+        ] = None,
+        __type_pos: Optional[
+            Union[_TypeEngineArgument[_T], SchemaEventTarget]
+        ] = None,
+        *args: SchemaEventTarget,
+        name: Optional[str] = None,
+        type_: Optional[_TypeEngineArgument[_T]] = None,
+        autoincrement: _AutoIncrementType = "auto",
+        default: Optional[Any] = _NoArg.NO_ARG,
+        insert_default: Optional[Any] = _NoArg.NO_ARG,
+        doc: Optional[str] = None,
+        key: Optional[str] = None,
+        index: Optional[bool] = None,
+        unique: Optional[bool] = None,
+        info: Optional[_InfoType] = None,
+        nullable: Optional[
+            Union[bool, Literal[SchemaConst.NULL_UNSPECIFIED]]
+        ] = SchemaConst.NULL_UNSPECIFIED,
+        onupdate: Optional[Any] = None,
+        primary_key: bool = False,
+        server_default: Optional[_ServerDefaultArgument] = None,
+        server_onupdate: Optional[_ServerOnUpdateArgument] = None,
+        quote: Optional[bool] = None,
+        system: bool = False,
+        comment: Optional[str] = None,
+        insert_sentinel: bool = False,
+        _omit_from_statements: bool = False,
+        _proxies: Optional[Any] = None,
+        **dialect_kwargs: Any,
+    ):
+        r"""
+        Construct a new ``Column`` object.
+
+        :param name: The name of this column as represented in the database.
+          This argument may be the first positional argument, or specified
+          via keyword.
+
+          Names which contain no upper case characters
+          will be treated as case insensitive names, and will not be quoted
+          unless they are a reserved word.  Names with any number of upper
+          case characters will be quoted and sent exactly.  Note that this
+          behavior applies even for databases which standardize upper
+          case names as case insensitive such as Oracle Database.
+
+          The name field may be omitted at construction time and applied
+          later, at any time before the Column is associated with a
+          :class:`_schema.Table`.  This is to support convenient
+          usage within the :mod:`~sqlalchemy.ext.declarative` extension.
+
+        :param type\_: The column's type, indicated using an instance which
+          subclasses :class:`~sqlalchemy.types.TypeEngine`.  If no arguments
+          are required for the type, the class of the type can be sent
+          as well, e.g.::
+
+            # use a type with arguments
+            Column("data", String(50))
+
+            # use no arguments
+            Column("level", Integer)
+
+          The ``type`` argument may be the second positional argument
+          or specified by keyword.
+
+          If the ``type`` is ``None`` or is omitted, it will first default to
+          the special type :class:`.NullType`.  If and when this
+          :class:`_schema.Column` is made to refer to another column using
+          :class:`_schema.ForeignKey` and/or
+          :class:`_schema.ForeignKeyConstraint`, the type
+          of the remote-referenced column will be copied to this column as
+          well, at the moment that the foreign key is resolved against that
+          remote :class:`_schema.Column` object.
+
+        :param \*args: Additional positional arguments include various
+          :class:`.SchemaItem` derived constructs which will be applied
+          as options to the column.  These include instances of
+          :class:`.Constraint`, :class:`_schema.ForeignKey`,
+          :class:`.ColumnDefault`, :class:`.Sequence`, :class:`.Computed`,
+          :class:`.Identity`.  In some cases an
+          equivalent keyword argument is available such as ``server_default``,
+          ``default`` and ``unique``.
+
+        :param autoincrement: Set up "auto increment" semantics for an
+          **integer primary key column with no foreign key dependencies**
+          (see later in this docstring for a more specific definition).
+          This may influence the :term:`DDL` that will be emitted for
+          this column during a table create, as well as how the column
+          will be considered when INSERT statements are compiled and
+          executed.
+
+          The default value is the string ``"auto"``,
+          which indicates that a single-column (i.e. non-composite) primary key
+          that is of an INTEGER type with no other client-side or server-side
+          default constructs indicated should receive auto increment semantics
+          automatically. Other values include ``True`` (force this column to
+          have auto-increment semantics for a :term:`composite primary key` as
+          well), ``False`` (this column should never have auto-increment
+          semantics), and the string ``"ignore_fk"`` (special-case for foreign
+          key columns, see below).
+
+          The term "auto increment semantics" refers both to the kind of DDL
+          that will be emitted for the column within a CREATE TABLE statement,
+          when methods such as :meth:`.MetaData.create_all` and
+          :meth:`.Table.create` are invoked, as well as how the column will be
+          considered when an INSERT statement is compiled and emitted to the
+          database:
+
+          * **DDL rendering** (i.e. :meth:`.MetaData.create_all`,
+            :meth:`.Table.create`): When used on a :class:`.Column` that has
+            no other
+            default-generating construct associated with it (such as a
+            :class:`.Sequence` or :class:`.Identity` construct), the parameter
+            will imply that database-specific keywords such as PostgreSQL
+            ``SERIAL``, MySQL ``AUTO_INCREMENT``, or ``IDENTITY`` on SQL Server
+            should also be rendered.  Not every database backend has an
+            "implied" default generator available; for example the Oracle Database
+            backends alway needs an explicit construct such as
+            :class:`.Identity` to be included with a :class:`.Column` in order
+            for the DDL rendered to include auto-generating constructs to also
+            be produced in the database.
+
+          * **INSERT semantics** (i.e. when a :func:`_sql.insert` construct is
+            compiled into a SQL string and is then executed on a database using
+            :meth:`_engine.Connection.execute` or equivalent): A single-row
+            INSERT statement will be known to produce a new integer primary key
+            value automatically for this column, which will be accessible
+            after the statement is invoked via the
+            :attr:`.CursorResult.inserted_primary_key` attribute upon the
+            :class:`_result.Result` object.   This also applies towards use of the
+            ORM when ORM-mapped objects are persisted to the database,
+            indicating that a new integer primary key will be available to
+            become part of the :term:`identity key` for that object.  This
+            behavior takes place regardless of what DDL constructs are
+            associated with the :class:`_schema.Column` and is independent
+            of the "DDL Rendering" behavior discussed in the previous note
+            above.
+
+          The parameter may be set to ``True`` to indicate that a column which
+          is part of a composite (i.e. multi-column) primary key should
+          have autoincrement semantics, though note that only one column
+          within a primary key may have this setting.    It can also
+          be set to ``True`` to indicate autoincrement semantics on a
+          column that has a client-side or server-side default configured,
+          however note that not all dialects can accommodate all styles
+          of default as an "autoincrement".  It can also be
+          set to ``False`` on a single-column primary key that has a
+          datatype of INTEGER in order to disable auto increment semantics
+          for that column.
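+
+          E.g., a sketch disabling auto increment semantics for an
+          integer primary key column (names are illustrative)::
+
+              Column("code", Integer, primary_key=True, autoincrement=False)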
+
+          The setting *only* has an effect for columns which are:
+
+          * Integer derived (i.e. INT, SMALLINT, BIGINT).
+
+          * Part of the primary key
+
+          * Not referring to another column via :class:`_schema.ForeignKey`,
+            unless
+            the value is specified as ``'ignore_fk'``::
+
+                # turn on autoincrement for this column despite
+                # the ForeignKey()
+                Column(
+                    "id",
+                    ForeignKey("other.id"),
+                    primary_key=True,
+                    autoincrement="ignore_fk",
+                )
+
+          It is typically not desirable to have "autoincrement" enabled on a
+          column that refers to another via foreign key, as such a column is
+          required to refer to a value that originates from elsewhere.
+
+          The setting has these effects on columns that meet the
+          above criteria:
+
+          * DDL issued for the column, if the column does not already include
+            a default generating construct supported by the backend such as
+            :class:`.Identity`, will include database-specific
+            keywords intended to signify this column as an
+            "autoincrement" column for specific backends.   Behavior for
+            primary SQLAlchemy dialects includes:
+
+            * ``AUTO_INCREMENT`` on MySQL and MariaDB
+            * ``SERIAL`` on PostgreSQL
+            * ``IDENTITY`` on SQL Server - this occurs even without the
+              :class:`.Identity` construct as the
+              :paramref:`.Column.autoincrement` parameter pre-dates this
+              construct.
+            * SQLite - SQLite integer primary key columns are implicitly
+              "auto incrementing" and no additional keywords are rendered;
+              to render the special SQLite keyword ``AUTOINCREMENT``
+              is not included as this is unnecessary and not recommended
+              by the database vendor.  See the section
+              :ref:`sqlite_autoincrement` for more background.
+            * Oracle Database - The Oracle Database dialects have no default
+              "autoincrement" feature available at this time; instead, the
+              :class:`.Identity` construct is recommended to achieve this
+              (the :class:`.Sequence` construct may also be used).
+            * Third-party dialects - consult those dialects' documentation
+              for details on their specific behaviors.
+
+          * When a single-row :func:`_sql.insert` construct is compiled and
+            executed, which does not set the :meth:`_sql.Insert.inline`
+            modifier, newly generated primary key values for this column
+            will be automatically retrieved upon statement execution
+            using a method specific to the database driver in use:
+
+            * MySQL, SQLite - calling upon ``cursor.lastrowid``
+              (see
+              `https://www.python.org/dev/peps/pep-0249/#lastrowid
+              <https://www.python.org/dev/peps/pep-0249/#lastrowid>`_)
+            * PostgreSQL, SQL Server, Oracle Database - use RETURNING or an equivalent
+              construct when rendering an INSERT statement, and then retrieving
+              the newly generated primary key values after execution
+            * PostgreSQL, Oracle Database for :class:`_schema.Table` objects that
+              set :paramref:`_schema.Table.implicit_returning` to False -
+              for a :class:`.Sequence` only, the :class:`.Sequence` is invoked
+              explicitly before the INSERT statement takes place so that the
+              newly generated primary key value is available to the client
+            * SQL Server for :class:`_schema.Table` objects that
+              set :paramref:`_schema.Table.implicit_returning` to False -
+              the ``SELECT scope_identity()`` construct is used after the
+              INSERT statement is invoked to retrieve the newly generated
+              primary key value.
+            * Third-party dialects - consult those dialects' documentation
+              for details on their specific behaviors.
+
+          * For multiple-row :func:`_sql.insert` constructs invoked with
+            a list of parameters (i.e. "executemany" semantics), primary-key
+            retrieving behaviors are generally disabled, however there may
+            be special APIs that may be used to retrieve lists of new
+            primary key values for an "executemany", such as the psycopg2
+            "fast insertmany" feature.  Such features are very new and
+            may not yet be well covered in documentation.
+
+        :param default: A scalar, Python callable, or
+            :class:`_expression.ColumnElement` expression representing the
+            *default value* for this column, which will be invoked upon insert
+            if this column is otherwise not specified in the VALUES clause of
+            the insert. This is a shortcut to using :class:`.ColumnDefault` as
+            a positional argument; see that class for full detail on the
+            structure of the argument.
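+
+            E.g., a minimal sketch using a Python callable
+            (assuming ``from datetime import datetime``)::
+
+                Column("created_at", DateTime, default=datetime.utcnow)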
+
+            Contrast this argument to
+            :paramref:`_schema.Column.server_default`
+            which creates a default generator on the database side.
+
+            .. seealso::
+
+                :ref:`metadata_defaults_toplevel`
+
+        :param insert_default: An alias of :paramref:`.Column.default`
+            for compatibility with :func:`_orm.mapped_column`.
+
+            .. versionadded:: 2.0.31
+
+        :param doc: optional String that can be used by the ORM or similar
+            to document attributes on the Python side.   This attribute does
+            **not** render SQL comments; use the
+            :paramref:`_schema.Column.comment`
+            parameter for this purpose.
+
+        :param key: An optional string identifier which will identify this
+            ``Column`` object on the :class:`_schema.Table`.
+            When a key is provided,
+            this is the only identifier referencing the ``Column`` within the
+            application, including ORM attribute mapping; the ``name`` field
+            is used only when rendering SQL.
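+
+            E.g., an illustrative column whose Python-side key differs
+            from its database name::
+
+                Column("user_name", String(50), key="username")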
+
+        :param index: When ``True``, indicates that a :class:`_schema.Index`
+            construct will be automatically generated for this
+            :class:`_schema.Column`, which will result in a "CREATE INDEX"
+            statement being emitted for the :class:`_schema.Table` when the DDL
+            create operation is invoked.
+
+            Using this flag is equivalent to making use of the
+            :class:`_schema.Index` construct explicitly at the level of the
+            :class:`_schema.Table` construct itself::
+
+                Table(
+                    "some_table",
+                    metadata,
+                    Column("x", Integer),
+                    Index("ix_some_table_x", "x"),
+                )
+
+            To add the :paramref:`_schema.Index.unique` flag to the
+            :class:`_schema.Index`, set both the
+            :paramref:`_schema.Column.unique` and
+            :paramref:`_schema.Column.index` flags to True simultaneously,
+            which will have the effect of rendering the "CREATE UNIQUE INDEX"
+            DDL instruction instead of "CREATE INDEX".
+
+            The name of the index is generated using the
+            :ref:`default naming convention <constraint_default_naming_convention>`
+            which for the :class:`_schema.Index` construct is of the form
+            ``ix_<tablename>_<columnname>``.
+
+            As this flag is intended only as a convenience for the common case
+            of adding a single-column, default configured index to a table
+            definition, explicit use of the :class:`_schema.Index` construct
+            should be preferred for most use cases, including composite indexes
+            that encompass more than one column, indexes with SQL expressions
+            or ordering, backend-specific index configuration options, and
+            indexes that use a specific name.
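+
+            E.g., an illustrative explicit composite index with a
+            specific name::
+
+                Table(
+                    "some_table",
+                    metadata,
+                    Column("x", Integer),
+                    Column("y", Integer),
+                    Index("ix_some_table_x_y", "x", "y"),
+                )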
+
+            .. note:: the :attr:`_schema.Column.index` attribute on
+               :class:`_schema.Column`
+               **does not indicate** if this column is indexed or not, only
+               if this flag was explicitly set here.  To view indexes on
+               a column, view the :attr:`_schema.Table.indexes` collection
+               or use :meth:`_reflection.Inspector.get_indexes`.
+
+            .. seealso::
+
+                :ref:`schema_indexes`
+
+                :ref:`constraint_naming_conventions`
+
+                :paramref:`_schema.Column.unique`
+
+        :param info: Optional data dictionary which will be populated into the
+            :attr:`.SchemaItem.info` attribute of this object.
+
+        :param nullable: When set to ``False``, will cause the "NOT NULL"
+            phrase to be added when generating DDL for the column.   When
+            ``True``, will normally generate nothing (in SQL this defaults to
+            "NULL"), except in some very specific backend-specific edge cases
+            where "NULL" may render explicitly.
+            Defaults to ``True`` unless :paramref:`_schema.Column.primary_key`
+            is also ``True`` or the column specifies a :class:`_sql.Identity`,
+            in which case it defaults to ``False``.
+            This parameter is only used when issuing CREATE TABLE statements.
+
+            .. note::
+
+                When the column specifies a :class:`_sql.Identity` this
+                parameter is in general ignored by the DDL compiler. The
+                PostgreSQL database allows a nullable identity column by
+                setting this parameter to ``True`` explicitly.
+
+        :param onupdate: A scalar, Python callable, or
+            :class:`~sqlalchemy.sql.expression.ClauseElement` representing a
+            default value to be applied to the column within UPDATE
+            statements, which will be invoked upon update if this column is not
+            present in the SET clause of the update. This is a shortcut to
+            using :class:`.ColumnDefault` as a positional argument with
+            ``for_update=True``.
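+
+            E.g., a sketch which refreshes a timestamp column during
+            UPDATE statements (assuming ``from datetime import datetime``)::
+
+                Column("updated_at", DateTime, onupdate=datetime.utcnow)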
+
+            .. seealso::
+
+                :ref:`metadata_defaults` - complete discussion of onupdate
+
+        :param primary_key: If ``True``, marks this column as a primary key
+            column. Multiple columns can have this flag set to specify
+            composite primary keys. As an alternative, the primary key of a
+            :class:`_schema.Table` can be specified via an explicit
+            :class:`.PrimaryKeyConstraint` object.
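+
+            E.g., an illustrative composite primary key::
+
+                Table(
+                    "pairs",
+                    metadata,
+                    Column("id1", Integer, primary_key=True),
+                    Column("id2", Integer, primary_key=True),
+                )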
+
+        :param server_default: A :class:`.FetchedValue` instance, str, Unicode
+            or :func:`~sqlalchemy.sql.expression.text` construct representing
+            the DDL DEFAULT value for the column.
+
+            String types will be emitted as-is, surrounded by single quotes::
+
+                Column("x", Text, server_default="val")
+
+            will render:
+
+            .. sourcecode:: sql
+
+                x TEXT DEFAULT 'val'
+
+            A :func:`~sqlalchemy.sql.expression.text` expression will be
+            rendered as-is, without quotes::
+
+                Column("y", DateTime, server_default=text("NOW()"))
+
+            will render:
+
+            .. sourcecode:: sql
+
+                y DATETIME DEFAULT NOW()
+
+            Strings and text() will be converted into a
+            :class:`.DefaultClause` object upon initialization.
+
+            This parameter can also accept complex combinations of contextually
+            valid SQLAlchemy expressions or constructs::
+
+                from sqlalchemy import create_engine
+                from sqlalchemy import Table, Column, MetaData, ARRAY, Text
+                from sqlalchemy.dialects.postgresql import array
+
+                engine = create_engine(
+                    "postgresql+psycopg2://scott:tiger@localhost/mydatabase"
+                )
+                metadata_obj = MetaData()
+                tbl = Table(
+                    "foo",
+                    metadata_obj,
+                    Column(
+                        "bar", ARRAY(Text), server_default=array(["biz", "bang", "bash"])
+                    ),
+                )
+                metadata_obj.create_all(engine)
+
+            The above results in a table created with the following SQL:
+
+            .. sourcecode:: sql
+
+                CREATE TABLE foo (
+                    bar TEXT[] DEFAULT ARRAY['biz', 'bang', 'bash']
+                )
+
+            Use :class:`.FetchedValue` to indicate that an already-existing
+            column will generate a default value on the database side which
+            will be available to SQLAlchemy for post-fetch after inserts. This
+            construct does not specify any DDL and the implementation is left
+            to the database, such as via a trigger.
+
+            .. seealso::
+
+                :ref:`server_defaults` - complete discussion of server side
+                defaults
+
+        :param server_onupdate: A :class:`.FetchedValue` instance
+            representing a database-side default generation function,
+            such as a trigger. This
+            indicates to SQLAlchemy that a newly generated value will be
+            available after updates. This construct does not actually
+            implement any kind of generation function within the database,
+            which instead must be specified separately.
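+
+            E.g., a sketch indicating a trigger-maintained timestamp
+            column::
+
+                Column("updated", DateTime, server_onupdate=FetchedValue())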
+
+            .. warning:: This directive **does not** currently produce MySQL's
+               "ON UPDATE CURRENT_TIMESTAMP()" clause.  See
+               :ref:`mysql_timestamp_onupdate` for background on how to
+               produce this clause.
+
+            .. seealso::
+
+                :ref:`triggered_columns`
+
+        :param quote: Force quoting of this column's name on or off,
+             corresponding to ``True`` or ``False``. When left at its default
+             of ``None``, the column identifier will be quoted according to
+             whether the name is case sensitive (identifiers with at least one
+             upper case character are treated as case sensitive), or if it's a
+             reserved word. This flag is only needed to force quoting of a
+             reserved word which is not known by the SQLAlchemy dialect.
+
+        :param unique: When ``True``, and the :paramref:`_schema.Column.index`
+            parameter is left at its default value of ``False``,
+            indicates that a :class:`_schema.UniqueConstraint`
+            construct will be automatically generated for this
+            :class:`_schema.Column`,
+            which will result in a "UNIQUE CONSTRAINT" clause referring
+            to this column being included
+            in the ``CREATE TABLE`` statement emitted, when the DDL create
+            operation for the :class:`_schema.Table` object is invoked.
+
+            When this flag is ``True`` while the
+            :paramref:`_schema.Column.index` parameter is simultaneously
+            set to ``True``, the effect instead is that a
+            :class:`_schema.Index` construct which includes the
+            :paramref:`_schema.Index.unique` parameter set to ``True``
+            is generated.  See the documentation for
+            :paramref:`_schema.Column.index` for additional detail.
+
+            Using this flag is equivalent to making use of the
+            :class:`_schema.UniqueConstraint` construct explicitly at the
+            level of the :class:`_schema.Table` construct itself::
+
+                Table("some_table", metadata, Column("x", Integer), UniqueConstraint("x"))
+
+            The :paramref:`_schema.UniqueConstraint.name` parameter
+            of the unique constraint object is left at its default value
+            of ``None``; in the absence of a :ref:`naming convention <constraint_naming_conventions>`
+            for the enclosing :class:`_schema.MetaData`, the UNIQUE CONSTRAINT
+            construct will be emitted as unnamed, which typically invokes
+            a database-specific naming convention to take place.
+
+            As this flag is intended only as a convenience for the common case
+            of adding a single-column, default configured unique constraint to a table
+            definition, explicit use of the :class:`_schema.UniqueConstraint` construct
+            should be preferred for most use cases, including composite constraints
+            that encompass more than one column, backend-specific index configuration options, and
+            constraints that use a specific name.
+
+            .. note:: the :attr:`_schema.Column.unique` attribute on
+                :class:`_schema.Column`
+                **does not indicate** if this column has a unique constraint or
+                not, only if this flag was explicitly set here.  To view
+                indexes and unique constraints that may involve this column,
+                view the
+                :attr:`_schema.Table.indexes` and/or
+                :attr:`_schema.Table.constraints` collections or use
+                :meth:`_reflection.Inspector.get_indexes` and/or
+                :meth:`_reflection.Inspector.get_unique_constraints`
+
+            .. seealso::
+
+                :ref:`schema_unique_constraint`
+
+                :ref:`constraint_naming_conventions`
+
+                :paramref:`_schema.Column.index`
+
+        :param system: When ``True``, indicates this is a "system" column,
+             that is a column which is automatically made available by the
+             database, and should not be included in the columns list for a
+             ``CREATE TABLE`` statement.
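+
+             E.g., an illustrative mapping of PostgreSQL's implicit
+             ``xmin`` system column (the datatype shown is an
+             approximation)::
+
+                 Column("xmin", Integer, system=True)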
+
+             For more elaborate scenarios where columns should be
+             conditionally rendered differently on different backends,
+             consider custom compilation rules for :class:`.CreateColumn`.
+
+        :param comment: Optional string that will render an SQL comment on
+             table creation.
+
+             .. versionadded:: 1.2 Added the
+                :paramref:`_schema.Column.comment`
+                parameter to :class:`_schema.Column`.
+
+        :param insert_sentinel: Marks this :class:`_schema.Column` as an
+         :term:`insert sentinel` used for optimizing the performance of the
+         :term:`insertmanyvalues` feature for tables that don't
+         otherwise have qualifying primary key configurations.
+
+         .. versionadded:: 2.0.10
+
+         .. seealso::
+
+            :func:`_schema.insert_sentinel` - all in one helper for declaring
+            sentinel columns
+
+            :ref:`engine_insertmanyvalues`
+
+            :ref:`engine_insertmanyvalues_sentinel_columns`
+
+
+        """  # noqa: E501, RST201, RST202
+
+        l_args = [__name_pos, __type_pos] + list(args)
+        del args
+
+        if l_args:
+            if isinstance(l_args[0], str):
+                if name is not None:
+                    raise exc.ArgumentError(
+                        "May not pass name positionally and as a keyword."
+                    )
+                name = l_args.pop(0)  # type: ignore
+            elif l_args[0] is None:
+                l_args.pop(0)
+        if l_args:
+            coltype = l_args[0]
+
+            if hasattr(coltype, "_sqla_type"):
+                if type_ is not None:
+                    raise exc.ArgumentError(
+                        "May not pass type_ positionally and as a keyword."
+                    )
+                type_ = l_args.pop(0)  # type: ignore
+            elif l_args[0] is None:
+                l_args.pop(0)
+
+        if name is not None:
+            name = quoted_name(name, quote)
+        elif quote is not None:
+            raise exc.ArgumentError(
+                "Explicit 'name' is required when sending 'quote' argument"
+            )
+
+        # name = None is expected to be an interim state
+        # note this use case is legacy now that ORM declarative has a
+        # dedicated "column" construct local to the ORM
+        super().__init__(name, type_)  # type: ignore
+
+        self.key = key if key is not None else name  # type: ignore
+        self.primary_key = primary_key
+        self._insert_sentinel = insert_sentinel
+        self._omit_from_statements = _omit_from_statements
+        self._user_defined_nullable = udn = nullable
+        if udn is not NULL_UNSPECIFIED:
+            self.nullable = udn
+        else:
+            self.nullable = not primary_key
+
+        # these default to None because .index and .unique is *not*
+        # an informational flag about Column - there can still be an
+        # Index or UniqueConstraint referring to this Column.
+        self.index = index
+        self.unique = unique
+
+        self.system = system
+        self.doc = doc
+        self.autoincrement: _AutoIncrementType = autoincrement
+        self.constraints = set()
+        self.foreign_keys = set()
+        self.comment = comment
+        self.computed = None
+        self.identity = None
+
+        # check if this Column is proxying another column
+
+        if _proxies is not None:
+            self._proxies = _proxies
+        else:
+            # otherwise, add DDL-related events
+            self._set_type(self.type)
+
+        if insert_default is not _NoArg.NO_ARG:
+            resolved_default = insert_default
+        elif default is not _NoArg.NO_ARG:
+            resolved_default = default
+        else:
+            resolved_default = None
+
+        if resolved_default is not None:
+            if not isinstance(resolved_default, (ColumnDefault, Sequence)):
+                resolved_default = ColumnDefault(resolved_default)
+
+            self.default = resolved_default
+            l_args.append(resolved_default)
+        else:
+            self.default = None
+
+        if onupdate is not None:
+            if not isinstance(onupdate, (ColumnDefault, Sequence)):
+                onupdate = ColumnDefault(onupdate, for_update=True)
+
+            self.onupdate = onupdate
+            l_args.append(onupdate)
+        else:
+            self.onupdate = None
+
+        if server_default is not None:
+            if isinstance(server_default, FetchedValue):
+                server_default = server_default._as_for_update(False)
+                l_args.append(server_default)
+            else:
+                server_default = DefaultClause(server_default)
+                l_args.append(server_default)
+        self.server_default = server_default
+
+        if server_onupdate is not None:
+            if isinstance(server_onupdate, FetchedValue):
+                server_onupdate = server_onupdate._as_for_update(True)
+                l_args.append(server_onupdate)
+            else:
+                server_onupdate = DefaultClause(
+                    server_onupdate, for_update=True
+                )
+                l_args.append(server_onupdate)
+        self.server_onupdate = server_onupdate
+
+        self._init_items(*cast(_typing_Sequence[SchemaItem], l_args))
+
+        util.set_creation_order(self)
+
+        if info is not None:
+            self.info = info
+
+        self._extra_kwargs(**dialect_kwargs)
+
+    table: Table
+
+    constraints: Set[Constraint]
+
+    foreign_keys: Set[ForeignKey]
+    """A collection of all :class:`_schema.ForeignKey` marker objects
+       associated with this :class:`_schema.Column`.
+
+       Each object is a member of a :class:`_schema.Table`-wide
+       :class:`_schema.ForeignKeyConstraint`.
+
+       .. seealso::
+
+           :attr:`_schema.Table.foreign_keys`
+
+    """
+
+    index: Optional[bool]
+    """The value of the :paramref:`_schema.Column.index` parameter.
+
+       Does not indicate if this :class:`_schema.Column` is actually indexed
+       or not; use :attr:`_schema.Table.indexes`.
+
+       .. seealso::
+
+           :attr:`_schema.Table.indexes`
+    """
+
+    unique: Optional[bool]
+    """The value of the :paramref:`_schema.Column.unique` parameter.
+
+       Does not indicate if this :class:`_schema.Column` is actually subject to
+       a unique constraint or not; use :attr:`_schema.Table.indexes` and
+       :attr:`_schema.Table.constraints`.
+
+       .. seealso::
+
+           :attr:`_schema.Table.indexes`
+
+           :attr:`_schema.Table.constraints`.
+
+    """
+
+    computed: Optional[Computed]
+
+    identity: Optional[Identity]
+
+    def _set_type(self, type_: TypeEngine[Any]) -> None:
+        assert self.type._isnull or type_ is self.type
+
+        self.type = type_
+        if isinstance(self.type, SchemaEventTarget):
+            self.type._set_parent_with_dispatch(self)
+        for impl in self.type._variant_mapping.values():
+            if isinstance(impl, SchemaEventTarget):
+                impl._set_parent_with_dispatch(self)
+
+    @HasMemoized.memoized_attribute
+    def _default_description_tuple(self) -> _DefaultDescriptionTuple:
+        """used by default.py -> _process_execute_defaults()"""
+
+        return _DefaultDescriptionTuple._from_column_default(self.default)
+
+    @HasMemoized.memoized_attribute
+    def _onupdate_description_tuple(self) -> _DefaultDescriptionTuple:
+        """used by default.py -> _process_execute_defaults()"""
+        return _DefaultDescriptionTuple._from_column_default(self.onupdate)
+
+    @util.memoized_property
+    def _gen_static_annotations_cache_key(self) -> bool:  # type: ignore
+        """special attribute used by cache key gen, if true, we will
+        use a static cache key for the annotations dictionary, else we
+        will generate a new cache key for annotations each time.
+
+        Added for #8790
+
+        """
+        return self.table is not None and self.table._is_table
+
+    def _extra_kwargs(self, **kwargs: Any) -> None:
+        self._validate_dialect_kwargs(kwargs)
+
+    def __str__(self) -> str:
+        if self.name is None:
+            return "(no name)"
+        elif self.table is not None:
+            if self.table.named_with_column:
+                return self.table.description + "." + self.description
+            else:
+                return self.description
+        else:
+            return self.description
+
+    def references(self, column: Column[Any]) -> bool:
+        """Return True if this Column references the given column via foreign
+        key."""
+
+        for fk in self.foreign_keys:
+            if fk.column.proxy_set.intersection(column.proxy_set):
+                return True
+        return False
+
+    def append_foreign_key(self, fk: ForeignKey) -> None:
+        fk._set_parent_with_dispatch(self)
+
+    def __repr__(self) -> str:
+        kwarg = []
+        if self.key != self.name:
+            kwarg.append("key")
+        if self.primary_key:
+            kwarg.append("primary_key")
+        if not self.nullable:
+            kwarg.append("nullable")
+        if self.onupdate:
+            kwarg.append("onupdate")
+        if self.default:
+            kwarg.append("default")
+        if self.server_default:
+            kwarg.append("server_default")
+        if self.comment:
+            kwarg.append("comment")
+        return "Column(%s)" % ", ".join(
+            [repr(self.name)]
+            + [repr(self.type)]
+            + [repr(x) for x in self.foreign_keys if x is not None]
+            + [repr(x) for x in self.constraints]
+            + [
+                (
+                    self.table is not None
+                    and "table=<%s>" % self.table.description
+                    or "table=None"
+                )
+            ]
+            + ["%s=%s" % (k, repr(getattr(self, k))) for k in kwarg]
+        )
+
+    def _set_parent(  # type: ignore[override]
+        self,
+        parent: SchemaEventTarget,
+        *,
+        all_names: Dict[str, Column[Any]],
+        allow_replacements: bool,
+        **kw: Any,
+    ) -> None:
+        table = parent
+        assert isinstance(table, Table)
+        if not self.name:
+            raise exc.ArgumentError(
+                "Column must be constructed with a non-blank name or "
+                "assign a non-blank .name before adding to a Table."
+            )
+
+        self._reset_memoizations()
+
+        if self.key is None:
+            self.key = self.name
+
+        existing = getattr(self, "table", None)
+        if existing is not None and existing is not table:
+            raise exc.ArgumentError(
+                f"Column object '{self.key}' already "
+                f"assigned to Table '{existing.description}'"
+            )
+
+        extra_remove = None
+        existing_col = None
+        conflicts_on = ""
+
+        if self.key in table._columns:
+            existing_col = table._columns[self.key]
+            if self.key == self.name:
+                conflicts_on = "name"
+            else:
+                conflicts_on = "key"
+        elif self.name in all_names:
+            existing_col = all_names[self.name]
+            extra_remove = {existing_col}
+            conflicts_on = "name"
+
+        if existing_col is not None:
+            if existing_col is not self:
+                if not allow_replacements:
+                    raise exc.DuplicateColumnError(
+                        f"A column with {conflicts_on} "
+                        f"""'{
+                            self.key if conflicts_on == 'key' else self.name
+                        }' """
+                        f"is already present in table '{table.name}'."
+                    )
+                for fk in existing_col.foreign_keys:
+                    table.foreign_keys.remove(fk)
+                    if fk.constraint in table.constraints:
+                        # this might have been removed
+                        # already, if it's a composite constraint
+                        # and more than one col being replaced
+                        table.constraints.remove(fk.constraint)
+
+        if extra_remove and existing_col is not None and self.key == self.name:
+            util.warn(
+                f'Column with user-specified key "{existing_col.key}" is '
+                "being replaced with "
+                f'plain named column "{self.name}", '
+                f'key "{existing_col.key}" is being removed.  If this is a '
+                "reflection operation, specify autoload_replace=False to "
+                "prevent this replacement."
+            )
+        table._columns.replace(self, extra_remove=extra_remove)
+        all_names[self.name] = self
+        self.table = table
+
+        if self._insert_sentinel:
+            if self.table._sentinel_column is not None:
+                raise exc.ArgumentError(
+                    "a Table may have only one explicit sentinel column"
+                )
+            self.table._sentinel_column = self
+
+        if self.primary_key:
+            table.primary_key._replace(self)
+        elif self.key in table.primary_key:
+            raise exc.ArgumentError(
+                f"Trying to redefine primary-key column '{self.key}' as a "
+                f"non-primary-key column on table '{table.fullname}'"
+            )
+
+        if self.index:
+            if isinstance(self.index, str):
+                raise exc.ArgumentError(
+                    "The 'index' keyword argument on Column is boolean only. "
+                    "To create indexes with a specific name, create an "
+                    "explicit Index object external to the Table."
+                )
+            table.append_constraint(
+                Index(
+                    None, self.key, unique=bool(self.unique), _column_flag=True
+                )
+            )
+
+        elif self.unique:
+            if isinstance(self.unique, str):
+                raise exc.ArgumentError(
+                    "The 'unique' keyword argument on Column is boolean "
+                    "only. To create unique constraints or indexes with a "
+                    "specific name, append an explicit UniqueConstraint to "
+                    "the Table's list of elements, or create an explicit "
+                    "Index object external to the Table."
+                )
+            table.append_constraint(
+                UniqueConstraint(self.key, _column_flag=True)
+            )
+
+        self._setup_on_memoized_fks(lambda fk: fk._set_remote_table(table))
+
+        if self.identity and (
+            isinstance(self.default, Sequence)
+            or isinstance(self.onupdate, Sequence)
+        ):
+            raise exc.ArgumentError(
+                "An column cannot specify both Identity and Sequence."
+            )
+
+    def _setup_on_memoized_fks(self, fn: Callable[..., Any]) -> None:
+        fk_keys = [
+            ((self.table.key, self.key), False),
+            ((self.table.key, self.name), True),
+        ]
+        for fk_key, link_to_name in fk_keys:
+            if fk_key in self.table.metadata._fk_memos:
+                for fk in self.table.metadata._fk_memos[fk_key]:
+                    if fk.link_to_name is link_to_name:
+                        fn(fk)
+
+    def _on_table_attach(self, fn: Callable[..., Any]) -> None:
+        if self.table is not None:
+            fn(self, self.table)
+        else:
+            event.listen(self, "after_parent_attach", fn)
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`_schema.Column.copy` method is deprecated "
+        "and will be removed in a future release.",
+    )
+    def copy(self, **kw: Any) -> Column[Any]:
+        return self._copy(**kw)
+
+    def _copy(self, **kw: Any) -> Column[Any]:
+        """Create a copy of this ``Column``, uninitialized.
+
+        This is used in :meth:`_schema.Table.to_metadata` and by the ORM.
+
+        """
+
+        # Constraint objects plus non-constraint-bound ForeignKey objects
+        args: List[SchemaItem] = [
+            c._copy(**kw) for c in self.constraints if not c._type_bound
+        ] + [c._copy(**kw) for c in self.foreign_keys if not c.constraint]
+
+        # ticket #5276
+        column_kwargs = {}
+        for dialect_name in self.dialect_options:
+            dialect_options = self.dialect_options[dialect_name]._non_defaults
+            for (
+                dialect_option_key,
+                dialect_option_value,
+            ) in dialect_options.items():
+                column_kwargs[dialect_name + "_" + dialect_option_key] = (
+                    dialect_option_value
+                )
+
+        server_default = self.server_default
+        server_onupdate = self.server_onupdate
+        if isinstance(server_default, (Computed, Identity)):
+            # TODO: likely should be copied in all cases
+            # TODO: if a Sequence, we would need to transfer the Sequence
+            # .metadata as well
+            args.append(server_default._copy(**kw))
+            server_default = server_onupdate = None
+
+        type_ = self.type
+        if isinstance(type_, SchemaEventTarget):
+            type_ = type_.copy(**kw)
+
+        # TODO: DefaultGenerator is not copied here!  it's just used again
+        # with _set_parent() pointing to the old column.  see the new
+        # use of _copy() in the new _merge() method
+
+        c = self._constructor(
+            name=self.name,
+            type_=type_,
+            key=self.key,
+            primary_key=self.primary_key,
+            unique=self.unique,
+            system=self.system,
+            # quote=self.quote,  # disabled 2013-08-27 (commit 031ef080)
+            index=self.index,
+            autoincrement=self.autoincrement,
+            default=self.default,
+            server_default=server_default,
+            onupdate=self.onupdate,
+            server_onupdate=server_onupdate,
+            doc=self.doc,
+            comment=self.comment,
+            _omit_from_statements=self._omit_from_statements,
+            insert_sentinel=self._insert_sentinel,
+            *args,
+            **column_kwargs,
+        )
+
+        # copy the state of "nullable" exactly, to accommodate for
+        # ORM flipping the .nullable flag directly
+        c.nullable = self.nullable
+        c._user_defined_nullable = self._user_defined_nullable
+
+        return self._schema_item_copy(c)
+
+    def _merge(self, other: Column[Any]) -> None:
+        """merge the elements of another column into this one.
+
+        this is used by ORM pep-593 merge and will likely need a lot
+        of fixes.
+
+
+        """
+
+        if self.primary_key:
+            other.primary_key = True
+
+        if self.autoincrement != "auto" and other.autoincrement == "auto":
+            other.autoincrement = self.autoincrement
+
+        if self.system:
+            other.system = self.system
+
+        if self.info:
+            other.info.update(self.info)
+
+        type_ = self.type
+        if not type_._isnull and other.type._isnull:
+            if isinstance(type_, SchemaEventTarget):
+                type_ = type_.copy()
+
+            other.type = type_
+
+            if isinstance(type_, SchemaEventTarget):
+                type_._set_parent_with_dispatch(other)
+
+            for impl in type_._variant_mapping.values():
+                if isinstance(impl, SchemaEventTarget):
+                    impl._set_parent_with_dispatch(other)
+
+        if (
+            self._user_defined_nullable is not NULL_UNSPECIFIED
+            and other._user_defined_nullable is NULL_UNSPECIFIED
+        ):
+            other.nullable = self.nullable
+            other._user_defined_nullable = self._user_defined_nullable
+
+        if self.default is not None and other.default is None:
+            new_default = self.default._copy()
+            new_default._set_parent(other)
+
+        if self.server_default and other.server_default is None:
+            new_server_default = self.server_default
+            if isinstance(new_server_default, FetchedValue):
+                new_server_default = new_server_default._copy()
+                new_server_default._set_parent(other)
+            else:
+                other.server_default = new_server_default
+
+        if self.server_onupdate and other.server_onupdate is None:
+            new_server_onupdate = self.server_onupdate
+            new_server_onupdate = new_server_onupdate._copy()
+            new_server_onupdate._set_parent(other)
+
+        if self.onupdate and other.onupdate is None:
+            new_onupdate = self.onupdate._copy()
+            new_onupdate._set_parent(other)
+
+        if self.index in (True, False) and other.index is None:
+            other.index = self.index
+
+        if self.unique in (True, False) and other.unique is None:
+            other.unique = self.unique
+
+        if self.doc and other.doc is None:
+            other.doc = self.doc
+
+        if self.comment and other.comment is None:
+            other.comment = self.comment
+
+        for const in self.constraints:
+            if not const._type_bound:
+                new_const = const._copy()
+                new_const._set_parent(other)
+
+        for fk in self.foreign_keys:
+            if not fk.constraint:
+                new_fk = fk._copy()
+                new_fk._set_parent(other)
+
+    def _make_proxy(
+        self,
+        selectable: FromClause,
+        primary_key: ColumnSet,
+        foreign_keys: Set[KeyedColumnElement[Any]],
+        name: Optional[str] = None,
+        key: Optional[str] = None,
+        name_is_truncatable: bool = False,
+        compound_select_cols: Optional[
+            _typing_Sequence[ColumnElement[Any]]
+        ] = None,
+        **kw: Any,
+    ) -> Tuple[str, ColumnClause[_T]]:
+        """Create a *proxy* for this column.
+
+        This is a copy of this ``Column`` referenced by a different parent
+        (such as an alias or select statement).  The column should
+        be used only in select scenarios, as its full DDL/default
+        information is not transferred.
+
+        """
+
+        fk = [
+            ForeignKey(
+                col if col is not None else f._colspec,
+                _unresolvable=col is None,
+                _constraint=f.constraint,
+            )
+            for f, col in [
+                (fk, fk._resolve_column(raiseerr=False))
+                for fk in self.foreign_keys
+            ]
+        ]
+
+        if name is None and self.name is None:
+            raise exc.InvalidRequestError(
+                "Cannot initialize a sub-selectable"
+                " with this Column object until its 'name' has "
+                "been assigned."
+            )
+        try:
+            c = self._constructor(
+                (
+                    coercions.expect(
+                        roles.TruncatedLabelRole, name if name else self.name
+                    )
+                    if name_is_truncatable
+                    else (name or self.name)
+                ),
+                self.type,
+                # this may actually be ._proxy_key when the key is incoming
+                key=key if key else name if name else self.key,
+                primary_key=self.primary_key,
+                nullable=self.nullable,
+                _proxies=(
+                    list(compound_select_cols)
+                    if compound_select_cols
+                    else [self]
+                ),
+                *fk,
+            )
+        except TypeError as err:
+            raise TypeError(
+                "Could not create a copy of this %r object.  "
+                "Ensure the class includes a _constructor() "
+                "attribute or method which accepts the "
+                "standard Column constructor arguments, or "
+                "references the Column class itself." % self.__class__
+            ) from err
+
+        c.table = selectable
+        c._propagate_attrs = selectable._propagate_attrs
+        if selectable._is_clone_of is not None:
+            c._is_clone_of = selectable._is_clone_of.columns.get(c.key)
+
+        if self.primary_key:
+            primary_key.add(c)
+
+        if fk:
+            foreign_keys.update(fk)  # type: ignore
+
+        return c.key, c
+
+
+def insert_sentinel(
+    name: Optional[str] = None,
+    type_: Optional[_TypeEngineArgument[_T]] = None,
+    *,
+    default: Optional[Any] = None,
+    omit_from_statements: bool = True,
+) -> Column[Any]:
+    """Provides a surrogate :class:`_schema.Column` that will act as a
+    dedicated insert :term:`sentinel` column, allowing efficient bulk
+    inserts with deterministic RETURNING sorting for tables that
+    don't otherwise have qualifying primary key configurations.
+
+    Adding this column to a :class:`.Table` object requires that a
+    corresponding database table actually has this column present, so if adding
+    it to an existing model, existing database tables would need to be migrated
+    (e.g. using ALTER TABLE or similar) to include this column.
+
+    For background on how this object is used, see the section
+    :ref:`engine_insertmanyvalues_sentinel_columns` as part of the
+    section :ref:`engine_insertmanyvalues`.
+
+    The :class:`_schema.Column` returned will be a nullable integer column by
+    default and make use of a sentinel-specific default generator used only in
+    "insertmanyvalues" operations.
+
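+    E.g., a minimal sketch; the table and column names are illustrative::
+
+        t = Table(
+            "some_table",
+            metadata,
+            Column("x", Integer),
+            insert_sentinel("sentinel"),
+        )
+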
+    .. seealso::
+
+        :func:`_orm.orm_insert_sentinel`
+
+        :paramref:`_schema.Column.insert_sentinel`
+
+        :ref:`engine_insertmanyvalues`
+
+        :ref:`engine_insertmanyvalues_sentinel_columns`
+
+
+    .. versionadded:: 2.0.10
+
+    """
+    return Column(
+        name=name,
+        type_=type_api.INTEGERTYPE if type_ is None else type_,
+        default=(
+            default if default is not None else _InsertSentinelColumnDefault()
+        ),
+        _omit_from_statements=omit_from_statements,
+        insert_sentinel=True,
+    )
+
+
+class ForeignKey(DialectKWArgs, SchemaItem):
+    """Defines a dependency between two columns.
+
+    ``ForeignKey`` is specified as an argument to a :class:`_schema.Column`
+    object,
+    e.g.::
+
+        t = Table(
+            "remote_table",
+            metadata,
+            Column("remote_id", ForeignKey("main_table.id")),
+        )
+
+    Note that ``ForeignKey`` is only a marker object that defines
+    a dependency between two columns.   The actual constraint
+    is in all cases represented by the :class:`_schema.ForeignKeyConstraint`
+    object.   This object will be generated automatically when
+    a ``ForeignKey`` is associated with a :class:`_schema.Column` which
+    in turn is associated with a :class:`_schema.Table`.   Conversely,
+    when :class:`_schema.ForeignKeyConstraint` is applied to a
+    :class:`_schema.Table`,
+    ``ForeignKey`` markers are automatically generated to be
+    present on each associated :class:`_schema.Column`, which are also
+    associated with the constraint object.
+
+    Note that you cannot define a "composite" foreign key constraint,
+    that is a constraint between a grouping of multiple parent/child
+    columns, using ``ForeignKey`` objects.   To define this grouping,
+    the :class:`_schema.ForeignKeyConstraint` object must be used, and applied
+    to the :class:`_schema.Table`.   The associated ``ForeignKey`` objects
+    are created automatically.
+
+    The ``ForeignKey`` objects associated with an individual
+    :class:`_schema.Column`
+    object are available in the ``foreign_keys`` collection
+    of that column.
+
+    Further examples of foreign key configuration are in
+    :ref:`metadata_foreignkeys`.
+
+    """
+
+    __visit_name__ = "foreign_key"
+
+    parent: Column[Any]
+
+    _table_column: Optional[Column[Any]]
+
+    def __init__(
+        self,
+        column: _DDLColumnArgument,
+        _constraint: Optional[ForeignKeyConstraint] = None,
+        use_alter: bool = False,
+        name: _ConstraintNameArgument = None,
+        onupdate: Optional[str] = None,
+        ondelete: Optional[str] = None,
+        deferrable: Optional[bool] = None,
+        initially: Optional[str] = None,
+        link_to_name: bool = False,
+        match: Optional[str] = None,
+        info: Optional[_InfoType] = None,
+        comment: Optional[str] = None,
+        _unresolvable: bool = False,
+        **dialect_kw: Any,
+    ):
+        r"""
+        Construct a column-level FOREIGN KEY.
+
+        The :class:`_schema.ForeignKey` object when constructed generates a
+        :class:`_schema.ForeignKeyConstraint`
+        which is associated with the parent
+        :class:`_schema.Table` object's collection of constraints.
+
+        :param column: A single target column for the key relationship. A
+            :class:`_schema.Column` object or a column name as a string:
+            ``tablename.columnkey`` or ``schema.tablename.columnkey``.
+            ``columnkey`` is the ``key`` which has been assigned to the column
+            (defaults to the column name itself), unless ``link_to_name`` is
+            ``True`` in which case the rendered name of the column is used.
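+
+            E.g., illustrative string forms of the target column::
+
+                ForeignKey("user.id")
+                ForeignKey("dbo.user.id")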
+
+        :param name: Optional string. An in-database name for the key if
+            ``constraint`` is not provided.
+
+        :param onupdate: Optional string. If set, emit ON UPDATE <value> when
+            issuing DDL for this constraint. Typical values include CASCADE,
+            SET NULL and RESTRICT.
+
+        :param ondelete: Optional string. If set, emit ON DELETE <value> when
+            issuing DDL for this constraint. Typical values include CASCADE,
+            SET NULL and RESTRICT.
+
+        :param deferrable: Optional bool. If set, emit DEFERRABLE or NOT
+            DEFERRABLE when issuing DDL for this constraint.
+
+        :param initially: Optional string. If set, emit INITIALLY <value> when
+            issuing DDL for this constraint.
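+
+            E.g., an illustrative combination of several of the above
+            options::
+
+                ForeignKey(
+                    "user.id",
+                    ondelete="CASCADE",
+                    deferrable=True,
+                    initially="DEFERRED",
+                )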
+
+        :param link_to_name: if True, the string name given in ``column`` is
+            the rendered name of the referenced column, not its locally
+            assigned ``key``.
+
+        :param use_alter: passed to the underlying
+            :class:`_schema.ForeignKeyConstraint`
+            to indicate the constraint should
+            be generated/dropped externally from the CREATE TABLE/ DROP TABLE
+            statement.  See :paramref:`_schema.ForeignKeyConstraint.use_alter`
+            for further description.
+
+            .. seealso::
+
+                :paramref:`_schema.ForeignKeyConstraint.use_alter`
+
+                :ref:`use_alter`
+
+        :param match: Optional string. If set, emit MATCH <value> when issuing
+            DDL for this constraint. Typical values include SIMPLE, PARTIAL
+            and FULL.
+
+        :param info: Optional data dictionary which will be populated into the
+            :attr:`.SchemaItem.info` attribute of this object.
+
+        :param comment: Optional string that will render an SQL comment on
+            foreign key constraint creation.
+
+            .. versionadded:: 2.0
+
+        :param \**dialect_kw:  Additional keyword arguments are dialect
+            specific, and passed in the form ``<dialectname>_<argname>``.  The
+            arguments are ultimately handled by a corresponding
+            :class:`_schema.ForeignKeyConstraint`.
+            See the documentation regarding
+            an individual dialect at :ref:`dialect_toplevel` for detail on
+            documented arguments.
+
+        """
+
+        self._colspec = coercions.expect(roles.DDLReferredColumnRole, column)
+        self._unresolvable = _unresolvable
+
+        if isinstance(self._colspec, str):
+            self._table_column = None
+        else:
+            self._table_column = self._colspec
+
+            if not isinstance(
+                self._table_column.table, (type(None), TableClause)
+            ):
+                raise exc.ArgumentError(
+                    "ForeignKey received Column not bound "
+                    "to a Table, got: %r" % self._table_column.table
+                )
+
+        # the linked ForeignKeyConstraint.
+        # ForeignKey will create this when parent Column
+        # is attached to a Table, *or* ForeignKeyConstraint
+        # object passes itself in when creating ForeignKey
+        # markers.
+        self.constraint = _constraint
+
+        # .parent is not Optional under normal use
+        self.parent = None  # type: ignore
+
+        self.use_alter = use_alter
+        self.name = name
+        self.onupdate = onupdate
+        self.ondelete = ondelete
+        self.deferrable = deferrable
+        self.initially = initially
+        self.link_to_name = link_to_name
+        self.match = match
+        self.comment = comment
+        if info:
+            self.info = info
+        self._unvalidated_dialect_kw = dialect_kw
+
+    def __repr__(self) -> str:
+        return "ForeignKey(%r)" % self._get_colspec()
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`_schema.ForeignKey.copy` method is deprecated "
+        "and will be removed in a future release.",
+    )
+    def copy(self, *, schema: Optional[str] = None, **kw: Any) -> ForeignKey:
+        return self._copy(schema=schema, **kw)
+
+    def _copy(self, *, schema: Optional[str] = None, **kw: Any) -> ForeignKey:
+        """Produce a copy of this :class:`_schema.ForeignKey` object.
+
+        The new :class:`_schema.ForeignKey` will not be bound
+        to any :class:`_schema.Column`.
+
+        This method is usually used by the internal
+        copy procedures of :class:`_schema.Column`, :class:`_schema.Table`,
+        and :class:`_schema.MetaData`.
+
+        :param schema: The returned :class:`_schema.ForeignKey` will
+          reference the original table and column name, qualified
+          by the given string schema name.
+
+        """
+        fk = ForeignKey(
+            self._get_colspec(schema=schema),
+            use_alter=self.use_alter,
+            name=self.name,
+            onupdate=self.onupdate,
+            ondelete=self.ondelete,
+            deferrable=self.deferrable,
+            initially=self.initially,
+            link_to_name=self.link_to_name,
+            match=self.match,
+            comment=self.comment,
+            **self._unvalidated_dialect_kw,
+        )
+        return self._schema_item_copy(fk)
+
+    def _get_colspec(
+        self,
+        schema: Optional[
+            Union[
+                str,
+                Literal[SchemaConst.RETAIN_SCHEMA, SchemaConst.BLANK_SCHEMA],
+            ]
+        ] = None,
+        table_name: Optional[str] = None,
+        _is_copy: bool = False,
+    ) -> str:
+        """Return a string based 'column specification' for this
+        :class:`_schema.ForeignKey`.
+
+        This is usually the equivalent of the string-based "tablename.colname"
+        argument first passed to the object's constructor.
+
+        """
+        if schema not in (None, RETAIN_SCHEMA):
+            _schema, tname, colname = self._column_tokens
+            if table_name is not None:
+                tname = table_name
+            if schema is BLANK_SCHEMA:
+                return "%s.%s" % (tname, colname)
+            else:
+                return "%s.%s.%s" % (schema, tname, colname)
+        elif table_name:
+            schema, tname, colname = self._column_tokens
+            if schema:
+                return "%s.%s.%s" % (schema, table_name, colname)
+            else:
+                return "%s.%s" % (table_name, colname)
+        elif self._table_column is not None:
+            if self._table_column.table is None:
+                if _is_copy:
+                    raise exc.InvalidRequestError(
+                        f"Can't copy ForeignKey object which refers to "
+                        f"non-table bound Column {self._table_column!r}"
+                    )
+                else:
+                    return self._table_column.key
+            return "%s.%s" % (
+                self._table_column.table.fullname,
+                self._table_column.key,
+            )
+        else:
+            assert isinstance(self._colspec, str)
+            return self._colspec
+
+    @property
+    def _referred_schema(self) -> Optional[str]:
+        return self._column_tokens[0]
+
+    def _table_key(self) -> Any:
+        if self._table_column is not None:
+            if self._table_column.table is None:
+                return None
+            else:
+                return self._table_column.table.key
+        else:
+            schema, tname, colname = self._column_tokens
+            return _get_table_key(tname, schema)
+
+    target_fullname = property(_get_colspec)
+
+    def references(self, table: Table) -> bool:
+        """Return True if the given :class:`_schema.Table`
+        is referenced by this
+        :class:`_schema.ForeignKey`."""
+
+        return table.corresponding_column(self.column) is not None
+
+    def get_referent(self, table: FromClause) -> Optional[Column[Any]]:
+        """Return the :class:`_schema.Column` in the given
+        :class:`_schema.Table` (or any :class:`.FromClause`)
+        referenced by this :class:`_schema.ForeignKey`.
+
+        Returns None if this :class:`_schema.ForeignKey`
+        does not reference the given
+        :class:`_schema.Table`.
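+
+        E.g., a sketch assuming hypothetical ``users`` and ``addresses``
+        tables where ``addresses.user_id`` references ``users.id``::
+
+            fk = list(addresses.c.user_id.foreign_keys)[0]
+            fk.get_referent(users)  # returns users.c.id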
+
+        """
+        # our column is a Column, and any subquery etc. proxying us
+        # would be doing so via another Column, so that's what would
+        # be returned here
+        return table.columns.corresponding_column(self.column)  # type: ignore
+
+    @util.memoized_property
+    def _column_tokens(self) -> Tuple[Optional[str], str, Optional[str]]:
+        """parse a string-based _colspec into its component parts."""
+
+        m = self._get_colspec().split(".")
+        # str.split() never returns None, so guard against an empty or
+        # leading-dot column specification instead.
+        if not m[0]:
+            raise exc.ArgumentError(
+                f"Invalid foreign key column specification: {self._colspec}"
+            )
+        if len(m) == 1:
+            tname = m.pop()
+            colname = None
+        else:
+            colname = m.pop()
+            tname = m.pop()
+
+        # A FK between column 'bar' and table 'foo' can be
+        # specified as 'foo', 'foo.bar', 'dbo.foo.bar',
+        # 'otherdb.dbo.foo.bar'. Once we have the column name and
+        # the table name, treat everything else as the schema
+        # name. Some databases (e.g. Sybase) support
+        # inter-database foreign keys. See ticket #1341 and --
+        # indirectly related -- ticket #594. This assumes that '.'
+        # will never appear *within* any component of the FK.
+
+        if len(m) > 0:
+            schema = ".".join(m)
+        else:
+            schema = None
+        return schema, tname, colname
+
+    def _resolve_col_tokens(self) -> Tuple[Table, str, Optional[str]]:
+        if self.parent is None:
+            raise exc.InvalidRequestError(
+                "this ForeignKey object does not yet have a "
+                "parent Column associated with it."
+            )
+
+        elif self.parent.table is None:
+            raise exc.InvalidRequestError(
+                "this ForeignKey's parent column is not yet associated "
+                "with a Table."
+            )
+
+        parenttable = self.parent.table
+
+        if self._unresolvable:
+            schema, tname, colname = self._column_tokens
+            tablekey = _get_table_key(tname, schema)
+            return parenttable, tablekey, colname
+
+        # assertion
+        # basically Column._make_proxy() sends the actual
+        # target Column to the ForeignKey object, so the
+        # string resolution here is never called.
+        for c in self.parent.base_columns:
+            if isinstance(c, Column):
+                assert c.table is parenttable
+                break
+        else:
+            assert False
+        ######################
+
+        schema, tname, colname = self._column_tokens
+
+        if schema is None and parenttable.metadata.schema is not None:
+            schema = parenttable.metadata.schema
+
+        tablekey = _get_table_key(tname, schema)
+        return parenttable, tablekey, colname
+
+    def _link_to_col_by_colstring(
+        self, parenttable: Table, table: Table, colname: Optional[str]
+    ) -> Column[Any]:
+        _column = None
+        if colname is None:
+            # colname is None in the case that ForeignKey argument
+            # was specified as table name only, in which case we
+            # match the column name to the same column on the
+            # parent.
+            # this use case wasn't working in later 1.x series
+            # as it had no test coverage; fixed in 2.0
+            parent = self.parent
+            assert parent is not None
+            key = parent.key
+            _column = table.c.get(key, None)
+        elif self.link_to_name:
+            key = colname
+            for c in table.c:
+                if c.name == colname:
+                    _column = c
+        else:
+            key = colname
+            _column = table.c.get(colname, None)
+
+        if _column is None:
+            raise exc.NoReferencedColumnError(
+                "Could not initialize target column "
+                f"for ForeignKey '{self._colspec}' "
+                f"on table '{parenttable.name}': "
+                f"table '{table.name}' has no column named '{key}'",
+                table.name,
+                key,
+            )
+
+        return _column
+
+    def _set_target_column(self, column: Column[Any]) -> None:
+        assert self.parent is not None
+
+        # propagate TypeEngine to parent if it didn't have one
+        if self.parent.type._isnull:
+            self.parent.type = column.type
+
+        # super-edgy case, if other FKs point to our column,
+        # they'd get the type propagated out also.
+
+        def set_type(fk: ForeignKey) -> None:
+            if fk.parent.type._isnull:
+                fk.parent.type = column.type
+
+        self.parent._setup_on_memoized_fks(set_type)
+
+        self.column = column  # type: ignore
+
+    @util.ro_memoized_property
+    def column(self) -> Column[Any]:
+        """Return the target :class:`_schema.Column` referenced by this
+        :class:`_schema.ForeignKey`.
+
+        If no target column has been established, an exception
+        is raised.
+
+        """
+
+        return self._resolve_column()
+
+    @overload
+    def _resolve_column(
+        self, *, raiseerr: Literal[True] = ...
+    ) -> Column[Any]: ...
+
+    @overload
+    def _resolve_column(
+        self, *, raiseerr: bool = ...
+    ) -> Optional[Column[Any]]: ...
+
+    def _resolve_column(
+        self, *, raiseerr: bool = True
+    ) -> Optional[Column[Any]]:
+        _column: Column[Any]
+
+        if isinstance(self._colspec, str):
+            parenttable, tablekey, colname = self._resolve_col_tokens()
+
+            if self._unresolvable or tablekey not in parenttable.metadata:
+                if not raiseerr:
+                    return None
+                raise exc.NoReferencedTableError(
+                    f"Foreign key associated with column "
+                    f"'{self.parent}' could not find "
+                    f"table '{tablekey}' with which to generate a "
+                    f"foreign key to target column '{colname}'",
+                    tablekey,
+                )
+            elif parenttable.key not in parenttable.metadata:
+                if not raiseerr:
+                    return None
+                raise exc.InvalidRequestError(
+                    f"Table {parenttable} is no longer associated with its "
+                    "parent MetaData"
+                )
+            else:
+                table = parenttable.metadata.tables[tablekey]
+                return self._link_to_col_by_colstring(
+                    parenttable, table, colname
+                )
+
+        elif hasattr(self._colspec, "__clause_element__"):
+            _column = self._colspec.__clause_element__()
+            return _column
+        else:
+            _column = self._colspec
+            return _column
+
+    def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None:
+        assert isinstance(parent, Column)
+
+        if self.parent is not None and self.parent is not parent:
+            raise exc.InvalidRequestError(
+                "This ForeignKey already has a parent !"
+            )
+        self.parent = parent
+        self.parent.foreign_keys.add(self)
+        self.parent._on_table_attach(self._set_table)
+
+    def _set_remote_table(self, table: Table) -> None:
+        parenttable, _, colname = self._resolve_col_tokens()
+        _column = self._link_to_col_by_colstring(parenttable, table, colname)
+        self._set_target_column(_column)
+        assert self.constraint is not None
+        self.constraint._validate_dest_table(table)
+
+    def _remove_from_metadata(self, metadata: MetaData) -> None:
+        parenttable, table_key, colname = self._resolve_col_tokens()
+        fk_key = (table_key, colname)
+
+        if self in metadata._fk_memos[fk_key]:
+            # TODO: no test coverage for self not in memos
+            metadata._fk_memos[fk_key].remove(self)
+
+    def _set_table(self, column: Column[Any], table: Table) -> None:
+        # standalone ForeignKey - create ForeignKeyConstraint
+        # on the hosting Table when attached to the Table.
+        assert isinstance(table, Table)
+        if self.constraint is None:
+            self.constraint = ForeignKeyConstraint(
+                [],
+                [],
+                use_alter=self.use_alter,
+                name=self.name,
+                onupdate=self.onupdate,
+                ondelete=self.ondelete,
+                deferrable=self.deferrable,
+                initially=self.initially,
+                match=self.match,
+                comment=self.comment,
+                **self._unvalidated_dialect_kw,
+            )
+            self.constraint._append_element(column, self)
+            self.constraint._set_parent_with_dispatch(table)
+        table.foreign_keys.add(self)
+        # set up remote ".column" attribute, or a note to pick it
+        # up when the other Table/Column shows up
+        if isinstance(self._colspec, str):
+            parenttable, table_key, colname = self._resolve_col_tokens()
+            fk_key = (table_key, colname)
+            if table_key in parenttable.metadata.tables:
+                table = parenttable.metadata.tables[table_key]
+                try:
+                    _column = self._link_to_col_by_colstring(
+                        parenttable, table, colname
+                    )
+                except exc.NoReferencedColumnError:
+                    # this is OK, we'll try later
+                    pass
+                else:
+                    self._set_target_column(_column)
+
+            parenttable.metadata._fk_memos[fk_key].append(self)
+        elif hasattr(self._colspec, "__clause_element__"):
+            _column = self._colspec.__clause_element__()
+            self._set_target_column(_column)
+        else:
+            _column = self._colspec
+            self._set_target_column(_column)
+
+
+if TYPE_CHECKING:
+
+    def default_is_sequence(
+        obj: Optional[DefaultGenerator],
+    ) -> TypeGuard[Sequence]: ...
+
+    def default_is_clause_element(
+        obj: Optional[DefaultGenerator],
+    ) -> TypeGuard[ColumnElementColumnDefault]: ...
+
+    def default_is_scalar(
+        obj: Optional[DefaultGenerator],
+    ) -> TypeGuard[ScalarElementColumnDefault]: ...
+
+else:
+    default_is_sequence = operator.attrgetter("is_sequence")
+
+    default_is_clause_element = operator.attrgetter("is_clause_element")
+
+    default_is_scalar = operator.attrgetter("is_scalar")
+
+
+class DefaultGenerator(Executable, SchemaItem):
+    """Base class for column *default* values.
+
+    This object is only present on column.default or column.onupdate.
+    It's not valid as a server default.
+
+    """
+
+    __visit_name__ = "default_generator"
+
+    _is_default_generator = True
+    is_sequence = False
+    is_identity = False
+    is_server_default = False
+    is_clause_element = False
+    is_callable = False
+    is_scalar = False
+    has_arg = False
+    is_sentinel = False
+    column: Optional[Column[Any]]
+
+    def __init__(self, for_update: bool = False) -> None:
+        self.for_update = for_update
+
+    def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None:
+        if TYPE_CHECKING:
+            assert isinstance(parent, Column)
+        self.column = parent
+        if self.for_update:
+            self.column.onupdate = self
+        else:
+            self.column.default = self
+
+    def _copy(self) -> DefaultGenerator:
+        raise NotImplementedError()
+
+    def _execute_on_connection(
+        self,
+        connection: Connection,
+        distilled_params: _CoreMultiExecuteParams,
+        execution_options: CoreExecuteOptionsParameter,
+    ) -> Any:
+        util.warn_deprecated(
+            "Using the .execute() method to invoke a "
+            "DefaultGenerator object is deprecated; please use "
+            "the .scalar() method.",
+            "2.0",
+        )
+        return self._execute_on_scalar(
+            connection, distilled_params, execution_options
+        )
+
+    def _execute_on_scalar(
+        self,
+        connection: Connection,
+        distilled_params: _CoreMultiExecuteParams,
+        execution_options: CoreExecuteOptionsParameter,
+    ) -> Any:
+        return connection._execute_default(
+            self, distilled_params, execution_options
+        )
+
+
+class ColumnDefault(DefaultGenerator, ABC):
+    """A plain default value on a column.
+
+    This could correspond to a constant, a callable function,
+    or a SQL clause.
+
+    :class:`.ColumnDefault` is generated automatically
+    whenever the ``default`` or ``onupdate`` arguments of
+    :class:`_schema.Column` are used.  A :class:`.ColumnDefault`
+    can be passed positionally as well.
+
+    For example, the following::
+
+        Column("foo", Integer, default=50)
+
+    Is equivalent to::
+
+        Column("foo", Integer, ColumnDefault(50))
+
+    """
+
+    arg: Any
+
+    @overload
+    def __new__(
+        cls, arg: Callable[..., Any], for_update: bool = ...
+    ) -> CallableColumnDefault: ...
+
+    @overload
+    def __new__(
+        cls, arg: ColumnElement[Any], for_update: bool = ...
+    ) -> ColumnElementColumnDefault: ...
+
+    # if I return ScalarElementColumnDefault here, which is what's actually
+    # returned, mypy complains that
+    # overloads overlap w/ incompatible return types.
+    @overload
+    def __new__(cls, arg: object, for_update: bool = ...) -> ColumnDefault: ...
+
+    def __new__(
+        cls, arg: Any = None, for_update: bool = False
+    ) -> ColumnDefault:
+        """Construct a new :class:`.ColumnDefault`.
+
+
+        :param arg: argument representing the default value.
+         May be one of the following:
+
+         * a plain non-callable Python value, such as a
+           string, integer, boolean, or other simple type.
+           The default value will be used as is each time.
+         * a SQL expression, that is one which derives from
+           :class:`_expression.ColumnElement`.  The SQL expression will
+           be rendered into the INSERT or UPDATE statement,
+           or in the case of a primary key column when
+           RETURNING is not used may be
+           pre-executed before an INSERT within a SELECT.
+         * A Python callable.  The function will be invoked for each
+           new row subject to an INSERT or UPDATE.
+           The callable must accept exactly
+           zero or one positional arguments.  The one-argument form
+           will receive an instance of the :class:`.ExecutionContext`,
+           which provides contextual information as to the current
+           :class:`_engine.Connection` in use as well as the current
+           statement and parameters.
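+
+         For example, a one-argument callable may derive its value from the
+         statement's other parameters using
+         :meth:`.DefaultExecutionContext.get_current_parameters`; a sketch,
+         where the ``counter`` column name is hypothetical::
+
+             def mydefault(ctx):
+                 return ctx.get_current_parameters()["counter"] + 12
+
+             Column("counter_plus_twelve", Integer, default=mydefault)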
+
+        """
+
+        if isinstance(arg, FetchedValue):
+            raise exc.ArgumentError(
+                "ColumnDefault may not be a server-side default type."
+            )
+        elif callable(arg):
+            cls = CallableColumnDefault
+        elif isinstance(arg, ClauseElement):
+            cls = ColumnElementColumnDefault
+        elif arg is not None:
+            cls = ScalarElementColumnDefault
+
+        return object.__new__(cls)
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({self.arg!r})"
+
+
+class ScalarElementColumnDefault(ColumnDefault):
+    """default generator for a fixed scalar Python value
+
+    .. versionadded:: 2.0
+
+    """
+
+    is_scalar = True
+    has_arg = True
+
+    def __init__(self, arg: Any, for_update: bool = False) -> None:
+        self.for_update = for_update
+        self.arg = arg
+
+    def _copy(self) -> ScalarElementColumnDefault:
+        return ScalarElementColumnDefault(
+            arg=self.arg, for_update=self.for_update
+        )
+
+
+class _InsertSentinelColumnDefault(ColumnDefault):
+    """Default generator that's specific to the use of a "sentinel" column
+    when using the insertmanyvalues feature.
+
+    This default is used as part of the :func:`_schema.insert_sentinel`
+    construct.
+
+    """
+
+    is_sentinel = True
+    for_update = False
+    arg = None
+
+    def __new__(cls) -> _InsertSentinelColumnDefault:
+        return object.__new__(cls)
+
+    def __init__(self) -> None:
+        pass
+
+    def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None:
+        col = cast("Column[Any]", parent)
+        if not col._insert_sentinel:
+            raise exc.ArgumentError(
+                "The _InsertSentinelColumnDefault may only be applied to a "
+                "Column marked as insert_sentinel=True"
+            )
+        elif not col.nullable:
+            raise exc.ArgumentError(
+                "The _InsertSentinelColumnDefault may only be applied to a "
+                "Column that is nullable"
+            )
+
+        super()._set_parent(parent, **kw)
+
+    def _copy(self) -> _InsertSentinelColumnDefault:
+        return _InsertSentinelColumnDefault()
+
+
+_SQLExprDefault = Union["ColumnElement[Any]", "TextClause"]
+
+
+class ColumnElementColumnDefault(ColumnDefault):
+    """default generator for a SQL expression
+
+    .. versionadded:: 2.0
+
+    """
+
+    is_clause_element = True
+    has_arg = True
+    arg: _SQLExprDefault
+
+    def __init__(
+        self,
+        arg: _SQLExprDefault,
+        for_update: bool = False,
+    ) -> None:
+        self.for_update = for_update
+        self.arg = arg
+
+    def _copy(self) -> ColumnElementColumnDefault:
+        return ColumnElementColumnDefault(
+            arg=self.arg, for_update=self.for_update
+        )
+
+    @util.memoized_property
+    @util.preload_module("sqlalchemy.sql.sqltypes")
+    def _arg_is_typed(self) -> bool:
+        sqltypes = util.preloaded.sql_sqltypes
+
+        return not isinstance(self.arg.type, sqltypes.NullType)
+
+
+class _CallableColumnDefaultProtocol(Protocol):
+    def __call__(self, context: ExecutionContext) -> Any: ...
+
+
+class CallableColumnDefault(ColumnDefault):
+    """default generator for a callable Python function
+
+    .. versionadded:: 2.0
+
+    """
+
+    is_callable = True
+    arg: _CallableColumnDefaultProtocol
+    has_arg = True
+
+    def __init__(
+        self,
+        arg: Union[_CallableColumnDefaultProtocol, Callable[[], Any]],
+        for_update: bool = False,
+    ) -> None:
+        self.for_update = for_update
+        self.arg = self._maybe_wrap_callable(arg)
+
+    def _copy(self) -> CallableColumnDefault:
+        return CallableColumnDefault(arg=self.arg, for_update=self.for_update)
+
+    def _maybe_wrap_callable(
+        self, fn: Union[_CallableColumnDefaultProtocol, Callable[[], Any]]
+    ) -> _CallableColumnDefaultProtocol:
+        """Wrap callables that don't accept a context.
+
+        This is to allow easy compatibility with default callables
+        that aren't specific to accepting of a context.
+
+        """
+
+        try:
+            argspec = util.get_callable_argspec(fn, no_self=True)
+        except TypeError:
+            return util.wrap_callable(lambda ctx: fn(), fn)  # type: ignore
+
+        defaulted = len(argspec[3]) if argspec[3] is not None else 0
+        positionals = len(argspec[0]) - defaulted
+
+        if positionals == 0:
+            return util.wrap_callable(lambda ctx: fn(), fn)  # type: ignore
+
+        elif positionals == 1:
+            return fn  # type: ignore
+        else:
+            raise exc.ArgumentError(
+                "ColumnDefault Python function takes zero or one "
+                "positional arguments"
+            )
+
+
+class IdentityOptions:
+    """Defines options for a named database sequence or an identity column.
+
+    .. versionadded:: 1.3.18
+
+    .. seealso::
+
+        :class:`.Sequence`
+
+    """
+
+    def __init__(
+        self,
+        start: Optional[int] = None,
+        increment: Optional[int] = None,
+        minvalue: Optional[int] = None,
+        maxvalue: Optional[int] = None,
+        nominvalue: Optional[bool] = None,
+        nomaxvalue: Optional[bool] = None,
+        cycle: Optional[bool] = None,
+        cache: Optional[int] = None,
+        order: Optional[bool] = None,
+    ) -> None:
+        """Construct a :class:`.IdentityOptions` object.
+
+        See the :class:`.Sequence` documentation for a complete description
+        of the parameters.
+
+        :param start: the starting index of the sequence.
+        :param increment: the increment value of the sequence.
+        :param minvalue: the minimum value of the sequence.
+        :param maxvalue: the maximum value of the sequence.
+        :param nominvalue: no minimum value of the sequence.
+        :param nomaxvalue: no maximum value of the sequence.
+        :param cycle: allows the sequence to wrap around when the maxvalue
+         or minvalue has been reached.
+        :param cache: optional integer value; number of future values in the
+         sequence which are calculated in advance.
+        :param order: optional boolean value; if ``True``, renders the
+         ORDER keyword.
+
+        """
+        self.start = start
+        self.increment = increment
+        self.minvalue = minvalue
+        self.maxvalue = maxvalue
+        self.nominvalue = nominvalue
+        self.nomaxvalue = nomaxvalue
+        self.cycle = cycle
+        self.cache = cache
+        self.order = order
+
+    @property
+    def _increment_is_negative(self) -> bool:
+        return self.increment is not None and self.increment < 0
+
+
+class Sequence(HasSchemaAttr, IdentityOptions, DefaultGenerator):
+    """Represents a named database sequence.
+
+    The :class:`.Sequence` object represents the name and configurational
+    parameters of a database sequence.   It also represents
+    a construct that can be "executed" by a SQLAlchemy :class:`_engine.Engine`
+    or :class:`_engine.Connection`,
+    rendering the appropriate "next value" function
+    for the target database and returning a result.
+
+    The :class:`.Sequence` is typically associated with a primary key column::
+
+        some_table = Table(
+            "some_table",
+            metadata,
+            Column(
+                "id",
+                Integer,
+                Sequence("some_table_seq", start=1),
+                primary_key=True,
+            ),
+        )
+
+    When CREATE TABLE is emitted for the above :class:`_schema.Table`, if the
+    target platform supports sequences, a CREATE SEQUENCE statement will
+    be emitted as well.   For platforms that don't support sequences,
+    the :class:`.Sequence` construct is ignored.
+
+    .. seealso::
+
+        :ref:`defaults_sequences`
+
+        :class:`.CreateSequence`
+
+        :class:`.DropSequence`
+
+    """
+
+    __visit_name__ = "sequence"
+
+    is_sequence = True
+
+    column: Optional[Column[Any]]
+    data_type: Optional[TypeEngine[int]]
+
+    def __init__(
+        self,
+        name: str,
+        start: Optional[int] = None,
+        increment: Optional[int] = None,
+        minvalue: Optional[int] = None,
+        maxvalue: Optional[int] = None,
+        nominvalue: Optional[bool] = None,
+        nomaxvalue: Optional[bool] = None,
+        cycle: Optional[bool] = None,
+        schema: Optional[Union[str, Literal[SchemaConst.BLANK_SCHEMA]]] = None,
+        cache: Optional[int] = None,
+        order: Optional[bool] = None,
+        data_type: Optional[_TypeEngineArgument[int]] = None,
+        optional: bool = False,
+        quote: Optional[bool] = None,
+        metadata: Optional[MetaData] = None,
+        quote_schema: Optional[bool] = None,
+        for_update: bool = False,
+    ) -> None:
+        """Construct a :class:`.Sequence` object.
+
+        :param name: the name of the sequence.
+
+        :param start: the starting index of the sequence.  This value is
+         used when the CREATE SEQUENCE command is emitted to the database
+         as the value of the "START WITH" clause. If ``None``, the
+         clause is omitted, which on most platforms indicates a starting
+         value of 1.
+
+         .. versionchanged:: 2.0 The :paramref:`.Sequence.start` parameter
+            is required in order to have DDL emit "START WITH".  This is a
+            reversal of a change made in version 1.4 which would implicitly
+            render "START WITH 1" if the :paramref:`.Sequence.start` were
+            not included.  See :ref:`change_7211` for more detail.
+
+        :param increment: the increment value of the sequence.  This
+         value is used when the CREATE SEQUENCE command is emitted to
+         the database as the value of the "INCREMENT BY" clause.  If ``None``,
+         the clause is omitted, which on most platforms indicates an
+         increment of 1.
+        :param minvalue: the minimum value of the sequence.  This
+         value is used when the CREATE SEQUENCE command is emitted to
+         the database as the value of the "MINVALUE" clause.  If ``None``,
+         the clause is omitted, which on most platforms indicates a
+         minvalue of 1 and -2^63 for ascending and descending sequences,
+         respectively.
+
+        :param maxvalue: the maximum value of the sequence.  This
+         value is used when the CREATE SEQUENCE command is emitted to
+         the database as the value of the "MAXVALUE" clause.  If ``None``,
+         the clause is omitted, which on most platforms indicates a
+         maxvalue of 2^63-1 and -1 for ascending and descending sequences,
+         respectively.
+
+        :param nominvalue: no minimum value of the sequence.  This
+         value is used when the CREATE SEQUENCE command is emitted to
+         the database as the value of the "NO MINVALUE" clause.  If ``None``,
+         the clause is omitted, which on most platforms indicates a
+         minvalue of 1 and -2^63 for ascending and descending sequences,
+         respectively.
+
+        :param nomaxvalue: no maximum value of the sequence.  This
+         value is used when the CREATE SEQUENCE command is emitted to
+         the database as the value of the "NO MAXVALUE" clause.  If ``None``,
+         the clause is omitted, which on most platforms indicates a
+         maxvalue of 2^63-1 and -1 for ascending and descending sequences,
+         respectively.
+
+        :param cycle: allows the sequence to wrap around when the maxvalue
+         or minvalue has been reached by an ascending or descending sequence
+         respectively.  This value is used when the CREATE SEQUENCE command
+         is emitted to the database as the "CYCLE" clause.  If the limit is
+         reached, the next number generated will be the minvalue or maxvalue,
+         respectively.  If cycle=False (the default), any calls to nextval
+         after the sequence has reached its maximum value will return an
+         error.
+
+        :param schema: optional schema name for the sequence, if located
+         in a schema other than the default.  The rules for selecting the
+         schema name when a :class:`_schema.MetaData`
+         is also present are the same
+         as that of :paramref:`_schema.Table.schema`.
+
+        :param cache: optional integer value; number of future values in the
+         sequence which are calculated in advance.  Renders the CACHE keyword
+         understood by Oracle Database and PostgreSQL.
+
+        :param order: optional boolean value; if ``True``, renders the
+         ORDER keyword, understood by Oracle Database, indicating the sequence
+         is definitively ordered.   May be necessary to provide deterministic
+         ordering using Oracle RAC.
+
+        :param data_type: The type to be returned by the sequence, for
+         dialects that allow us to choose between INTEGER, BIGINT, etc.
+         (e.g., mssql).
+
+         .. versionadded:: 1.4.0
+
+        :param optional: boolean value, when ``True``, indicates that this
+         :class:`.Sequence` object only needs to be explicitly generated
+         on backends that don't provide another way to generate primary
+         key identifiers.  Currently, it essentially means, "don't create
+         this sequence on the PostgreSQL backend, where the SERIAL keyword
+         creates a sequence for us automatically".
+        :param quote: boolean value, when ``True`` or ``False``, explicitly
+         forces quoting of the :paramref:`_schema.Sequence.name` on or off.
+         When left at its default of ``None``, normal quoting rules based
+         on casing and reserved words take place.
+        :param quote_schema: Set the quoting preferences for the ``schema``
+         name.
+
+        :param metadata: optional :class:`_schema.MetaData` object which this
+         :class:`.Sequence` will be associated with.  A :class:`.Sequence`
+         that is associated with a :class:`_schema.MetaData`
+         gains the following
+         capabilities:
+
+         * The :class:`.Sequence` will inherit the
+           :paramref:`_schema.MetaData.schema`
+           parameter specified to the target :class:`_schema.MetaData`, which
+           affects the production of CREATE / DROP DDL, if any.
+
+         * The :meth:`.Sequence.create` and :meth:`.Sequence.drop` methods
+           automatically use the engine bound to the :class:`_schema.MetaData`
+           object, if any.
+
+         * The :meth:`_schema.MetaData.create_all` and
+           :meth:`_schema.MetaData.drop_all`
+           methods will emit CREATE / DROP for this :class:`.Sequence`,
+           even if the :class:`.Sequence` is not associated with any
+           :class:`_schema.Table` / :class:`_schema.Column`
+           that's a member of this
+           :class:`_schema.MetaData`.
+
+         The above behaviors can only occur if the :class:`.Sequence` is
+         explicitly associated with the :class:`_schema.MetaData`
+         via this parameter.
+
+         .. seealso::
+
+            :ref:`sequence_metadata` - full discussion of the
+            :paramref:`.Sequence.metadata` parameter.
+
+        :param for_update: Indicates this :class:`.Sequence`, when associated
+         with a :class:`_schema.Column`,
+         should be invoked for UPDATE statements
+         on that column's table, rather than for INSERT statements, when
+         no value is otherwise present for that column in the statement.
+
+        """
+        DefaultGenerator.__init__(self, for_update=for_update)
+        IdentityOptions.__init__(
+            self,
+            start=start,
+            increment=increment,
+            minvalue=minvalue,
+            maxvalue=maxvalue,
+            nominvalue=nominvalue,
+            nomaxvalue=nomaxvalue,
+            cycle=cycle,
+            cache=cache,
+            order=order,
+        )
+        self.column = None
+        self.name = quoted_name(name, quote)
+        self.optional = optional
+        if schema is BLANK_SCHEMA:
+            self.schema = schema = None
+        elif metadata is not None and schema is None and metadata.schema:
+            self.schema = schema = metadata.schema
+        else:
+            self.schema = quoted_name.construct(schema, quote_schema)
+        self.metadata = metadata
+        self._key = _get_table_key(name, schema)
+        if metadata:
+            self._set_metadata(metadata)
+        if data_type is not None:
+            self.data_type = to_instance(data_type)
+        else:
+            self.data_type = None
+
+    @util.preload_module("sqlalchemy.sql.functions")
+    def next_value(self) -> Function[int]:
+        """Return a :class:`.next_value` function element
+        which will render the appropriate increment function
+        for this :class:`.Sequence` within any SQL expression.
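+
+        E.g., a short sketch; the sequence name and ``connection`` are
+        hypothetical::
+
+            seq = Sequence("order_id_seq")
+            next_id = connection.scalar(select(seq.next_value()))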
+
+        """
+        return util.preloaded.sql_functions.func.next_value(self)
+
+    def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None:
+        column = parent
+        assert isinstance(column, Column)
+        super()._set_parent(column)
+        column._on_table_attach(self._set_table)
+
+    def _copy(self) -> Sequence:
+        return Sequence(
+            name=self.name,
+            start=self.start,
+            increment=self.increment,
+            minvalue=self.minvalue,
+            maxvalue=self.maxvalue,
+            nominvalue=self.nominvalue,
+            nomaxvalue=self.nomaxvalue,
+            cycle=self.cycle,
+            schema=self.schema,
+            cache=self.cache,
+            order=self.order,
+            data_type=self.data_type,
+            optional=self.optional,
+            metadata=self.metadata,
+            for_update=self.for_update,
+        )
+
+    def _set_table(self, column: Column[Any], table: Table) -> None:
+        self._set_metadata(table.metadata)
+
+    def _set_metadata(self, metadata: MetaData) -> None:
+        self.metadata = metadata
+        self.metadata._sequences[self._key] = self
+
+    def create(self, bind: _CreateDropBind, checkfirst: bool = True) -> None:
+        """Creates this sequence in the database."""
+
+        bind._run_ddl_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
+
+    def drop(self, bind: _CreateDropBind, checkfirst: bool = True) -> None:
+        """Drops this sequence from the database."""
+
+        bind._run_ddl_visitor(ddl.SchemaDropper, self, checkfirst=checkfirst)
+
+    def _not_a_column_expr(self) -> NoReturn:
+        raise exc.InvalidRequestError(
+            f"This {self.__class__.__name__} cannot be used directly "
+            "as a column expression.  Use func.next_value(sequence) "
+            "to produce a 'next value' function that's usable "
+            "as a column element."
+        )
+
+
+@inspection._self_inspects
+class FetchedValue(SchemaEventTarget):
+    """A marker for a transparent database-side default.
+
+    Use :class:`.FetchedValue` when the database is configured
+    to provide some automatic default for a column.
+
+    E.g.::
+
+        Column("foo", Integer, FetchedValue())
+
+    Would indicate that some trigger or default generator
+    will create a new value for the ``foo`` column during an
+    INSERT.
+
+    .. seealso::
+
+        :ref:`triggered_columns`
+
+    """
+
+    is_server_default = True
+    reflected = False
+    has_argument = False
+    is_clause_element = False
+    is_identity = False
+
+    column: Optional[Column[Any]]
+
+    def __init__(self, for_update: bool = False) -> None:
+        self.for_update = for_update
+
+    def _as_for_update(self, for_update: bool) -> FetchedValue:
+        if for_update == self.for_update:
+            return self
+        else:
+            return self._clone(for_update)
+
+    def _copy(self) -> FetchedValue:
+        return FetchedValue(self.for_update)
+
+    def _clone(self, for_update: bool) -> Self:
+        n = self.__class__.__new__(self.__class__)
+        n.__dict__.update(self.__dict__)
+        n.__dict__.pop("column", None)
+        n.for_update = for_update
+        return n
+
+    def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None:
+        column = parent
+        assert isinstance(column, Column)
+        self.column = column
+        if self.for_update:
+            self.column.server_onupdate = self
+        else:
+            self.column.server_default = self
+
+    def __repr__(self) -> str:
+        return util.generic_repr(self)
+
+
+class DefaultClause(FetchedValue):
+    """A DDL-specified DEFAULT column value.
+
+    :class:`.DefaultClause` is a :class:`.FetchedValue`
+    that also generates a "DEFAULT" clause when
+    "CREATE TABLE" is emitted.
+
+    :class:`.DefaultClause` is generated automatically
+    whenever the ``server_default`` or ``server_onupdate`` arguments of
+    :class:`_schema.Column` are used.  A :class:`.DefaultClause`
+    can be passed positionally as well.
+
+    For example, the following::
+
+        Column("foo", Integer, server_default="50")
+
+    Is equivalent to::
+
+        Column("foo", Integer, DefaultClause("50"))
+
+    """
+
+    has_argument = True
+
+    def __init__(
+        self,
+        arg: Union[str, ClauseElement, TextClause],
+        for_update: bool = False,
+        _reflected: bool = False,
+    ) -> None:
+        util.assert_arg_type(arg, (str, ClauseElement, TextClause), "arg")
+        super().__init__(for_update)
+        self.arg = arg
+        self.reflected = _reflected
+
+    def _copy(self) -> DefaultClause:
+        return DefaultClause(
+            arg=self.arg, for_update=self.for_update, _reflected=self.reflected
+        )
+
+    def __repr__(self) -> str:
+        return "DefaultClause(%r, for_update=%r)" % (self.arg, self.for_update)
+
+
+class Constraint(DialectKWArgs, HasConditionalDDL, SchemaItem):
+    """A table-level SQL constraint.
+
+    :class:`_schema.Constraint` serves as the base class for the series of
+    constraint objects that can be associated with :class:`_schema.Table`
+    objects, including :class:`_schema.PrimaryKeyConstraint`,
+    :class:`_schema.ForeignKeyConstraint`,
+    :class:`_schema.UniqueConstraint`, and
+    :class:`_schema.CheckConstraint`.
+
+    """
+
+    __visit_name__ = "constraint"
+
+    _creation_order: int
+    _column_flag: bool
+
+    def __init__(
+        self,
+        name: _ConstraintNameArgument = None,
+        deferrable: Optional[bool] = None,
+        initially: Optional[str] = None,
+        info: Optional[_InfoType] = None,
+        comment: Optional[str] = None,
+        _create_rule: Optional[Any] = None,
+        _type_bound: bool = False,
+        **dialect_kw: Any,
+    ) -> None:
+        r"""Create a SQL constraint.
+
+        :param name:
+          Optional, the in-database name of this ``Constraint``.
+
+        :param deferrable:
+          Optional bool.  If set, emit DEFERRABLE or NOT DEFERRABLE when
+          issuing DDL for this constraint.
+
+        :param initially:
+          Optional string.  If set, emit INITIALLY <value> when issuing DDL
+          for this constraint.
+
+        :param info: Optional data dictionary which will be populated into the
+            :attr:`.SchemaItem.info` attribute of this object.
+
+        :param comment: Optional string that will render an SQL comment on
+          foreign key constraint creation.
+
+            .. versionadded:: 2.0
+
+        :param \**dialect_kw:  Additional keyword arguments are dialect
+            specific, and passed in the form ``<dialectname>_<argname>``.  See
+            the documentation regarding an individual dialect at
+            :ref:`dialect_toplevel` for detail on documented arguments.
+
+        :param _create_rule:
+          used internally by some datatypes that also create constraints.
+
+        :param _type_bound:
+          used internally to indicate that this constraint is associated with
+          a specific datatype.
+
+        """
+
+        self.name = name
+        self.deferrable = deferrable
+        self.initially = initially
+        if info:
+            self.info = info
+        self._create_rule = _create_rule
+        self._type_bound = _type_bound
+        util.set_creation_order(self)
+        self._validate_dialect_kwargs(dialect_kw)
+        self.comment = comment
+
+    def _should_create_for_compiler(
+        self, compiler: DDLCompiler, **kw: Any
+    ) -> bool:
+        if self._create_rule is not None and not self._create_rule(compiler):
+            return False
+        elif self._ddl_if is not None:
+            return self._ddl_if._should_execute(
+                ddl.CreateConstraint(self), self, None, compiler=compiler, **kw
+            )
+        else:
+            return True
+
+    @property
+    def table(self) -> Table:
+        try:
+            if isinstance(self.parent, Table):
+                return self.parent
+        except AttributeError:
+            pass
+        raise exc.InvalidRequestError(
+            "This constraint is not bound to a table.  Did you "
+            "mean to call table.append_constraint(constraint) ?"
+        )
+
+    def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None:
+        assert isinstance(parent, (Table, Column))
+        self.parent = parent
+        parent.constraints.add(self)
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`_schema.Constraint.copy` method is deprecated "
+        "and will be removed in a future release.",
+    )
+    def copy(self, **kw: Any) -> Self:
+        return self._copy(**kw)
+
+    def _copy(self, **kw: Any) -> Self:
+        raise NotImplementedError()
+
+
+class ColumnCollectionMixin:
+    """A :class:`_expression.ColumnCollection` of :class:`_schema.Column`
+    objects.
+
+    This collection represents the columns which are referred to by
+    this object.
+
+    """
+
+    _columns: DedupeColumnCollection[Column[Any]]
+
+    _allow_multiple_tables = False
+
+    _pending_colargs: List[Optional[Union[str, Column[Any]]]]
+
+    if TYPE_CHECKING:
+
+        def _set_parent_with_dispatch(
+            self, parent: SchemaEventTarget, **kw: Any
+        ) -> None: ...
+
+    def __init__(
+        self,
+        *columns: _DDLColumnArgument,
+        _autoattach: bool = True,
+        _column_flag: bool = False,
+        _gather_expressions: Optional[
+            List[Union[str, ColumnElement[Any]]]
+        ] = None,
+    ) -> None:
+        self._column_flag = _column_flag
+        self._columns = DedupeColumnCollection()
+
+        processed_expressions: Optional[
+            List[Union[ColumnElement[Any], str]]
+        ] = _gather_expressions
+
+        if processed_expressions is not None:
+
+            # this is expected to be an empty list
+            assert not processed_expressions
+
+            self._pending_colargs = []
+            for (
+                expr,
+                _,
+                _,
+                add_element,
+            ) in coercions.expect_col_expression_collection(
+                roles.DDLConstraintColumnRole, columns
+            ):
+                self._pending_colargs.append(add_element)
+                processed_expressions.append(expr)
+        else:
+            self._pending_colargs = [
+                coercions.expect(roles.DDLConstraintColumnRole, column)
+                for column in columns
+            ]
+
+        if _autoattach and self._pending_colargs:
+            self._check_attach()
+
+    def _check_attach(self, evt: bool = False) -> None:
+        col_objs = [c for c in self._pending_colargs if isinstance(c, Column)]
+
+        cols_w_table = [c for c in col_objs if isinstance(c.table, Table)]
+
+        cols_wo_table = set(col_objs).difference(cols_w_table)
+        if cols_wo_table:
+            # feature #3341 - place event listeners for Column objects
+            # such that when all those cols are attached, we autoattach.
+            assert not evt, "Should not reach here on event call"
+
+            # issue #3411 - don't do the per-column auto-attach if some of the
+            # columns are specified as strings.
+            has_string_cols = {
+                c for c in self._pending_colargs if c is not None
+            }.difference(col_objs)
+            if not has_string_cols:
+
+                def _col_attached(column: Column[Any], table: Table) -> None:
+                    # this isinstance() corresponds with the
+                    # isinstance() above; only want to count Table-bound
+                    # columns
+                    if isinstance(table, Table):
+                        cols_wo_table.discard(column)
+                        if not cols_wo_table:
+                            self._check_attach(evt=True)
+
+                self._cols_wo_table = cols_wo_table
+                for col in cols_wo_table:
+                    col._on_table_attach(_col_attached)
+                return
+
+        columns = cols_w_table
+
+        tables = {c.table for c in columns}
+        if len(tables) == 1:
+            self._set_parent_with_dispatch(tables.pop())
+        elif len(tables) > 1 and not self._allow_multiple_tables:
+            table = columns[0].table
+            others = [c for c in columns[1:] if c.table is not table]
+            if others:
+                # black could not format this inline
+                other_str = ", ".join("'%s'" % c for c in others)
+                raise exc.ArgumentError(
+                    f"Column(s) {other_str} "
+                    f"are not part of table '{table.description}'."
+                )
+
+    @util.ro_memoized_property
+    def columns(self) -> ReadOnlyColumnCollection[str, Column[Any]]:
+        return self._columns.as_readonly()
+
+    @util.ro_memoized_property
+    def c(self) -> ReadOnlyColumnCollection[str, Column[Any]]:
+        return self._columns.as_readonly()
+
+    def _col_expressions(
+        self, parent: Union[Table, Column[Any]]
+    ) -> List[Optional[Column[Any]]]:
+        if isinstance(parent, Column):
+            result: List[Optional[Column[Any]]] = [
+                c for c in self._pending_colargs if isinstance(c, Column)
+            ]
+            assert len(result) == len(self._pending_colargs)
+            return result
+        else:
+            try:
+                return [
+                    parent.c[col] if isinstance(col, str) else col
+                    for col in self._pending_colargs
+                ]
+            except KeyError as ke:
+                raise exc.ConstraintColumnNotFoundError(
+                    f"Can't create {self.__class__.__name__} "
+                    f"on table '{parent.description}': no column "
+                    f"named '{ke.args[0]}' is present."
+                ) from ke
+
+    def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None:
+        assert isinstance(parent, (Table, Column))
+
+        for col in self._col_expressions(parent):
+            if col is not None:
+                self._columns.add(col)
+
+
+class ColumnCollectionConstraint(ColumnCollectionMixin, Constraint):
+    """A constraint that proxies a ColumnCollection."""
+
+    def __init__(
+        self,
+        *columns: _DDLColumnArgument,
+        name: _ConstraintNameArgument = None,
+        deferrable: Optional[bool] = None,
+        initially: Optional[str] = None,
+        info: Optional[_InfoType] = None,
+        _autoattach: bool = True,
+        _column_flag: bool = False,
+        _gather_expressions: Optional[List[_DDLColumnArgument]] = None,
+        **dialect_kw: Any,
+    ) -> None:
+        r"""
+        :param \*columns:
+          A sequence of column names or Column objects.
+
+        :param name:
+          Optional, the in-database name of this constraint.
+
+        :param deferrable:
+          Optional bool.  If set, emit DEFERRABLE or NOT DEFERRABLE when
+          issuing DDL for this constraint.
+
+        :param initially:
+          Optional string.  If set, emit INITIALLY <value> when issuing DDL
+          for this constraint.
+
+        :param \**dialect_kw: other keyword arguments including
+          dialect-specific arguments are propagated to the :class:`.Constraint`
+          superclass.
+
+        """
+        Constraint.__init__(
+            self,
+            name=name,
+            deferrable=deferrable,
+            initially=initially,
+            info=info,
+            **dialect_kw,
+        )
+        ColumnCollectionMixin.__init__(
+            self, *columns, _autoattach=_autoattach, _column_flag=_column_flag
+        )
+
+    columns: ReadOnlyColumnCollection[str, Column[Any]]
+    """A :class:`_expression.ColumnCollection` representing the set of columns
+    for this constraint.
+
+    """
+
+    def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None:
+        assert isinstance(parent, (Column, Table))
+        Constraint._set_parent(self, parent)
+        ColumnCollectionMixin._set_parent(self, parent)
+
+    def __contains__(self, x: Any) -> bool:
+        return x in self._columns
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`_schema.ColumnCollectionConstraint.copy` method "
+        "is deprecated and will be removed in a future release.",
+    )
+    def copy(
+        self,
+        *,
+        target_table: Optional[Table] = None,
+        **kw: Any,
+    ) -> ColumnCollectionConstraint:
+        return self._copy(target_table=target_table, **kw)
+
+    def _copy(
+        self,
+        *,
+        target_table: Optional[Table] = None,
+        **kw: Any,
+    ) -> ColumnCollectionConstraint:
+        # ticket #5276
+        constraint_kwargs = {}
+        for dialect_name in self.dialect_options:
+            dialect_options = self.dialect_options[dialect_name]._non_defaults
+            for (
+                dialect_option_key,
+                dialect_option_value,
+            ) in dialect_options.items():
+                constraint_kwargs[dialect_name + "_" + dialect_option_key] = (
+                    dialect_option_value
+                )
+
+        assert isinstance(self.parent, Table)
+        c = self.__class__(
+            name=self.name,
+            deferrable=self.deferrable,
+            initially=self.initially,
+            *[
+                _copy_expression(expr, self.parent, target_table)
+                for expr in self._columns
+            ],
+            comment=self.comment,
+            **constraint_kwargs,
+        )
+        return self._schema_item_copy(c)
+
+    def contains_column(self, col: Column[Any]) -> bool:
+        """Return True if this constraint contains the given column.
+
+        Note that this object also contains an attribute ``.columns``
+        which is a :class:`_expression.ColumnCollection` of
+        :class:`_schema.Column` objects.
+
+        """
+
+        return self._columns.contains_column(col)
+
+    def __iter__(self) -> Iterator[Column[Any]]:
+        return iter(self._columns)
+
+    def __len__(self) -> int:
+        return len(self._columns)
+
+
+class CheckConstraint(ColumnCollectionConstraint):
+    """A table- or column-level CHECK constraint.
+
+    Can be included in the definition of a Table or Column.
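+
+    E.g., a sketch showing the column-level and table-level forms against a
+    hypothetical table::
+
+        Table(
+            "t",
+            metadata,
+            Column("num", Integer, CheckConstraint("num > 0")),
+            CheckConstraint("num < 100", name="num_range"),
+        )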
+    """
+
+    _allow_multiple_tables = True
+
+    __visit_name__ = "table_or_column_check_constraint"
+
+    @_document_text_coercion(
+        "sqltext",
+        ":class:`.CheckConstraint`",
+        ":paramref:`.CheckConstraint.sqltext`",
+    )
+    def __init__(
+        self,
+        sqltext: _TextCoercedExpressionArgument[Any],
+        name: _ConstraintNameArgument = None,
+        deferrable: Optional[bool] = None,
+        initially: Optional[str] = None,
+        table: Optional[Table] = None,
+        info: Optional[_InfoType] = None,
+        _create_rule: Optional[Any] = None,
+        _autoattach: bool = True,
+        _type_bound: bool = False,
+        **dialect_kw: Any,
+    ) -> None:
+        r"""Construct a CHECK constraint.
+
+        :param sqltext:
+         A string containing the constraint definition, which will be used
+         verbatim, or a SQL expression construct.   If given as a string,
+         the object is converted to a :func:`_expression.text` object.
+         If the textual
+         string includes a colon character, escape this using a backslash::
+
+           CheckConstraint(r"foo ~ E'a(?\:b|c)d")
+
+        :param name:
+          Optional, the in-database name of the constraint.
+
+        :param deferrable:
+          Optional bool.  If set, emit DEFERRABLE or NOT DEFERRABLE when
+          issuing DDL for this constraint.
+
+        :param initially:
+          Optional string.  If set, emit INITIALLY <value> when issuing DDL
+          for this constraint.
+
+        :param info: Optional data dictionary which will be populated into the
+            :attr:`.SchemaItem.info` attribute of this object.
+
+        """
+
+        self.sqltext = coercions.expect(roles.DDLExpressionRole, sqltext)
+        columns: List[Column[Any]] = []
+        visitors.traverse(self.sqltext, {}, {"column": columns.append})
+
+        super().__init__(
+            name=name,
+            deferrable=deferrable,
+            initially=initially,
+            _create_rule=_create_rule,
+            info=info,
+            _type_bound=_type_bound,
+            _autoattach=_autoattach,
+            *columns,
+            **dialect_kw,
+        )
+        if table is not None:
+            self._set_parent_with_dispatch(table)
+
+    @property
+    def is_column_level(self) -> bool:
+        return not isinstance(self.parent, Table)
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`_schema.CheckConstraint.copy` method is deprecated "
+        "and will be removed in a future release.",
+    )
+    def copy(
+        self, *, target_table: Optional[Table] = None, **kw: Any
+    ) -> CheckConstraint:
+        return self._copy(target_table=target_table, **kw)
+
+    def _copy(
+        self, *, target_table: Optional[Table] = None, **kw: Any
+    ) -> CheckConstraint:
+        if target_table is not None:
+            # note that target_table is None for the copy process of
+            # a column-bound CheckConstraint, so this path is not reached
+            # in that case.
+            sqltext = _copy_expression(self.sqltext, self.table, target_table)
+        else:
+            sqltext = self.sqltext
+        c = CheckConstraint(
+            sqltext,
+            name=self.name,
+            initially=self.initially,
+            deferrable=self.deferrable,
+            _create_rule=self._create_rule,
+            table=target_table,
+            comment=self.comment,
+            _autoattach=False,
+            _type_bound=self._type_bound,
+        )
+        return self._schema_item_copy(c)
+
+
+class ForeignKeyConstraint(ColumnCollectionConstraint):
+    """A table-level FOREIGN KEY constraint.
+
+    Defines a single column or composite FOREIGN KEY ... REFERENCES
+    constraint. For a no-frills, single column foreign key, adding a
+    :class:`_schema.ForeignKey` to the definition of a :class:`_schema.Column`
+    is a
+    shorthand equivalent for an unnamed, single column
+    :class:`_schema.ForeignKeyConstraint`.
+
+    Examples of foreign key configuration are in :ref:`metadata_foreignkeys`.
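+
+    E.g., a composite constraint, sketched with hypothetical table and
+    column names::
+
+        ForeignKeyConstraint(
+            ["invoice_id", "ref_num"],
+            ["invoice.invoice_id", "invoice.ref_num"],
+        )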
+
+    """
+
+    __visit_name__ = "foreign_key_constraint"
+
+    def __init__(
+        self,
+        columns: _typing_Sequence[_DDLColumnArgument],
+        refcolumns: _typing_Sequence[_DDLColumnArgument],
+        name: _ConstraintNameArgument = None,
+        onupdate: Optional[str] = None,
+        ondelete: Optional[str] = None,
+        deferrable: Optional[bool] = None,
+        initially: Optional[str] = None,
+        use_alter: bool = False,
+        link_to_name: bool = False,
+        match: Optional[str] = None,
+        table: Optional[Table] = None,
+        info: Optional[_InfoType] = None,
+        comment: Optional[str] = None,
+        **dialect_kw: Any,
+    ) -> None:
+        r"""Construct a composite-capable FOREIGN KEY.
+
+        :param columns: A sequence of local column names. The named columns
+          must be defined and present in the parent Table. The names should
+          match the ``key`` given to each column (defaults to the name) unless
+          ``link_to_name`` is True.
+
+        :param refcolumns: A sequence of foreign column names or Column
+          objects. The columns must all be located within the same Table.
+
+        :param name: Optional, the in-database name of the key.
+
+        :param onupdate: Optional string. If set, emit ON UPDATE <value> when
+          issuing DDL for this constraint. Typical values include CASCADE,
+          SET NULL and RESTRICT.
+
+        :param ondelete: Optional string. If set, emit ON DELETE <value> when
+          issuing DDL for this constraint. Typical values include CASCADE,
+          SET NULL and RESTRICT.
+
+        :param deferrable: Optional bool. If set, emit DEFERRABLE or NOT
+          DEFERRABLE when issuing DDL for this constraint.
+
+        :param initially: Optional string. If set, emit INITIALLY <value> when
+          issuing DDL for this constraint.
+
+        :param link_to_name: if True, the string name given in ``columns`` is
+          the rendered name of the referenced column, not its locally assigned
+          ``key``.
+
+        :param use_alter: If True, do not emit the DDL for this constraint as
+          part of the CREATE TABLE definition. Instead, generate it via an
+          ALTER TABLE statement issued after the full collection of tables
+          have been created, and drop it via an ALTER TABLE statement before
+          the full collection of tables are dropped.
+
+          The use of :paramref:`_schema.ForeignKeyConstraint.use_alter` is
+          particularly geared towards the case where two or more tables
+          are established within a mutually-dependent foreign key constraint
+          relationship; however, the :meth:`_schema.MetaData.create_all` and
+          :meth:`_schema.MetaData.drop_all`
+          methods will perform this resolution
+          automatically, so the flag is normally not needed.
+
+          .. seealso::
+
+                :ref:`use_alter`
+
+        :param match: Optional string. If set, emit MATCH <value> when issuing
+          DDL for this constraint. Typical values include SIMPLE, PARTIAL
+          and FULL.
+
+        :param info: Optional data dictionary which will be populated into the
+            :attr:`.SchemaItem.info` attribute of this object.
+
+        :param comment: Optional string that will render an SQL comment on
+          foreign key constraint creation.
+
+            .. versionadded:: 2.0
+
+        :param \**dialect_kw:  Additional keyword arguments are dialect
+          specific, and passed in the form ``<dialectname>_<argname>``.  See
+          the documentation regarding an individual dialect at
+          :ref:`dialect_toplevel` for detail on documented arguments.
+
+        """
+
+        Constraint.__init__(
+            self,
+            name=name,
+            deferrable=deferrable,
+            initially=initially,
+            info=info,
+            comment=comment,
+            **dialect_kw,
+        )
+        self.onupdate = onupdate
+        self.ondelete = ondelete
+        self.link_to_name = link_to_name
+        self.use_alter = use_alter
+        self.match = match
+
+        if len(set(columns)) != len(refcolumns):
+            if len(set(columns)) != len(columns):
+                # e.g. FOREIGN KEY (a, a) REFERENCES r (b, c)
+                raise exc.ArgumentError(
+                    "ForeignKeyConstraint with duplicate source column "
+                    "references are not supported."
+                )
+            else:
+                # e.g. FOREIGN KEY (a) REFERENCES r (b, c)
+                # paraphrasing
+                # https://www.postgresql.org/docs/current/static/ddl-constraints.html
+                raise exc.ArgumentError(
+                    "ForeignKeyConstraint number "
+                    "of constrained columns must match the number of "
+                    "referenced columns."
+                )
+
+        # standalone ForeignKeyConstraint - create
+        # associated ForeignKey objects which will be applied to hosted
+        # Column objects (in col.foreign_keys), either now or when attached
+        # to the Table for string-specified names
+        self.elements = [
+            ForeignKey(
+                refcol,
+                _constraint=self,
+                name=self.name,
+                onupdate=self.onupdate,
+                ondelete=self.ondelete,
+                use_alter=self.use_alter,
+                link_to_name=self.link_to_name,
+                match=self.match,
+                deferrable=self.deferrable,
+                initially=self.initially,
+                **self.dialect_kwargs,
+            )
+            for refcol in refcolumns
+        ]
+
+        ColumnCollectionMixin.__init__(self, *columns)
+        if table is not None:
+            if hasattr(self, "parent"):
+                assert table is self.parent
+            self._set_parent_with_dispatch(table)
+
+    def _append_element(self, column: Column[Any], fk: ForeignKey) -> None:
+        self._columns.add(column)
+        self.elements.append(fk)
+
+    columns: ReadOnlyColumnCollection[str, Column[Any]]
+    """A :class:`_expression.ColumnCollection` representing the set of columns
+    for this constraint.
+
+    """
+
+    elements: List[ForeignKey]
+    """A sequence of :class:`_schema.ForeignKey` objects.
+
+    Each :class:`_schema.ForeignKey`
+    represents a single referring column/referred
+    column pair.
+
+    This collection is intended to be read-only.
+
+    """
+
+    @property
+    def _elements(self) -> util.OrderedDict[str, ForeignKey]:
+        # legacy - provide a dictionary view of (column_key, fk)
+        return util.OrderedDict(zip(self.column_keys, self.elements))
+
+    @property
+    def _referred_schema(self) -> Optional[str]:
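+        # return the referred schema of the first ForeignKey element, or
+        # None when no elements are present; all elements necessarily
+        # refer to the same table (see _validate_dest_table)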
+        for elem in self.elements:
+            return elem._referred_schema
+        else:
+            return None
+
+    @property
+    def referred_table(self) -> Table:
+        """The :class:`_schema.Table` object to which this
+        :class:`_schema.ForeignKeyConstraint` refers.
+
+        This is a dynamically calculated attribute which may not be available
+        if the constraint and/or parent table is not yet associated with
+        a metadata collection that contains the referred table.
+
+        """
+        return self.elements[0].column.table
+
+    def _validate_dest_table(self, table: Table) -> None:
+        table_keys = {elem._table_key() for elem in self.elements}
+        if None not in table_keys and len(table_keys) > 1:
+            elem0, elem1 = sorted(table_keys)[0:2]
+            raise exc.ArgumentError(
+                f"ForeignKeyConstraint on "
+                f"{table.fullname}({self._col_description}) refers to "
+                f"multiple remote tables: {elem0} and {elem1}"
+            )
+
+    @property
+    def column_keys(self) -> _typing_Sequence[str]:
+        """Return a list of string keys representing the local
+        columns in this :class:`_schema.ForeignKeyConstraint`.
+
+        This list is either the original string arguments sent
+        to the constructor of the :class:`_schema.ForeignKeyConstraint`,
+        or if the constraint has been initialized with :class:`_schema.Column`
+        objects, is the string ``.key`` of each element.
+
+        """
+        if hasattr(self, "parent"):
+            return self._columns.keys()
+        else:
+            return [
+                col.key if isinstance(col, ColumnElement) else str(col)
+                for col in self._pending_colargs
+            ]
+
+    @property
+    def _col_description(self) -> str:
+        return ", ".join(self.column_keys)
+
+    def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None:
+        table = parent
+        assert isinstance(table, Table)
+        Constraint._set_parent(self, table)
+
+        ColumnCollectionConstraint._set_parent(self, table)
+
+        for col, fk in zip(self._columns, self.elements):
+            if not hasattr(fk, "parent") or fk.parent is not col:
+                fk._set_parent_with_dispatch(col)
+
+        self._validate_dest_table(table)
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`_schema.ForeignKeyConstraint.copy` method is deprecated "
+        "and will be removed in a future release.",
+    )
+    def copy(
+        self,
+        *,
+        schema: Optional[str] = None,
+        target_table: Optional[Table] = None,
+        **kw: Any,
+    ) -> ForeignKeyConstraint:
+        return self._copy(schema=schema, target_table=target_table, **kw)
+
+    def _copy(
+        self,
+        *,
+        schema: Optional[str] = None,
+        target_table: Optional[Table] = None,
+        **kw: Any,
+    ) -> ForeignKeyConstraint:
+        fkc = ForeignKeyConstraint(
+            [x.parent.key for x in self.elements],
+            [
+                x._get_colspec(
+                    schema=schema,
+                    table_name=(
+                        target_table.name
+                        if target_table is not None
+                        and x._table_key() == x.parent.table.key
+                        else None
+                    ),
+                    _is_copy=True,
+                )
+                for x in self.elements
+            ],
+            name=self.name,
+            onupdate=self.onupdate,
+            ondelete=self.ondelete,
+            use_alter=self.use_alter,
+            deferrable=self.deferrable,
+            initially=self.initially,
+            link_to_name=self.link_to_name,
+            match=self.match,
+            comment=self.comment,
+        )
+        for self_fk, other_fk in zip(self.elements, fkc.elements):
+            self_fk._schema_item_copy(other_fk)
+        return self._schema_item_copy(fkc)
+
+
+class PrimaryKeyConstraint(ColumnCollectionConstraint):
+    """A table-level PRIMARY KEY constraint.
+
+    The :class:`.PrimaryKeyConstraint` object is present automatically
+    on any :class:`_schema.Table` object; it is assigned a set of
+    :class:`_schema.Column` objects corresponding to those marked with
+    the :paramref:`_schema.Column.primary_key` flag::
+
+        >>> my_table = Table(
+        ...     "mytable",
+        ...     metadata,
+        ...     Column("id", Integer, primary_key=True),
+        ...     Column("version_id", Integer, primary_key=True),
+        ...     Column("data", String(50)),
+        ... )
+        >>> my_table.primary_key
+        PrimaryKeyConstraint(
+            Column('id', Integer(), table=<mytable>,
+                   primary_key=True, nullable=False),
+            Column('version_id', Integer(), table=<mytable>,
+                   primary_key=True, nullable=False)
+        )
+
+    The primary key of a :class:`_schema.Table` can also be specified by using
+    a :class:`.PrimaryKeyConstraint` object explicitly; in this mode of usage,
+    the "name" of the constraint can also be specified, as well as other
+    options which may be recognized by dialects::
+
+        my_table = Table(
+            "mytable",
+            metadata,
+            Column("id", Integer),
+            Column("version_id", Integer),
+            Column("data", String(50)),
+            PrimaryKeyConstraint("id", "version_id", name="mytable_pk"),
+        )
+
+    The two styles of column specification should generally not be mixed.
+    A warning is emitted if the columns present in the
+    :class:`.PrimaryKeyConstraint`
+    don't match the columns that were marked as ``primary_key=True``, if both
+    are present; in this case, the columns are taken strictly from the
+    :class:`.PrimaryKeyConstraint` declaration, and those columns otherwise
+    marked as ``primary_key=True`` are ignored.  This behavior is intended to
+    be backwards compatible with previous behavior.
+
+    For the use case where specific options are to be specified on the
+    :class:`.PrimaryKeyConstraint`, but the usual style of using
+    ``primary_key=True`` flags is still desirable, an empty
+    :class:`.PrimaryKeyConstraint` may be specified, which will take on the
+    primary key column collection from the :class:`_schema.Table` based on the
+    flags::
+
+        my_table = Table(
+            "mytable",
+            metadata,
+            Column("id", Integer, primary_key=True),
+            Column("version_id", Integer, primary_key=True),
+            Column("data", String(50)),
+            PrimaryKeyConstraint(name="mytable_pk", mssql_clustered=True),
+        )
+
+    """
+
+    __visit_name__ = "primary_key_constraint"
+
+    def __init__(
+        self,
+        *columns: _DDLColumnArgument,
+        name: Optional[str] = None,
+        deferrable: Optional[bool] = None,
+        initially: Optional[str] = None,
+        info: Optional[_InfoType] = None,
+        _implicit_generated: bool = False,
+        **dialect_kw: Any,
+    ) -> None:
+        self._implicit_generated = _implicit_generated
+        super().__init__(
+            *columns,
+            name=name,
+            deferrable=deferrable,
+            initially=initially,
+            info=info,
+            **dialect_kw,
+        )
+
+    def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None:
+        table = parent
+        assert isinstance(table, Table)
+        super()._set_parent(table)
+
+        if table.primary_key is not self:
+            table.constraints.discard(table.primary_key)
+            table.primary_key = self  # type: ignore
+            table.constraints.add(self)
+
+        table_pks = [c for c in table.c if c.primary_key]
+        if (
+            self._columns
+            and table_pks
+            and set(table_pks) != set(self._columns)
+        ):
+            # black could not format these inline
+            table_pk_str = ", ".join("'%s'" % c.name for c in table_pks)
+            col_str = ", ".join("'%s'" % c.name for c in self._columns)
+
+            util.warn(
+                f"Table '{table.name}' specifies columns "
+                f"{table_pk_str} as "
+                f"primary_key=True, "
+                f"not matching locally specified columns {col_str}; "
+                f"setting the "
+                f"current primary key columns to "
+                f"{col_str}. "
+                f"This warning "
+                f"may become an exception in a future release"
+            )
+            table_pks[:] = []
+
+        for c in self._columns:
+            c.primary_key = True
+            if c._user_defined_nullable is NULL_UNSPECIFIED:
+                c.nullable = False
+        if table_pks:
+            self._columns.extend(table_pks)
+
+    def _reload(self, columns: Iterable[Column[Any]]) -> None:
+        """repopulate this :class:`.PrimaryKeyConstraint` given
+        a set of columns.
+
+        Existing columns in the table that are marked as primary_key=True
+        are maintained.
+
+        Also fires a new event.
+
+        This is basically like putting a whole new
+        :class:`.PrimaryKeyConstraint` object on the parent
+        :class:`_schema.Table` object without actually replacing the object.
+
+        The ordering of the given list of columns is also maintained; these
+        columns will be appended to the list of columns after any which
+        are already present.
+
+        """
+        # set the primary key flag on new columns.
+        # note any existing PK cols on the table also have their
+        # flag still set.
+        for col in columns:
+            col.primary_key = True
+
+        self._columns.extend(columns)
+
+        PrimaryKeyConstraint._autoincrement_column._reset(self)  # type: ignore
+        self._set_parent_with_dispatch(self.table)
+
+    def _replace(self, col: Column[Any]) -> None:
+        PrimaryKeyConstraint._autoincrement_column._reset(self)  # type: ignore
+        self._columns.replace(col)
+
+        self.dispatch._sa_event_column_added_to_pk_constraint(self, col)
+
+    @property
+    def columns_autoinc_first(self) -> List[Column[Any]]:
+        autoinc = self._autoincrement_column
+
+        if autoinc is not None:
+            return [autoinc] + [c for c in self._columns if c is not autoinc]
+        else:
+            return list(self._columns)
+
+    @util.ro_memoized_property
+    def _autoincrement_column(self) -> Optional[Column[int]]:
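+        # locate the single column, if any, that acts as the
+        # "autoincrement" column of this primary key; explicit
+        # autoincrement=True settings are validated, while composite
+        # primary keys yield a column only when one is explicitly marked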
+        def _validate_autoinc(col: Column[Any], autoinc_true: bool) -> bool:
+            if col.type._type_affinity is None or not issubclass(
+                col.type._type_affinity,
+                (
+                    type_api.INTEGERTYPE._type_affinity,
+                    type_api.NUMERICTYPE._type_affinity,
+                ),
+            ):
+                if autoinc_true:
+                    raise exc.ArgumentError(
+                        f"Column type {col.type} on column '{col}' is not "
+                        f"compatible with autoincrement=True"
+                    )
+                else:
+                    return False
+            elif (
+                not isinstance(col.default, (type(None), Sequence))
+                and not autoinc_true
+            ):
+                return False
+            elif (
+                col.server_default is not None
+                and not isinstance(col.server_default, Identity)
+                and not autoinc_true
+            ):
+                return False
+            elif col.foreign_keys and col.autoincrement not in (
+                True,
+                "ignore_fk",
+            ):
+                return False
+            return True
+
+        if len(self._columns) == 1:
+            col = list(self._columns)[0]
+
+            if col.autoincrement is True:
+                _validate_autoinc(col, True)
+                return col
+            elif col.autoincrement in (
+                "auto",
+                "ignore_fk",
+            ) and _validate_autoinc(col, False):
+                return col
+            else:
+                return None
+
+        else:
+            autoinc = None
+            for col in self._columns:
+                if col.autoincrement is True:
+                    _validate_autoinc(col, True)
+                    if autoinc is not None:
+                        raise exc.ArgumentError(
+                            f"Only one Column may be marked "
+                            f"autoincrement=True, found both "
+                            f"{col.name} and {autoinc.name}."
+                        )
+                    else:
+                        autoinc = col
+
+            return autoinc
+
+
+class UniqueConstraint(ColumnCollectionConstraint):
+    """A table-level UNIQUE constraint.
+
+    Defines a single column or composite UNIQUE constraint. For a no-frills,
+    single column constraint, adding ``unique=True`` to the ``Column``
+    definition is a shorthand equivalent for an unnamed, single column
+    UniqueConstraint.
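+
+    For example, a two-column constraint with an explicit name (table and
+    column names are illustrative only)::
+
+        Table(
+            "user",
+            metadata,
+            Column("name", String(50)),
+            Column("address", String(100)),
+            UniqueConstraint("name", "address", name="uq_user_name_address"),
+        )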
+    """
+
+    __visit_name__ = "unique_constraint"
+
+
+class Index(
+    DialectKWArgs, ColumnCollectionMixin, HasConditionalDDL, SchemaItem
+):
+    """A table-level INDEX.
+
+    Defines a composite (one or more column) INDEX.
+
+    E.g.::
+
+        sometable = Table(
+            "sometable",
+            metadata,
+            Column("name", String(50)),
+            Column("address", String(100)),
+        )
+
+        Index("some_index", sometable.c.name)
+
+    For a no-frills, single column index, the
+    :class:`_schema.Column` construct also supports ``index=True``::
+
+        sometable = Table(
+            "sometable", metadata, Column("name", String(50), index=True)
+        )
+
+    For a composite index, multiple columns can be specified::
+
+        Index("some_index", sometable.c.name, sometable.c.address)
+
+    Functional indexes are supported as well, typically by using the
+    :data:`.func` construct in conjunction with table-bound
+    :class:`_schema.Column` objects::
+
+        Index("some_index", func.lower(sometable.c.name))
+
+    An :class:`.Index` can also be manually associated with a
+    :class:`_schema.Table`,
+    either through inline declaration or using
+    :meth:`_schema.Table.append_constraint`.  When this approach is used,
+    the names
+    of the indexed columns can be specified as strings::
+
+        Table(
+            "sometable",
+            metadata,
+            Column("name", String(50)),
+            Column("address", String(100)),
+            Index("some_index", "name", "address"),
+        )
+
+    To support functional or expression-based indexes in this form, the
+    :func:`_expression.text` construct may be used::
+
+        from sqlalchemy import text
+
+        Table(
+            "sometable",
+            metadata,
+            Column("name", String(50)),
+            Column("address", String(100)),
+            Index("some_index", text("lower(name)")),
+        )
+
+    .. seealso::
+
+        :ref:`schema_indexes` - General information on :class:`.Index`.
+
+        :ref:`postgresql_indexes` - PostgreSQL-specific options available for
+        the :class:`.Index` construct.
+
+        :ref:`mysql_indexes` - MySQL-specific options available for the
+        :class:`.Index` construct.
+
+        :ref:`mssql_indexes` - MSSQL-specific options available for the
+        :class:`.Index` construct.
+
+    """
+
+    __visit_name__ = "index"
+
+    table: Optional[Table]
+    expressions: _typing_Sequence[Union[str, ColumnElement[Any]]]
+    _table_bound_expressions: _typing_Sequence[ColumnElement[Any]]
+
+    def __init__(
+        self,
+        name: Optional[str],
+        *expressions: _DDLColumnArgument,
+        unique: bool = False,
+        quote: Optional[bool] = None,
+        info: Optional[_InfoType] = None,
+        _table: Optional[Table] = None,
+        _column_flag: bool = False,
+        **dialect_kw: Any,
+    ) -> None:
+        r"""Construct an index object.
+
+        :param name:
+          The name of the index
+
+        :param \*expressions:
+          Column expressions to include in the index.   The expressions
+          are normally instances of :class:`_schema.Column`, but may also
+          be arbitrary SQL expressions which ultimately refer to a
+          :class:`_schema.Column`.
+
+        :param unique=False:
+            Keyword only argument; if True, create a unique index.
+
+        :param quote=None:
+            Keyword only argument; whether to apply quoting to the name of
+            the index.  Works in the same manner as that of
+            :paramref:`_schema.Column.quote`.
+
+        :param info=None: Optional data dictionary which will be populated
+            into the :attr:`.SchemaItem.info` attribute of this object.
+
+        :param \**dialect_kw: Additional keyword arguments not mentioned above
+            are dialect specific, and passed in the form
+            ``<dialectname>_<argname>``. See the documentation regarding an
+            individual dialect at :ref:`dialect_toplevel` for detail on
+            documented arguments.
+
+        """
+        self.table = table = None
+
+        self.name = quoted_name.construct(name, quote)
+        self.unique = unique
+        if info is not None:
+            self.info = info
+
+        # TODO: consider "table" argument being public, but for
+        # the purpose of the fix here, it starts as private.
+        if _table is not None:
+            table = _table
+
+        self._validate_dialect_kwargs(dialect_kw)
+
+        self.expressions = []
+        # will call _set_parent() if table-bound column
+        # objects are present
+        ColumnCollectionMixin.__init__(
+            self,
+            *expressions,
+            _column_flag=_column_flag,
+            _gather_expressions=self.expressions,
+        )
+        if table is not None:
+            self._set_parent(table)
+
+    def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None:
+        table = parent
+        assert isinstance(table, Table)
+        ColumnCollectionMixin._set_parent(self, table)
+
+        if self.table is not None and table is not self.table:
+            raise exc.ArgumentError(
+                f"Index '{self.name}' is against table "
+                f"'{self.table.description}', and "
+                f"cannot be associated with table '{table.description}'."
+            )
+        self.table = table
+        table.indexes.add(self)
+
+        expressions = self.expressions
+        col_expressions = self._col_expressions(table)
+        assert len(expressions) == len(col_expressions)
+
+        exprs = []
+        for expr, colexpr in zip(expressions, col_expressions):
+            if isinstance(expr, ClauseElement):
+                exprs.append(expr)
+            elif colexpr is not None:
+                exprs.append(colexpr)
+            else:
+                assert False
+        self.expressions = self._table_bound_expressions = exprs
+
+    def create(self, bind: _CreateDropBind, checkfirst: bool = False) -> None:
+        """Issue a ``CREATE`` statement for this
+        :class:`.Index`, using the given
+        :class:`.Connection` or :class:`.Engine` for connectivity.
+
+        .. seealso::
+
+            :meth:`_schema.MetaData.create_all`.
+
+        """
+        bind._run_ddl_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
+
+    def drop(self, bind: _CreateDropBind, checkfirst: bool = False) -> None:
+        """Issue a ``DROP`` statement for this
+        :class:`.Index`, using the given
+        :class:`.Connection` or :class:`.Engine` for connectivity.
+
+        .. seealso::
+
+            :meth:`_schema.MetaData.drop_all`.
+
+        """
+        bind._run_ddl_visitor(ddl.SchemaDropper, self, checkfirst=checkfirst)
+
+    def __repr__(self) -> str:
+        exprs: _typing_Sequence[Any]  # noqa: F842
+
+        return "Index(%s)" % (
+            ", ".join(
+                [repr(self.name)]
+                + [repr(e) for e in self.expressions]
+                + (["unique=True"] if self.unique else [])
+            )
+        )
+
+
+_NamingSchemaCallable = Callable[[Constraint, Table], str]
+_NamingSchemaDirective = Union[str, _NamingSchemaCallable]
+
+
+class _NamingSchemaTD(TypedDict, total=False):
+    fk: _NamingSchemaDirective
+    pk: _NamingSchemaDirective
+    ix: _NamingSchemaDirective
+    ck: _NamingSchemaDirective
+    uq: _NamingSchemaDirective
+
+
+_NamingSchemaParameter = Union[
+    # it seems like the TypedDict here is useful for pylance typeahead,
+    # and not much else
+    _NamingSchemaTD,
+    # there is no form that allows Union[Type[Any], str] to work in all
+    # cases, including breaking out Mapping[] entries for each combination
+    # even, therefore keys must be `Any` (see #10264)
+    Mapping[Any, _NamingSchemaDirective],
+]
+
+
+DEFAULT_NAMING_CONVENTION: _NamingSchemaParameter = util.immutabledict(
+    {"ix": "ix_%(column_0_label)s"}
+)
+
+
+class MetaData(HasSchemaAttr):
+    """A collection of :class:`_schema.Table`
+    objects and their associated schema
+    constructs.
+
+    Holds a collection of :class:`_schema.Table` objects as well as
+    an optional binding to an :class:`_engine.Engine` or
+    :class:`_engine.Connection`.  If bound, the :class:`_schema.Table` objects
+    in the collection and their columns may participate in implicit SQL
+    execution.
+
+    The :class:`_schema.Table` objects themselves are stored in the
+    :attr:`_schema.MetaData.tables` dictionary.
+
+    :class:`_schema.MetaData` is a thread-safe object for read operations.
+    Construction of new tables within a single :class:`_schema.MetaData`
+    object,
+    either explicitly or via reflection, may not be completely thread-safe.
+
+    .. seealso::
+
+        :ref:`metadata_describing` - Introduction to database metadata
+
+    """
+
+    __visit_name__ = "metadata"
+
+    def __init__(
+        self,
+        schema: Optional[str] = None,
+        quote_schema: Optional[bool] = None,
+        naming_convention: Optional[_NamingSchemaParameter] = None,
+        info: Optional[_InfoType] = None,
+    ) -> None:
+        """Create a new MetaData object.
+
+        :param schema:
+           The default schema to use for the :class:`_schema.Table`,
+           :class:`.Sequence`, and potentially other objects associated with
+           this :class:`_schema.MetaData`. Defaults to ``None``.
+
+           .. seealso::
+
+                :ref:`schema_metadata_schema_name` - details on how the
+                :paramref:`_schema.MetaData.schema` parameter is used.
+
+                :paramref:`_schema.Table.schema`
+
+                :paramref:`.Sequence.schema`
+
+        :param quote_schema:
+            Sets the ``quote_schema`` flag for those :class:`_schema.Table`,
+            :class:`.Sequence`, and other objects which make usage of the
+            local ``schema`` name.
+
+        :param info: Optional data dictionary which will be populated into the
+            :attr:`.SchemaItem.info` attribute of this object.
+
+        :param naming_convention: a dictionary referring to values which
+          will establish default naming conventions for :class:`.Constraint`
+          and :class:`.Index` objects, for those objects which are not given
+          a name explicitly.
+
+          The keys of this dictionary may be:
+
+          * a constraint or Index class, e.g. the :class:`.UniqueConstraint`,
+            :class:`_schema.ForeignKeyConstraint` class, the :class:`.Index`
+            class
+
+          * a string mnemonic for one of the known constraint classes;
+            ``"fk"``, ``"pk"``, ``"ix"``, ``"ck"``, ``"uq"`` for foreign key,
+            primary key, index, check, and unique constraint, respectively.
+
+          * the string name of a user-defined "token" that can be used
+            to define new naming tokens.
+
+          The values associated with each "constraint class" or "constraint
+          mnemonic" key are string naming templates, such as
+          ``"uq_%(table_name)s_%(column_0_name)s"``,
+          which describe how the name should be composed.  The values
+          associated with user-defined "token" keys should be callables of the
+          form ``fn(constraint, table)``, which accepts the constraint/index
+          object and :class:`_schema.Table` as arguments, returning a string
+          result.
+
+          The built-in names are as follows, some of which may only be
+          available for certain types of constraint:
+
+            * ``%(table_name)s`` - the name of the :class:`_schema.Table`
+              object
+              associated with the constraint.
+
+            * ``%(referred_table_name)s`` - the name of the
+              :class:`_schema.Table`
+              object associated with the referencing target of a
+              :class:`_schema.ForeignKeyConstraint`.
+
+            * ``%(column_0_name)s`` - the name of the :class:`_schema.Column`
+              at
+              index position "0" within the constraint.
+
+            * ``%(column_0N_name)s`` - the name of all :class:`_schema.Column`
+              objects in order within the constraint, joined without a
+              separator.
+
+            * ``%(column_0_N_name)s`` - the name of all
+              :class:`_schema.Column`
+              objects in order within the constraint, joined with an
+              underscore as a separator.
+
+            * ``%(column_0_label)s``, ``%(column_0N_label)s``,
+              ``%(column_0_N_label)s`` - the label of either the zeroth
+              :class:`_schema.Column` or all :class:`_schema.Column`
+              objects, separated with or without an underscore
+
+            * ``%(column_0_key)s``, ``%(column_0N_key)s``,
+              ``%(column_0_N_key)s`` - the key of either the zeroth
+              :class:`_schema.Column` or all :class:`_schema.Column`
+              objects, separated with or without an underscore
+
+            * ``%(referred_column_0_name)s``, ``%(referred_column_0N_name)s``,
+              ``%(referred_column_0_N_name)s``, ``%(referred_column_0_key)s``,
+              ``%(referred_column_0N_key)s``, ...  column tokens which
+              render the names/keys/labels of columns that are referenced
+              by a :class:`_schema.ForeignKeyConstraint`.
+
+            * ``%(constraint_name)s`` - a special key that refers to the
+              existing name given to the constraint.  When this key is
+              present, the :class:`.Constraint` object's existing name will be
+              replaced with one that is composed from template string that
+              uses this token. When this token is present, it is required that
+              the :class:`.Constraint` is given an explicit name ahead of time.
+
+            * user-defined: any additional token may be implemented by passing
+              it along with a ``fn(constraint, table)`` callable to the
+              naming_convention dictionary.
+
+          .. versionadded:: 1.3.0 - added new ``%(column_0N_name)s``,
+             ``%(column_0_N_name)s``, and related tokens that produce
+             concatenations of names, keys, or labels for all columns referred
+             to by a given constraint.
+
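+          For example, a convention covering the common constraint types
+          might look like the following sketch, using the mnemonic keys
+          described above::
+
+              metadata_obj = MetaData(
+                  naming_convention={
+                      "ix": "ix_%(column_0_label)s",
+                      "uq": "uq_%(table_name)s_%(column_0_name)s",
+                      "ck": "ck_%(table_name)s_%(constraint_name)s",
+                      "fk": "fk_%(table_name)s_%(column_0_name)s"
+                      "_%(referred_table_name)s",
+                      "pk": "pk_%(table_name)s",
+                  }
+              )
+
+          The ``ck`` template above assumes check constraints are given
+          explicit names, as required by the ``%(constraint_name)s`` token.
+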
+          .. seealso::
+
+                :ref:`constraint_naming_conventions` - for detailed usage
+                examples.
+
+        """
+        if schema is not None and not isinstance(schema, str):
+            raise exc.ArgumentError(
+                "expected schema argument to be a string, "
+                f"got {type(schema)}."
+            )
+        self.tables = util.FacadeDict()
+        self.schema = quoted_name.construct(schema, quote_schema)
+        self.naming_convention = (
+            naming_convention
+            if naming_convention
+            else DEFAULT_NAMING_CONVENTION
+        )
+        if info:
+            self.info = info
+        self._schemas: Set[str] = set()
+        self._sequences: Dict[str, Sequence] = {}
+        self._fk_memos: Dict[Tuple[str, Optional[str]], List[ForeignKey]] = (
+            collections.defaultdict(list)
+        )
+
+    tables: util.FacadeDict[str, Table]
+    """A dictionary of :class:`_schema.Table`
+    objects keyed to their name or "table key".
+
+    The exact key is that determined by the :attr:`_schema.Table.key`
+    attribute;
+    for a table with no :attr:`_schema.Table.schema` attribute,
+    this is the same
+    as :attr:`_schema.Table.name`.  For a table with a schema,
+    it is typically of the
+    form ``schemaname.tablename``.
+
+    .. seealso::
+
+        :attr:`_schema.MetaData.sorted_tables`
+
+    """
+
+    def __repr__(self) -> str:
+        return "MetaData()"
+
+    def __contains__(self, table_or_key: Union[str, Table]) -> bool:
+        if not isinstance(table_or_key, str):
+            table_or_key = table_or_key.key
+        return table_or_key in self.tables
+
+    def _add_table(
+        self, name: str, schema: Optional[str], table: Table
+    ) -> None:
+        key = _get_table_key(name, schema)
+        self.tables._insert_item(key, table)
+        if schema:
+            self._schemas.add(schema)
+
+    def _remove_table(self, name: str, schema: Optional[str]) -> None:
+        key = _get_table_key(name, schema)
+        removed = dict.pop(self.tables, key, None)
+        if removed is not None:
+            for fk in removed.foreign_keys:
+                fk._remove_from_metadata(self)
+        if self._schemas:
+            self._schemas = {
+                t.schema for t in self.tables.values() if t.schema is not None
+            }
+
+    def __getstate__(self) -> Dict[str, Any]:
+        return {
+            "tables": self.tables,
+            "schema": self.schema,
+            "schemas": self._schemas,
+            "sequences": self._sequences,
+            "fk_memos": self._fk_memos,
+            "naming_convention": self.naming_convention,
+        }
+
+    def __setstate__(self, state: Dict[str, Any]) -> None:
+        self.tables = state["tables"]
+        self.schema = state["schema"]
+        self.naming_convention = state["naming_convention"]
+        self._sequences = state["sequences"]
+        self._schemas = state["schemas"]
+        self._fk_memos = state["fk_memos"]
+
+    def clear(self) -> None:
+        """Clear all Table objects from this MetaData."""
+
+        dict.clear(self.tables)  # type: ignore
+        self._schemas.clear()
+        self._fk_memos.clear()
+
+    def remove(self, table: Table) -> None:
+        """Remove the given Table object from this MetaData."""
+
+        self._remove_table(table.name, table.schema)
+
+    @property
+    def sorted_tables(self) -> List[Table]:
+        """Returns a list of :class:`_schema.Table` objects sorted in order of
+        foreign key dependency.
+
+        The sorting will place :class:`_schema.Table`
+        objects that are depended upon first, before the tables that
+        depend upon them, representing the
+        order in which they can be created.   To get the order in which
+        the tables would be dropped, use the ``reversed()`` Python built-in.
+
+        .. warning::
+
+            The :attr:`.MetaData.sorted_tables` attribute cannot by itself
+            accommodate automatic resolution of dependency cycles between
+            tables, which are usually caused by mutually dependent foreign key
+            constraints. When these cycles are detected, the foreign keys
+            of these tables are omitted from consideration in the sort.
+            A warning is emitted when this condition occurs; this warning
+            will become an exception in a future release.   Tables which are
+            not part of the cycle will still be returned in dependency order.
+
+            To resolve these cycles, the
+            :paramref:`_schema.ForeignKeyConstraint.use_alter` parameter may be
+            applied to those constraints which create a cycle.  Alternatively,
+            the :func:`_schema.sort_tables_and_constraints` function will
+            automatically return foreign key constraints in a separate
+            collection when cycles are detected so that they may be applied
+            to a schema separately.
+
+            .. versionchanged:: 1.3.17 - a warning is emitted when
+               :attr:`.MetaData.sorted_tables` cannot perform a proper sort
+               due to cyclical dependencies.  This will be an exception in a
+               future release.  Additionally, the sort will continue to return
+               other tables not involved in the cycle in dependency order which
+               was not the case previously.
+
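+        E.g., a brief sketch of obtaining create order and drop order::
+
+            metadata_obj = MetaData()
+            # ... Table definitions are associated with metadata_obj ...
+
+            create_order = metadata_obj.sorted_tables
+            drop_order = list(reversed(metadata_obj.sorted_tables))
+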
+        .. seealso::
+
+            :func:`_schema.sort_tables`
+
+            :func:`_schema.sort_tables_and_constraints`
+
+            :attr:`_schema.MetaData.tables`
+
+            :meth:`_reflection.Inspector.get_table_names`
+
+            :meth:`_reflection.Inspector.get_sorted_table_and_fkc_names`
+
+
+        """
+        return ddl.sort_tables(
+            sorted(self.tables.values(), key=lambda t: t.key)  # type: ignore
+        )
+
+    # overload needed to work around this mypy issue:
+    # https://github.com/python/mypy/issues/17093
+    @overload
+    def reflect(
+        self,
+        bind: Engine,
+        schema: Optional[str] = ...,
+        views: bool = ...,
+        only: Union[
+            _typing_Sequence[str], Callable[[str, MetaData], bool], None
+        ] = ...,
+        extend_existing: bool = ...,
+        autoload_replace: bool = ...,
+        resolve_fks: bool = ...,
+        **dialect_kwargs: Any,
+    ) -> None: ...
+
+    @overload
+    def reflect(
+        self,
+        bind: Connection,
+        schema: Optional[str] = ...,
+        views: bool = ...,
+        only: Union[
+            _typing_Sequence[str], Callable[[str, MetaData], bool], None
+        ] = ...,
+        extend_existing: bool = ...,
+        autoload_replace: bool = ...,
+        resolve_fks: bool = ...,
+        **dialect_kwargs: Any,
+    ) -> None: ...
+
+    @util.preload_module("sqlalchemy.engine.reflection")
+    def reflect(
+        self,
+        bind: Union[Engine, Connection],
+        schema: Optional[str] = None,
+        views: bool = False,
+        only: Union[
+            _typing_Sequence[str], Callable[[str, MetaData], bool], None
+        ] = None,
+        extend_existing: bool = False,
+        autoload_replace: bool = True,
+        resolve_fks: bool = True,
+        **dialect_kwargs: Any,
+    ) -> None:
+        r"""Load all available table definitions from the database.
+
+        Automatically creates ``Table`` entries in this ``MetaData`` for any
+        table available in the database but not yet present in the
+        ``MetaData``.  May be called multiple times to pick up tables recently
+        added to the database; however, no special action is taken if a table
+        in this ``MetaData`` no longer exists in the database.
+
+        :param bind:
+          A :class:`.Connection` or :class:`.Engine` used to access the
+          database.
+
+        :param schema:
+          Optional, query and reflect tables from an alternate schema.
+          If None, the schema associated with this :class:`_schema.MetaData`
+          is used, if any.
+
+        :param views:
+          If True, also reflect views (materialized and plain).
+
+        :param only:
+          Optional.  Load only a sub-set of available named tables.  May be
+          specified as a sequence of names or a callable.
+
+          If a sequence of names is provided, only those tables will be
+          reflected.  An error is raised if a table is requested but not
+          available.  Named tables already present in this ``MetaData`` are
+          ignored.
+
+          If a callable is provided, it will be used as a boolean predicate to
+          filter the list of potential table names.  The callable is called
+          with a table name and this ``MetaData`` instance as positional
+          arguments and should return a true value for any table to reflect.
+
+        :param extend_existing: Passed along to each :class:`_schema.Table` as
+          :paramref:`_schema.Table.extend_existing`.
+
+        :param autoload_replace: Passed along to each :class:`_schema.Table`
+          as
+          :paramref:`_schema.Table.autoload_replace`.
+
+        :param resolve_fks: if True, reflect :class:`_schema.Table`
+         objects linked
+         to :class:`_schema.ForeignKey` objects located in each
+         :class:`_schema.Table`.
+         For :meth:`_schema.MetaData.reflect`,
+         this has the effect of reflecting
+         related tables that might otherwise not be in the list of tables
+         being reflected, for example if the referenced table is in a
+         different schema or is omitted via the
+         :paramref:`.MetaData.reflect.only` parameter.  When False,
+         :class:`_schema.ForeignKey` objects are not followed to the
+         :class:`_schema.Table`
+         in which they link, however if the related table is also part of the
+         list of tables that would be reflected in any case, the
+         :class:`_schema.ForeignKey` object will still resolve to its related
+         :class:`_schema.Table` after the :meth:`_schema.MetaData.reflect`
+         operation is
+         complete.   Defaults to True.
+
+         .. versionadded:: 1.3.0
+
+         .. seealso::
+
+            :paramref:`_schema.Table.resolve_fks`
+
+        :param \**dialect_kwargs: Additional keyword arguments not mentioned
+         above are dialect specific, and passed in the form
+         ``<dialectname>_<argname>``.  See the documentation regarding an
+         individual dialect at :ref:`dialect_toplevel` for detail on
+         documented arguments.
+
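+        E.g., reflecting tables and views, filtered by a predicate (the
+        database URL and the predicate here are illustrative only)::
+
+            from sqlalchemy import create_engine, MetaData
+
+            engine = create_engine("postgresql://scott:tiger@localhost/test")
+            metadata_obj = MetaData()
+            metadata_obj.reflect(
+                bind=engine,
+                views=True,
+                only=lambda name, meta: not name.startswith("tmp_"),
+            )
+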
+        .. seealso::
+
+            :ref:`metadata_reflection_toplevel`
+
+            :meth:`_events.DDLEvents.column_reflect` - Event used to customize
+            the reflected columns. Usually used to generalize the types using
+            :meth:`_types.TypeEngine.as_generic`
+
+            :ref:`metadata_reflection_dbagnostic_types` - describes how to
+            reflect tables using general types.
+
+        """
+
+        with inspection.inspect(bind)._inspection_context() as insp:
+            reflect_opts: Any = {
+                "autoload_with": insp,
+                "extend_existing": extend_existing,
+                "autoload_replace": autoload_replace,
+                "resolve_fks": resolve_fks,
+                "_extend_on": set(),
+            }
+
+            reflect_opts.update(dialect_kwargs)
+
+            if schema is None:
+                schema = self.schema
+
+            if schema is not None:
+                reflect_opts["schema"] = schema
+
+            kind = util.preloaded.engine_reflection.ObjectKind.TABLE
+            available: util.OrderedSet[str] = util.OrderedSet(
+                insp.get_table_names(schema)
+            )
+            if views:
+                kind = util.preloaded.engine_reflection.ObjectKind.ANY
+                available.update(insp.get_view_names(schema))
+                try:
+                    available.update(insp.get_materialized_view_names(schema))
+                except NotImplementedError:
+                    pass
+
+            if schema is not None:
+                available_w_schema: util.OrderedSet[str] = util.OrderedSet(
+                    [f"{schema}.{name}" for name in available]
+                )
+            else:
+                available_w_schema = available
+
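+            # table keys (schema-qualified where applicable) that are
+            # already present in this MetaData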
+            current = set(self.tables)
+
+            if only is None:
+                load = [
+                    name
+                    for name, schname in zip(available, available_w_schema)
+                    if extend_existing or schname not in current
+                ]
+            elif callable(only):
+                load = [
+                    name
+                    for name, schname in zip(available, available_w_schema)
+                    if (extend_existing or schname not in current)
+                    and only(name, self)
+                ]
+            else:
+                missing = [name for name in only if name not in available]
+                if missing:
+                    s = f" schema '{schema}'" if schema else ""
+                    missing_str = ", ".join(missing)
+                    raise exc.InvalidRequestError(
+                        f"Could not reflect: requested table(s) not available "
+                        f"in {bind.engine!r}{s}: ({missing_str})"
+                    )
+                load = [
+                    name
+                    for name in only
+                    if extend_existing or name not in current
+                ]
+            # pass the available tables so the inspector can
+            # choose to ignore the filter_names
+            _reflect_info = insp._get_reflection_info(
+                schema=schema,
+                filter_names=load,
+                available=available,
+                kind=kind,
+                scope=util.preloaded.engine_reflection.ObjectScope.ANY,
+                **dialect_kwargs,
+            )
+            reflect_opts["_reflect_info"] = _reflect_info
+
+            for name in load:
+                try:
+                    Table(name, self, **reflect_opts)
+                except exc.UnreflectableTableError as uerr:
+                    util.warn(f"Skipping table {name}: {uerr}")
+
+    def create_all(
+        self,
+        bind: _CreateDropBind,
+        tables: Optional[_typing_Sequence[Table]] = None,
+        checkfirst: bool = True,
+    ) -> None:
+        """Create all tables stored in this metadata.
+
+        Conditional by default, will not attempt to recreate tables already
+        present in the target database.
+
+        :param bind:
+          A :class:`.Connection` or :class:`.Engine` used to access the
+          database.
+
+        :param tables:
+          Optional list of ``Table`` objects, which is a subset of the total
+          tables in the ``MetaData`` (others are ignored).
+
+        :param checkfirst:
+          Defaults to True, don't issue CREATEs for tables already present
+          in the target database.
+
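+        E.g., a minimal sketch, assuming ``engine`` is an existing
+        :class:`_engine.Engine`::
+
+            metadata_obj.create_all(engine, checkfirst=True)
+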
+        """
+        bind._run_ddl_visitor(
+            ddl.SchemaGenerator, self, checkfirst=checkfirst, tables=tables
+        )
+
+    def drop_all(
+        self,
+        bind: _CreateDropBind,
+        tables: Optional[_typing_Sequence[Table]] = None,
+        checkfirst: bool = True,
+    ) -> None:
+        """Drop all tables stored in this metadata.
+
+        Conditional by default, will not attempt to drop tables not present in
+        the target database.
+
+        :param bind:
+          A :class:`.Connection` or :class:`.Engine` used to access the
+          database.
+
+        :param tables:
+          Optional list of ``Table`` objects, which is a subset of the
+          total tables in the ``MetaData`` (others are ignored).
+
+        :param checkfirst:
+          Defaults to True, only issue DROPs for tables confirmed to be
+          present in the target database.
+
+        """
+        bind._run_ddl_visitor(
+            ddl.SchemaDropper, self, checkfirst=checkfirst, tables=tables
+        )
+
+
+class Computed(FetchedValue, SchemaItem):
+    """Defines a generated column, i.e. "GENERATED ALWAYS AS" syntax.
+
+    The :class:`.Computed` construct is an inline construct added to the
+    argument list of a :class:`_schema.Column` object::
+
+        from sqlalchemy import Computed
+
+        Table(
+            "square",
+            metadata_obj,
+            Column("side", Float, nullable=False),
+            Column("area", Float, Computed("side * side")),
+        )
+
+    See the linked documentation below for complete details.
+
+    .. versionadded:: 1.3.11
+
+    .. seealso::
+
+        :ref:`computed_ddl`
+
+    """
+
+    __visit_name__ = "computed_column"
+
+    column: Optional[Column[Any]]
+
+    @_document_text_coercion(
+        "sqltext", ":class:`.Computed`", ":paramref:`.Computed.sqltext`"
+    )
+    def __init__(
+        self, sqltext: _DDLColumnArgument, persisted: Optional[bool] = None
+    ) -> None:
+        """Construct a GENERATED ALWAYS AS DDL construct to accompany a
+        :class:`_schema.Column`.
+
+        :param sqltext:
+          A string containing the column generation expression, which will be
+          used verbatim, or a SQL expression construct, such as a
+          :func:`_expression.text`
+          object.   If given as a string, the object is converted to a
+          :func:`_expression.text` object.
+
+        :param persisted:
+          Optional, controls how this column should be persisted by the
+          database.   Possible values are:
+
+          * ``None``, the default, will use the default persistence
+            defined by the database.
+          * ``True``, will render ``GENERATED ALWAYS AS ... STORED``, or the
+            equivalent for the target database if supported.
+          * ``False``, will render ``GENERATED ALWAYS AS ... VIRTUAL``, or
+            the equivalent for the target database if supported.
+
+          Specifying ``True`` or ``False`` may raise an error when the DDL
+          is emitted to the target database if the database does not support
+          that persistence option.   Leaving this parameter at its default
+          of ``None`` is guaranteed to succeed for all databases that support
+          ``GENERATED ALWAYS AS``.
+
+        """
+        self.sqltext = coercions.expect(roles.DDLExpressionRole, sqltext)
+        self.persisted = persisted
+        self.column = None
+
+    def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None:
+        assert isinstance(parent, Column)
+
+        if not isinstance(
+            parent.server_default, (type(None), Computed)
+        ) or not isinstance(parent.server_onupdate, (type(None), Computed)):
+            raise exc.ArgumentError(
+                "A generated column cannot specify a server_default or a "
+                "server_onupdate argument"
+            )
+        self.column = parent
+        parent.computed = self
+        self.column.server_onupdate = self
+        self.column.server_default = self
+
+    def _as_for_update(self, for_update: bool) -> FetchedValue:
+        return self
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`_schema.Computed.copy` method is deprecated "
+        "and will be removed in a future release.",
+    )
+    def copy(
+        self, *, target_table: Optional[Table] = None, **kw: Any
+    ) -> Computed:
+        return self._copy(target_table=target_table, **kw)
+
+    def _copy(
+        self, *, target_table: Optional[Table] = None, **kw: Any
+    ) -> Computed:
+        sqltext = _copy_expression(
+            self.sqltext,
+            self.column.table if self.column is not None else None,
+            target_table,
+        )
+        g = Computed(sqltext, persisted=self.persisted)
+
+        return self._schema_item_copy(g)
+
+
+class Identity(IdentityOptions, FetchedValue, SchemaItem):
+    """Defines an identity column, i.e. "GENERATED { ALWAYS | BY DEFAULT }
+    AS IDENTITY" syntax.
+
+    The :class:`.Identity` construct is an inline construct added to the
+    argument list of a :class:`_schema.Column` object::
+
+        from sqlalchemy import Identity
+
+        Table(
+            "foo",
+            metadata_obj,
+            Column("id", Integer, Identity()),
+            Column("description", Text),
+        )
+
+    See the linked documentation below for complete details.
+
+    .. versionadded:: 1.4
+
+    .. seealso::
+
+        :ref:`identity_ddl`
+
+    """
+
+    __visit_name__ = "identity_column"
+
+    is_identity = True
+
+    def __init__(
+        self,
+        always: bool = False,
+        on_null: Optional[bool] = None,
+        start: Optional[int] = None,
+        increment: Optional[int] = None,
+        minvalue: Optional[int] = None,
+        maxvalue: Optional[int] = None,
+        nominvalue: Optional[bool] = None,
+        nomaxvalue: Optional[bool] = None,
+        cycle: Optional[bool] = None,
+        cache: Optional[int] = None,
+        order: Optional[bool] = None,
+    ) -> None:
+        """Construct a GENERATED { ALWAYS | BY DEFAULT } AS IDENTITY DDL
+        construct to accompany a :class:`_schema.Column`.
+
+        See the :class:`.Sequence` documentation for a complete description
+        of most parameters.
+
+        .. note::
+            MSSQL supports this construct as the preferred alternative to
+            generate an IDENTITY on a column, but it uses non-standard
+            syntax that only supports :paramref:`_schema.Identity.start`
+            and :paramref:`_schema.Identity.increment`.
+            All other parameters are ignored.
+
+        :param always:
+          A boolean that indicates the type of identity column.
+          If ``False`` is specified, the default, then the user-specified
+          value takes precedence.
+          If ``True`` is specified, a user-specified value is not accepted (
+          on some backends, like PostgreSQL, OVERRIDING SYSTEM VALUE, or
+          similar, may be specified in an INSERT to override the sequence
+          value).
+          Some backends also have a default value for this parameter;
+          ``None`` can be used to omit rendering this part in the DDL. It
+          will be treated as ``False`` if a backend does not have a default
+          value.
+
+        :param on_null:
+          Set to ``True`` to specify ON NULL in conjunction with a
+          ``always=False`` identity column. This option is only supported on
+          some backends, like Oracle Database.
+
+        :param start: the starting value of the sequence.
+        :param increment: the increment value of the sequence.
+        :param minvalue: the minimum value of the sequence.
+        :param maxvalue: the maximum value of the sequence.
+        :param nominvalue: no minimum value of the sequence.
+        :param nomaxvalue: no maximum value of the sequence.
+        :param cycle: allows the sequence to wrap around when the maxvalue
+         or minvalue has been reached.
+        :param cache: optional integer value; number of future values in the
+         sequence which are calculated in advance.
+        :param order: optional boolean value; if true, renders the
+         ORDER keyword.
+
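+        E.g., a minimal sketch (the table and column names here are
+        hypothetical)::
+
+            from sqlalchemy import Column, Identity, Integer, MetaData, Table
+
+            data = Table(
+                "data",
+                MetaData(),
+                Column("id", Integer, Identity(start=100, increment=10)),
+            )
+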
+        """
+        IdentityOptions.__init__(
+            self,
+            start=start,
+            increment=increment,
+            minvalue=minvalue,
+            maxvalue=maxvalue,
+            nominvalue=nominvalue,
+            nomaxvalue=nomaxvalue,
+            cycle=cycle,
+            cache=cache,
+            order=order,
+        )
+        self.always = always
+        self.on_null = on_null
+        self.column = None
+
+    def _set_parent(self, parent: SchemaEventTarget, **kw: Any) -> None:
+        assert isinstance(parent, Column)
+        if not isinstance(
+            parent.server_default, (type(None), Identity)
+        ) or not isinstance(parent.server_onupdate, type(None)):
+            raise exc.ArgumentError(
+                "A column with an Identity object cannot specify a "
+                "server_default or a server_onupdate argument"
+            )
+        if parent.autoincrement is False:
+            raise exc.ArgumentError(
+                "A column with an Identity object cannot specify "
+                "autoincrement=False"
+            )
+        self.column = parent
+
+        parent.identity = self
+        if parent._user_defined_nullable is NULL_UNSPECIFIED:
+            parent.nullable = False
+
+        parent.server_default = self
+
+    def _as_for_update(self, for_update: bool) -> FetchedValue:
+        return self
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`_schema.Identity.copy` method is deprecated "
+        "and will be removed in a future release.",
+    )
+    def copy(self, **kw: Any) -> Identity:
+        return self._copy(**kw)
+
+    def _copy(self, **kw: Any) -> Identity:
+        i = Identity(
+            always=self.always,
+            on_null=self.on_null,
+            start=self.start,
+            increment=self.increment,
+            minvalue=self.minvalue,
+            maxvalue=self.maxvalue,
+            nominvalue=self.nominvalue,
+            nomaxvalue=self.nomaxvalue,
+            cycle=self.cycle,
+            cache=self.cache,
+            order=self.order,
+        )
+
+        return self._schema_item_copy(i)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/selectable.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/selectable.py
new file mode 100644
index 00000000..d137ab50
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/selectable.py
@@ -0,0 +1,7183 @@
+# sql/selectable.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""The :class:`_expression.FromClause` class of SQL expression elements,
+representing SQL tables and derived rowsets.
+
+"""
+
+from __future__ import annotations
+
+import collections
+from enum import Enum
+import itertools
+from typing import AbstractSet
+from typing import Any as TODO_Any
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import Generic
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import NamedTuple
+from typing import NoReturn
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import cache_key
+from . import coercions
+from . import operators
+from . import roles
+from . import traversals
+from . import type_api
+from . import visitors
+from ._typing import _ColumnsClauseArgument
+from ._typing import _no_kw
+from ._typing import _T
+from ._typing import _TP
+from ._typing import is_column_element
+from ._typing import is_select_statement
+from ._typing import is_subquery
+from ._typing import is_table
+from ._typing import is_text_clause
+from .annotation import Annotated
+from .annotation import SupportsCloneAnnotations
+from .base import _clone
+from .base import _cloned_difference
+from .base import _cloned_intersection
+from .base import _entity_namespace_key
+from .base import _EntityNamespace
+from .base import _expand_cloned
+from .base import _from_objects
+from .base import _generative
+from .base import _never_select_column
+from .base import _NoArg
+from .base import _select_iterables
+from .base import CacheableOptions
+from .base import ColumnCollection
+from .base import ColumnSet
+from .base import CompileState
+from .base import DedupeColumnCollection
+from .base import Executable
+from .base import Generative
+from .base import HasCompileState
+from .base import HasMemoized
+from .base import Immutable
+from .coercions import _document_text_coercion
+from .elements import _anonymous_label
+from .elements import BindParameter
+from .elements import BooleanClauseList
+from .elements import ClauseElement
+from .elements import ClauseList
+from .elements import ColumnClause
+from .elements import ColumnElement
+from .elements import DQLDMLClauseElement
+from .elements import GroupedElement
+from .elements import literal_column
+from .elements import TableValuedColumn
+from .elements import UnaryExpression
+from .operators import OperatorType
+from .sqltypes import NULLTYPE
+from .visitors import _TraverseInternalsType
+from .visitors import InternalTraversal
+from .visitors import prefix_anon_map
+from .. import exc
+from .. import util
+from ..util import HasMemoized_ro_memoized_attribute
+from ..util.typing import Literal
+from ..util.typing import Protocol
+from ..util.typing import Self
+
+
+and_ = BooleanClauseList.and_
+
+
+if TYPE_CHECKING:
+    from ._typing import _ColumnExpressionArgument
+    from ._typing import _ColumnExpressionOrStrLabelArgument
+    from ._typing import _FromClauseArgument
+    from ._typing import _JoinTargetArgument
+    from ._typing import _LimitOffsetType
+    from ._typing import _MAYBE_ENTITY
+    from ._typing import _NOT_ENTITY
+    from ._typing import _OnClauseArgument
+    from ._typing import _SelectStatementForCompoundArgument
+    from ._typing import _T0
+    from ._typing import _T1
+    from ._typing import _T2
+    from ._typing import _T3
+    from ._typing import _T4
+    from ._typing import _T5
+    from ._typing import _T6
+    from ._typing import _T7
+    from ._typing import _TextCoercedExpressionArgument
+    from ._typing import _TypedColumnClauseArgument as _TCCA
+    from ._typing import _TypeEngineArgument
+    from .base import _AmbiguousTableNameMap
+    from .base import ExecutableOption
+    from .base import ReadOnlyColumnCollection
+    from .cache_key import _CacheKeyTraversalType
+    from .compiler import SQLCompiler
+    from .dml import Delete
+    from .dml import Update
+    from .elements import BinaryExpression
+    from .elements import KeyedColumnElement
+    from .elements import Label
+    from .elements import NamedColumn
+    from .elements import TextClause
+    from .functions import Function
+    from .schema import ForeignKey
+    from .schema import ForeignKeyConstraint
+    from .sqltypes import TableValueType
+    from .type_api import TypeEngine
+    from .visitors import _CloneCallableType
+
+
+_ColumnsClauseElement = Union["FromClause", ColumnElement[Any], "TextClause"]
+_LabelConventionCallable = Callable[
+    [Union["ColumnElement[Any]", "TextClause"]], Optional[str]
+]
+
+
+class _JoinTargetProtocol(Protocol):
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]: ...
+
+    @util.ro_non_memoized_property
+    def entity_namespace(self) -> _EntityNamespace: ...
+
+
+_JoinTargetElement = Union["FromClause", _JoinTargetProtocol]
+_OnClauseElement = Union["ColumnElement[bool]", _JoinTargetProtocol]
+
+_ForUpdateOfArgument = Union[
+    # single column, Table, ORM Entity
+    Union[
+        "_ColumnExpressionArgument[Any]",
+        "_FromClauseArgument",
+    ],
+    # or sequence of single column elements
+    Sequence["_ColumnExpressionArgument[Any]"],
+]
+
+
+_SetupJoinsElement = Tuple[
+    _JoinTargetElement,
+    Optional[_OnClauseElement],
+    Optional["FromClause"],
+    Dict[str, Any],
+]
+
+
+_SelectIterable = Iterable[Union["ColumnElement[Any]", "TextClause"]]
+
+
+class _OffsetLimitParam(BindParameter[int]):
+    inherit_cache = True
+
+    @property
+    def _limit_offset_value(self) -> Optional[int]:
+        return self.effective_value
+
+
+class ReturnsRows(roles.ReturnsRowsRole, DQLDMLClauseElement):
+    """The base-most class for Core constructs that have some concept of
+    columns that can represent rows.
+
+    While the SELECT statement and TABLE are the primary things we think
+    of in this category, DML like INSERT, UPDATE and DELETE can also specify
+    RETURNING which means they can be used in CTEs and other forms, and
+    PostgreSQL has functions that return rows also.
+
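+    For instance, a DML statement with RETURNING used as a CTE (a minimal
+    sketch; ``user_table`` is hypothetical)::
+
+        upd = (
+            user_table.update()
+            .values(name="x")
+            .returning(user_table.c.id)
+            .cte("updated")
+        )
+        stmt = select(upd.c.id)
+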
+    .. versionadded:: 1.4
+
+    """
+
+    _is_returns_rows = True
+
+    # sub-elements of returns_rows
+    _is_from_clause = False
+    _is_select_base = False
+    _is_select_statement = False
+    _is_lateral = False
+
+    @property
+    def selectable(self) -> ReturnsRows:
+        return self
+
+    @util.ro_non_memoized_property
+    def _all_selected_columns(self) -> _SelectIterable:
+        """A sequence of column expression objects that represents the
+        "selected" columns of this :class:`_expression.ReturnsRows`.
+
+        This is typically equivalent to .exported_columns, except it is
+        delivered in the form of a straight sequence rather than a keyed
+        :class:`_expression.ColumnCollection`.
+
+        """
+        raise NotImplementedError()
+
+    def is_derived_from(self, fromclause: Optional[FromClause]) -> bool:
+        """Return ``True`` if this :class:`.ReturnsRows` is
+        'derived' from the given :class:`.FromClause`.
+
+        For example, an Alias of a Table is derived from that Table.
+
+        """
+        raise NotImplementedError()
+
+    def _generate_fromclause_column_proxies(
+        self,
+        fromclause: FromClause,
+        columns: ColumnCollection[str, KeyedColumnElement[Any]],
+        primary_key: ColumnSet,
+        foreign_keys: Set[KeyedColumnElement[Any]],
+    ) -> None:
+        """Populate columns into an :class:`.AliasedReturnsRows` object."""
+
+        raise NotImplementedError()
+
+    def _refresh_for_new_column(self, column: ColumnElement[Any]) -> None:
+        """reset internal collections for an incoming column being added."""
+        raise NotImplementedError()
+
+    @property
+    def exported_columns(self) -> ReadOnlyColumnCollection[Any, Any]:
+        """A :class:`_expression.ColumnCollection`
+        that represents the "exported"
+        columns of this :class:`_expression.ReturnsRows`.
+
+        The "exported" columns represent the collection of
+        :class:`_expression.ColumnElement`
+        expressions that are rendered by this SQL
+        construct.  The primary varieties are the "FROM clause columns"
+        of a FROM clause, such as a table, join, or subquery; the
+        "SELECTed columns", which are the columns in the "columns clause"
+        of a SELECT statement; and the RETURNING columns of a DML
+        statement.
+
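+        For instance (an illustrative sketch; the names are hypothetical)::
+
+            from sqlalchemy import column, select, table
+
+            t = table("t", column("x"))
+
+            # for a FROM clause, the exported columns are the .c collection
+            print(list(t.exported_columns))
+
+            # for a SELECT, they are the columns of the columns clause
+            print(list(select(t.c.x).exported_columns))
+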
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :attr:`_expression.FromClause.exported_columns`
+
+            :attr:`_expression.SelectBase.exported_columns`
+        """
+
+        raise NotImplementedError()
+
+
+class ExecutableReturnsRows(Executable, ReturnsRows):
+    """base for executable statements that return rows."""
+
+
+class TypedReturnsRows(ExecutableReturnsRows, Generic[_TP]):
+    """base for a typed executable statements that return rows."""
+
+
+class Selectable(ReturnsRows):
+    """Mark a class as being selectable."""
+
+    __visit_name__ = "selectable"
+
+    is_selectable = True
+
+    def _refresh_for_new_column(self, column: ColumnElement[Any]) -> None:
+        raise NotImplementedError()
+
+    def lateral(self, name: Optional[str] = None) -> LateralFromClause:
+        """Return a LATERAL alias of this :class:`_expression.Selectable`.
+
+        The return value is the :class:`_expression.Lateral` construct also
+        provided by the top-level :func:`_expression.lateral` function.
+
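+        E.g. (a minimal sketch; ``addresses`` and ``users`` are
+        hypothetical tables)::
+
+            subq = (
+                select(addresses)
+                .where(addresses.c.user_id == users.c.id)
+                .lateral("addr")
+            )
+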
+        .. seealso::
+
+            :ref:`tutorial_lateral_correlation` - overview of usage.
+
+        """
+        return Lateral._construct(self, name=name)
+
+    @util.deprecated(
+        "1.4",
+        message="The :meth:`.Selectable.replace_selectable` method is "
+        "deprecated, and will be removed in a future release.  Similar "
+        "functionality is available via the sqlalchemy.sql.visitors module.",
+    )
+    @util.preload_module("sqlalchemy.sql.util")
+    def replace_selectable(self, old: FromClause, alias: Alias) -> Self:
+        """Replace all occurrences of :class:`_expression.FromClause`
+        'old' with the given :class:`_expression.Alias`
+        object, returning a copy of this :class:`_expression.FromClause`.
+
+        """
+        return util.preloaded.sql_util.ClauseAdapter(alias).traverse(self)
+
+    def corresponding_column(
+        self, column: KeyedColumnElement[Any], require_embedded: bool = False
+    ) -> Optional[KeyedColumnElement[Any]]:
+        """Given a :class:`_expression.ColumnElement`, return the exported
+        :class:`_expression.ColumnElement` object from the
+        :attr:`_expression.Selectable.exported_columns`
+        collection of this :class:`_expression.Selectable`
+        which corresponds to that
+        original :class:`_expression.ColumnElement` via a common ancestor
+        column.
+
+        :param column: the target :class:`_expression.ColumnElement`
+                      to be matched.
+
+        :param require_embedded: only return corresponding columns for
+         the given :class:`_expression.ColumnElement`, if the given
+         :class:`_expression.ColumnElement`
+         is actually present within a sub-element
+         of this :class:`_expression.Selectable`.
+         Normally the column will match if
+         it merely shares a common ancestor with one of the exported
+         columns of this :class:`_expression.Selectable`.
+
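+        E.g., a subquery exports proxies of the columns it selects, and
+        the proxy for an original column may be located as follows (a
+        minimal sketch; ``some_table`` is hypothetical)::
+
+            subq = select(some_table).subquery()
+
+            # returns subq.c.x, the proxy corresponding to some_table.c.x
+            proxy = subq.corresponding_column(some_table.c.x)
+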
+        .. seealso::
+
+            :attr:`_expression.Selectable.exported_columns` - the
+            :class:`_expression.ColumnCollection`
+            that is used for the operation.
+
+            :meth:`_expression.ColumnCollection.corresponding_column`
+            - implementation
+            method.
+
+        """
+
+        return self.exported_columns.corresponding_column(
+            column, require_embedded
+        )
+
+
+class HasPrefixes:
+    _prefixes: Tuple[Tuple[DQLDMLClauseElement, str], ...] = ()
+
+    _has_prefixes_traverse_internals: _TraverseInternalsType = [
+        ("_prefixes", InternalTraversal.dp_prefix_sequence)
+    ]
+
+    @_generative
+    @_document_text_coercion(
+        "prefixes",
+        ":meth:`_expression.HasPrefixes.prefix_with`",
+        ":paramref:`.HasPrefixes.prefix_with.*prefixes`",
+    )
+    def prefix_with(
+        self,
+        *prefixes: _TextCoercedExpressionArgument[Any],
+        dialect: str = "*",
+    ) -> Self:
+        r"""Add one or more expressions following the statement keyword, i.e.
+        SELECT, INSERT, UPDATE, or DELETE. Generative.
+
+        This is used to support backend-specific prefix keywords such as those
+        provided by MySQL.
+
+        E.g.::
+
+            stmt = table.insert().prefix_with("LOW_PRIORITY", dialect="mysql")
+
+            # MySQL 5.7 optimizer hints
+            stmt = select(table).prefix_with("/*+ BKA(t1) */", dialect="mysql")
+
+        Multiple prefixes can be specified by multiple calls
+        to :meth:`_expression.HasPrefixes.prefix_with`.
+
+        :param \*prefixes: textual or :class:`_expression.ClauseElement`
+         construct which
+         will be rendered following the INSERT, UPDATE, or DELETE
+         keyword.
+        :param dialect: optional string dialect name which will
+         limit rendering of this prefix to only that dialect.
+
+        """
+        self._prefixes = self._prefixes + tuple(
+            [
+                (coercions.expect(roles.StatementOptionRole, p), dialect)
+                for p in prefixes
+            ]
+        )
+        return self
+
+
+class HasSuffixes:
+    _suffixes: Tuple[Tuple[DQLDMLClauseElement, str], ...] = ()
+
+    _has_suffixes_traverse_internals: _TraverseInternalsType = [
+        ("_suffixes", InternalTraversal.dp_prefix_sequence)
+    ]
+
+    @_generative
+    @_document_text_coercion(
+        "suffixes",
+        ":meth:`_expression.HasSuffixes.suffix_with`",
+        ":paramref:`.HasSuffixes.suffix_with.*suffixes`",
+    )
+    def suffix_with(
+        self,
+        *suffixes: _TextCoercedExpressionArgument[Any],
+        dialect: str = "*",
+    ) -> Self:
+        r"""Add one or more expressions following the statement as a whole.
+
+        This is used to support backend-specific suffix keywords on
+        certain constructs.
+
+        E.g.::
+
+            stmt = (
+                select(col1, col2)
+                .cte()
+                .suffix_with(
+                    "cycle empno set y_cycle to 1 default 0", dialect="oracle"
+                )
+            )
+
+        Multiple suffixes can be specified by multiple calls
+        to :meth:`_expression.HasSuffixes.suffix_with`.
+
+        :param \*suffixes: textual or :class:`_expression.ClauseElement`
+         construct which
+         will be rendered following the target clause.
+        :param dialect: Optional string dialect name which will
+         limit rendering of this suffix to only that dialect.
+
+        """
+        self._suffixes = self._suffixes + tuple(
+            [
+                (coercions.expect(roles.StatementOptionRole, p), dialect)
+                for p in suffixes
+            ]
+        )
+        return self
+
+
+class HasHints:
+    _hints: util.immutabledict[Tuple[FromClause, str], str] = (
+        util.immutabledict()
+    )
+    _statement_hints: Tuple[Tuple[str, str], ...] = ()
+
+    _has_hints_traverse_internals: _TraverseInternalsType = [
+        ("_statement_hints", InternalTraversal.dp_statement_hint_list),
+        ("_hints", InternalTraversal.dp_table_hint_list),
+    ]
+
+    @_generative
+    def with_statement_hint(self, text: str, dialect_name: str = "*") -> Self:
+        """Add a statement hint to this :class:`_expression.Select` or
+        other selectable object.
+
+        .. tip::
+
+            :meth:`_expression.Select.with_statement_hint` generally adds hints
+            **at the trailing end** of a SELECT statement.  To place
+            dialect-specific hints such as optimizer hints at the **front** of
+            the SELECT statement after the SELECT keyword, use the
+            :meth:`_expression.Select.prefix_with` method for an open-ended
+            space, or for table-specific hints the
+            :meth:`_expression.Select.with_hint` may be used, which places
+            hints in a dialect-specific location.
+
+        This method is similar to :meth:`_expression.Select.with_hint` except
+        that it does not require an individual table, and instead applies to
+        the statement as a whole.
+
+        Hints here are specific to the backend database and may include
+        directives such as isolation levels, file directives, fetch directives,
+        etc.
+
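+        E.g. (an illustrative sketch; the hint text is backend-specific
+        and hypothetical here)::
+
+            stmt = select(table1).with_statement_hint(
+                "OPTION (MAXDOP 1)", dialect_name="mssql"
+            )
+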
+        .. seealso::
+
+            :meth:`_expression.Select.with_hint`
+
+            :meth:`_expression.Select.prefix_with` - generic SELECT prefixing
+            which also can suit some database-specific HINT syntaxes such as
+            MySQL or Oracle Database optimizer hints
+
+        """
+        return self._with_hint(None, text, dialect_name)
+
+    @_generative
+    def with_hint(
+        self,
+        selectable: _FromClauseArgument,
+        text: str,
+        dialect_name: str = "*",
+    ) -> Self:
+        r"""Add an indexing or other executional context hint for the given
+        selectable to this :class:`_expression.Select` or other selectable
+        object.
+
+        .. tip::
+
+            The :meth:`_expression.Select.with_hint` method adds hints that are
+            **specific to a single table** to a statement, in a location that
+            is **dialect-specific**.  To add generic optimizer hints to the
+            **beginning** of a statement ahead of the SELECT keyword such as
+            for MySQL or Oracle Database, use the
+            :meth:`_expression.Select.prefix_with` method.  To add optimizer
+            hints to the **end** of a statement such as for PostgreSQL, use the
+            :meth:`_expression.Select.with_statement_hint` method.
+
+        The text of the hint is rendered in the appropriate
+        location for the database backend in use, relative
+        to the given :class:`_schema.Table` or :class:`_expression.Alias`
+        passed as the
+        ``selectable`` argument. The dialect implementation
+        typically uses Python string substitution syntax
+        with the token ``%(name)s`` to render the name of
+        the table or alias. E.g. when using Oracle Database, the
+        following::
+
+            select(mytable).with_hint(mytable, "index(%(name)s ix_mytable)")
+
+        Would render SQL as:
+
+        .. sourcecode:: sql
+
+            select /*+ index(mytable ix_mytable) */ ... from mytable
+
+        The ``dialect_name`` option will limit the rendering of a particular
+        hint to a particular backend. For example, to add hints for both
+        Oracle Database and MSSQL simultaneously::
+
+            select(mytable).with_hint(
+                mytable, "index(%(name)s ix_mytable)", "oracle"
+            ).with_hint(mytable, "WITH INDEX ix_mytable", "mssql")
+
+        .. seealso::
+
+            :meth:`_expression.Select.with_statement_hint`
+
+            :meth:`_expression.Select.prefix_with` - generic SELECT prefixing
+            which also can suit some database-specific HINT syntaxes such as
+            MySQL or Oracle Database optimizer hints
+
+        """
+
+        return self._with_hint(selectable, text, dialect_name)
+
+    def _with_hint(
+        self,
+        selectable: Optional[_FromClauseArgument],
+        text: str,
+        dialect_name: str,
+    ) -> Self:
+        if selectable is None:
+            self._statement_hints += ((dialect_name, text),)
+        else:
+            self._hints = self._hints.union(
+                {
+                    (
+                        coercions.expect(roles.FromClauseRole, selectable),
+                        dialect_name,
+                    ): text
+                }
+            )
+        return self
+
+
+class FromClause(roles.AnonymizedFromClauseRole, Selectable):
+    """Represent an element that can be used within the ``FROM``
+    clause of a ``SELECT`` statement.
+
+    The most common forms of :class:`_expression.FromClause` are the
+    :class:`_schema.Table` and the :func:`_expression.select` constructs.  Key
+    features common to all :class:`_expression.FromClause` objects include:
+
+    * a :attr:`.c` collection, which provides per-name access to a collection
+      of :class:`_expression.ColumnElement` objects.
+    * a :attr:`.primary_key` attribute, which is a collection of all those
+      :class:`_expression.ColumnElement`
+      objects that indicate the ``primary_key`` flag.
+    * Methods to generate various derivations of a "from" clause, including
+      :meth:`_expression.FromClause.alias`,
+      :meth:`_expression.FromClause.join`,
+      :meth:`_expression.FromClause.select`.
+
+
+    """
+
+    __visit_name__ = "fromclause"
+    named_with_column = False
+
+    @util.ro_non_memoized_property
+    def _hide_froms(self) -> Iterable[FromClause]:
+        return ()
+
+    _is_clone_of: Optional[FromClause]
+
+    _columns: ColumnCollection[Any, Any]
+
+    schema: Optional[str] = None
+    """Define the 'schema' attribute for this :class:`_expression.FromClause`.
+
+    This is typically ``None`` for most objects except that of
+    :class:`_schema.Table`, where it is taken as the value of the
+    :paramref:`_schema.Table.schema` argument.
+
+    """
+
+    is_selectable = True
+    _is_from_clause = True
+    _is_join = False
+
+    _use_schema_map = False
+
+    def select(self) -> Select[Any]:
+        r"""Return a SELECT of this :class:`_expression.FromClause`.
+
+
+        e.g.::
+
+            stmt = some_table.select().where(some_table.c.id == 5)
+
+        .. seealso::
+
+            :func:`_expression.select` - general purpose
+            method which allows for arbitrary column lists.
+
+        """
+        return Select(self)
+
+    def join(
+        self,
+        right: _FromClauseArgument,
+        onclause: Optional[_ColumnExpressionArgument[bool]] = None,
+        isouter: bool = False,
+        full: bool = False,
+    ) -> Join:
+        """Return a :class:`_expression.Join` from this
+        :class:`_expression.FromClause`
+        to another :class:`FromClause`.
+
+        E.g.::
+
+            from sqlalchemy import join
+
+            j = user_table.join(
+                address_table, user_table.c.id == address_table.c.user_id
+            )
+            stmt = select(user_table).select_from(j)
+
+        would emit SQL along the lines of:
+
+        .. sourcecode:: sql
+
+            SELECT user.id, user.name FROM user
+            JOIN address ON user.id = address.user_id
+
+        :param right: the right side of the join; this is any
+         :class:`_expression.FromClause` object such as a
+         :class:`_schema.Table` object, and
+         may also be a selectable-compatible object such as an ORM-mapped
+         class.
+
+        :param onclause: a SQL expression representing the ON clause of the
+         join.  If left at ``None``, :meth:`_expression.FromClause.join`
+         will attempt to
+         join the two tables based on a foreign key relationship.
+
+        :param isouter: if True, render a LEFT OUTER JOIN, instead of JOIN.
+
+        :param full: if True, render a FULL OUTER JOIN, instead of LEFT OUTER
+         JOIN.  Implies :paramref:`.FromClause.join.isouter`.
+
+        .. seealso::
+
+            :func:`_expression.join` - standalone function
+
+            :class:`_expression.Join` - the type of object produced
+
+        """
+
+        return Join(self, right, onclause, isouter, full)
+
+    def outerjoin(
+        self,
+        right: _FromClauseArgument,
+        onclause: Optional[_ColumnExpressionArgument[bool]] = None,
+        full: bool = False,
+    ) -> Join:
+        """Return a :class:`_expression.Join` from this
+        :class:`_expression.FromClause`
+        to another :class:`FromClause`, with the "isouter" flag set to
+        True.
+
+        E.g.::
+
+            from sqlalchemy import outerjoin
+
+            j = user_table.outerjoin(
+                address_table, user_table.c.id == address_table.c.user_id
+            )
+
+        The above is equivalent to::
+
+            j = user_table.join(
+                address_table, user_table.c.id == address_table.c.user_id, isouter=True
+            )
+
+        :param right: the right side of the join; this is any
+         :class:`_expression.FromClause` object such as a
+         :class:`_schema.Table` object, and
+         may also be a selectable-compatible object such as an ORM-mapped
+         class.
+
+        :param onclause: a SQL expression representing the ON clause of the
+         join.  If left at ``None``, :meth:`_expression.FromClause.join`
+         will attempt to
+         join the two tables based on a foreign key relationship.
+
+        :param full: if True, render a FULL OUTER JOIN, instead of
+         LEFT OUTER JOIN.
+
+        .. seealso::
+
+            :meth:`_expression.FromClause.join`
+
+            :class:`_expression.Join`
+
+        """  # noqa: E501
+
+        return Join(self, right, onclause, True, full)
+
+    def alias(
+        self, name: Optional[str] = None, flat: bool = False
+    ) -> NamedFromClause:
+        """Return an alias of this :class:`_expression.FromClause`.
+
+        E.g.::
+
+            a2 = some_table.alias("a2")
+
+        The above code creates an :class:`_expression.Alias`
+        object which can be used
+        as a FROM clause in any SELECT statement.
+
+        .. seealso::
+
+            :ref:`tutorial_using_aliases`
+
+            :func:`_expression.alias`
+
+        """
+
+        return Alias._construct(self, name=name)
+
+    def tablesample(
+        self,
+        sampling: Union[float, Function[Any]],
+        name: Optional[str] = None,
+        seed: Optional[roles.ExpressionElementRole[Any]] = None,
+    ) -> TableSample:
+        """Return a TABLESAMPLE alias of this :class:`_expression.FromClause`.
+
+        The return value is the :class:`_expression.TableSample`
+        construct also
+        provided by the top-level :func:`_expression.tablesample` function.
+
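+        E.g. (a brief sketch; ``people`` is a hypothetical table)::
+
+            selectable = people.tablesample(
+                func.bernoulli(1), name="alias", seed=func.random()
+            )
+            stmt = select(selectable.c.people_id)
+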
+        .. seealso::
+
+            :func:`_expression.tablesample` - usage guidelines and parameters
+
+        """
+        return TableSample._construct(
+            self, sampling=sampling, name=name, seed=seed
+        )
+
+    def is_derived_from(self, fromclause: Optional[FromClause]) -> bool:
+        """Return ``True`` if this :class:`_expression.FromClause` is
+        'derived' from the given ``FromClause``.
+
+        For example, an Alias of a Table is derived from that Table.
+
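+        E.g. (a minimal sketch; ``some_table`` is hypothetical)::
+
+            a = some_table.alias()
+            assert a.is_derived_from(some_table)
+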
+        """
+        # this is essentially an "identity" check in the base class.
+        # Other constructs override this to traverse through
+        # contained elements.
+        return fromclause in self._cloned_set
+
+    def _is_lexical_equivalent(self, other: FromClause) -> bool:
+        """Return ``True`` if this :class:`_expression.FromClause` and
+        the other represent the same lexical identity.
+
+        This tests if either one is a copy of the other, or
+        if they are the same via annotation identity.
+
+        """
+        return bool(self._cloned_set.intersection(other._cloned_set))
+
+    @util.ro_non_memoized_property
+    def description(self) -> str:
+        """A brief description of this :class:`_expression.FromClause`.
+
+        Used primarily for error message formatting.
+
+        """
+        return getattr(self, "name", self.__class__.__name__ + " object")
+
+    def _generate_fromclause_column_proxies(
+        self,
+        fromclause: FromClause,
+        columns: ColumnCollection[str, KeyedColumnElement[Any]],
+        primary_key: ColumnSet,
+        foreign_keys: Set[KeyedColumnElement[Any]],
+    ) -> None:
+        columns._populate_separate_keys(
+            col._make_proxy(
+                fromclause, primary_key=primary_key, foreign_keys=foreign_keys
+            )
+            for col in self.c
+        )
+
+    @util.ro_non_memoized_property
+    def exported_columns(
+        self,
+    ) -> ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]:
+        """A :class:`_expression.ColumnCollection`
+        that represents the "exported"
+        columns of this :class:`_expression.Selectable`.
+
+        The "exported" columns for a :class:`_expression.FromClause`
+        object are synonymous
+        with the :attr:`_expression.FromClause.columns` collection.
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :attr:`_expression.Selectable.exported_columns`
+
+            :attr:`_expression.SelectBase.exported_columns`
+
+
+        """
+        return self.c
+
+    @util.ro_non_memoized_property
+    def columns(
+        self,
+    ) -> ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]:
+        """A named-based collection of :class:`_expression.ColumnElement`
+        objects maintained by this :class:`_expression.FromClause`.
+
+        The :attr:`.columns`, or :attr:`.c` collection, is the gateway
+        to the construction of SQL expressions using table-bound or
+        other selectable-bound columns::
+
+            select(mytable).where(mytable.c.somecolumn == 5)
+
+        :return: a :class:`.ColumnCollection` object.
+
+        """
+        return self.c
+
+    @util.ro_memoized_property
+    def c(self) -> ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]:
+        """
+        A synonym for :attr:`.FromClause.columns`
+
+        :return: a :class:`.ColumnCollection`
+
+        """
+        if "_columns" not in self.__dict__:
+            self._setup_collections()
+        return self._columns.as_readonly()
+
+    def _setup_collections(self) -> None:
+        assert "_columns" not in self.__dict__
+        assert "primary_key" not in self.__dict__
+        assert "foreign_keys" not in self.__dict__
+
+        _columns: ColumnCollection[Any, Any] = ColumnCollection()
+        primary_key = ColumnSet()
+        foreign_keys: Set[KeyedColumnElement[Any]] = set()
+
+        self._populate_column_collection(
+            columns=_columns,
+            primary_key=primary_key,
+            foreign_keys=foreign_keys,
+        )
+
+        # assigning these three collections separately is not itself atomic,
+        # but greatly reduces the surface for problems
+        self._columns = _columns
+        self.primary_key = primary_key  # type: ignore
+        self.foreign_keys = foreign_keys  # type: ignore
+
+    @util.ro_non_memoized_property
+    def entity_namespace(self) -> _EntityNamespace:
+        """Return a namespace used for name-based access in SQL expressions.
+
+        This is the namespace that is used to resolve "filter_by()" type
+        expressions, such as::
+
+            stmt.filter_by(address="some address")
+
+        It defaults to the ``.c`` collection, however internally it can
+        be overridden using the "entity_namespace" annotation to deliver
+        alternative results.
+
+        """
+        return self.c
+
+    @util.ro_memoized_property
+    def primary_key(self) -> Iterable[NamedColumn[Any]]:
+        """Return the iterable collection of :class:`_schema.Column` objects
+        which comprise the primary key of this :class:`_selectable.FromClause`.
+
+        For a :class:`_schema.Table` object, this collection is represented
+        by the :class:`_schema.PrimaryKeyConstraint` which itself is an
+        iterable collection of :class:`_schema.Column` objects.
+
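+        E.g. (an illustrative sketch; ``user_table`` is hypothetical)::
+
+            pk_cols = [c.name for c in user_table.primary_key]
+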
+        """
+        self._setup_collections()
+        return self.primary_key
+
+    @util.ro_memoized_property
+    def foreign_keys(self) -> Iterable[ForeignKey]:
+        """Return the collection of :class:`_schema.ForeignKey` marker objects
+        which this FromClause references.
+
+        Each :class:`_schema.ForeignKey` is a member of a
+        :class:`_schema.Table`-wide
+        :class:`_schema.ForeignKeyConstraint`.
+
+        .. seealso::
+
+            :attr:`_schema.Table.foreign_key_constraints`
+
+        """
+        self._setup_collections()
+        return self.foreign_keys
+
+    def _reset_column_collection(self) -> None:
+        """Reset the attributes linked to the ``FromClause.c`` attribute.
+
+        This collection is separate from all the other memoized things
+        as it has been shown to be sensitive to being cleared out in
+        situations
+        where enclosing code, typically in a replacement traversal scenario,
+        has already established strong relationships
+        with the exported columns.
+
+        The collection is cleared for the case where a table is having a
+        column added to it as well as within a Join during copy internals.
+
+        """
+
+        for key in ["_columns", "columns", "c", "primary_key", "foreign_keys"]:
+            self.__dict__.pop(key, None)
+
+    @util.ro_non_memoized_property
+    def _select_iterable(self) -> _SelectIterable:
+        return (c for c in self.c if not _never_select_column(c))
+
+    @property
+    def _cols_populated(self) -> bool:
+        return "_columns" in self.__dict__
+
+    def _populate_column_collection(
+        self,
+        columns: ColumnCollection[str, KeyedColumnElement[Any]],
+        primary_key: ColumnSet,
+        foreign_keys: Set[KeyedColumnElement[Any]],
+    ) -> None:
+        """Called on subclasses to establish the .c collection.
+
+        Each implementation has a different way of establishing
+        this collection.
+
+        """
+
+    def _refresh_for_new_column(self, column: ColumnElement[Any]) -> None:
+        """Given a column added to the .c collection of an underlying
+        selectable, produce the local version of that column, assuming this
+        selectable ultimately should proxy this column.
+
+        This is used to "ping" a derived selectable to add a new column
+        to its .c collection when a Column has been added to one of the
+        Table objects it ultimately derives from.
+
+        If the given selectable hasn't populated its .c collection yet,
+        it should at least pass on the message to the contained selectables,
+        but it will return None.
+
+        This method is currently used by Declarative to allow Table
+        columns to be added to a partially constructed inheritance
+        mapping that may have already produced joins.  The method
+        isn't public right now, as the full span of implications
+        and/or caveats aren't yet clear.
+
+        It's also possible that this functionality could be invoked by
+        default via an event, which would require that
+        selectables maintain a weak referencing collection of all
+        derivations.
+
+        """
+        self._reset_column_collection()
+
+    def _anonymous_fromclause(
+        self, *, name: Optional[str] = None, flat: bool = False
+    ) -> FromClause:
+        return self.alias(name=name)
+
+    if TYPE_CHECKING:
+
+        def self_group(
+            self, against: Optional[OperatorType] = None
+        ) -> Union[FromGrouping, Self]: ...
+
+
+class NamedFromClause(FromClause):
+    """A :class:`.FromClause` that has a name.
+
+    Examples include tables, subqueries, CTEs, aliased tables.
+
+    .. versionadded:: 2.0
+
+    """
+
+    named_with_column = True
+
+    name: str
+
+    @util.preload_module("sqlalchemy.sql.sqltypes")
+    def table_valued(self) -> TableValuedColumn[Any]:
+        """Return a :class:`_sql.TableValuedColumn` object for this
+        :class:`_expression.FromClause`.
+
+        A :class:`_sql.TableValuedColumn` is a :class:`_sql.ColumnElement` that
+        represents a complete row in a table. Support for this construct is
+        backend dependent, and is supported in various forms by backends
+        such as PostgreSQL, Oracle Database and SQL Server.
+
+        E.g.:
+
+        .. sourcecode:: pycon+sql
+
+            >>> from sqlalchemy import select, column, func, table
+            >>> a = table("a", column("id"), column("x"), column("y"))
+            >>> stmt = select(func.row_to_json(a.table_valued()))
+            >>> print(stmt)
+            {printsql}SELECT row_to_json(a) AS row_to_json_1
+            FROM a
+
+        .. versionadded:: 1.4.0b2
+
+        .. seealso::
+
+            :ref:`tutorial_functions` - in the :ref:`unified_tutorial`
+
+        """
+        return TableValuedColumn(self, type_api.TABLEVALUE)
+
+
+class SelectLabelStyle(Enum):
+    """Label style constants that may be passed to
+    :meth:`_sql.Select.set_label_style`."""
+
+    LABEL_STYLE_NONE = 0
+    """Label style indicating no automatic labeling should be applied to the
+    columns clause of a SELECT statement.
+
+    Below, the columns named ``columna`` are both rendered as is, meaning that
+    the name ``columna`` can only refer to the first occurrence of this name
+    within a result set, and likewise if the statement were used as a subquery:
+
+    .. sourcecode:: pycon+sql
+
+        >>> from sqlalchemy import table, column, select, true, LABEL_STYLE_NONE
+        >>> table1 = table("table1", column("columna"), column("columnb"))
+        >>> table2 = table("table2", column("columna"), column("columnc"))
+        >>> print(
+        ...     select(table1, table2)
+        ...     .join(table2, true())
+        ...     .set_label_style(LABEL_STYLE_NONE)
+        ... )
+        {printsql}SELECT table1.columna, table1.columnb, table2.columna, table2.columnc
+        FROM table1 JOIN table2 ON true
+
+    Used with the :meth:`_sql.Select.set_label_style` method.
+
+    .. versionadded:: 1.4
+
+    """  # noqa: E501
+
+    LABEL_STYLE_TABLENAME_PLUS_COL = 1
+    """Label style indicating all columns should be labeled as
+    ``<tablename>_<columnname>`` when generating the columns clause of a SELECT
+    statement, to disambiguate same-named columns referenced from different
+    tables, aliases, or subqueries.
+
+    Below, all column names are given a label so that the two same-named
+    columns ``columna`` are disambiguated as ``table1_columna`` and
+    ``table2_columna``:
+
+    .. sourcecode:: pycon+sql
+
+        >>> from sqlalchemy import (
+        ...     table,
+        ...     column,
+        ...     select,
+        ...     true,
+        ...     LABEL_STYLE_TABLENAME_PLUS_COL,
+        ... )
+        >>> table1 = table("table1", column("columna"), column("columnb"))
+        >>> table2 = table("table2", column("columna"), column("columnc"))
+        >>> print(
+        ...     select(table1, table2)
+        ...     .join(table2, true())
+        ...     .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
+        ... )
+        {printsql}SELECT table1.columna AS table1_columna, table1.columnb AS table1_columnb, table2.columna AS table2_columna, table2.columnc AS table2_columnc
+        FROM table1 JOIN table2 ON true
+
+    Used with the :meth:`_sql.GenerativeSelect.set_label_style` method.
+    Equivalent to the legacy method ``Select.apply_labels()``;
+    :data:`_sql.LABEL_STYLE_TABLENAME_PLUS_COL` is SQLAlchemy's legacy
+    auto-labeling style. :data:`_sql.LABEL_STYLE_DISAMBIGUATE_ONLY` provides a
+    less intrusive approach to disambiguation of same-named column expressions.
+
+
+    .. versionadded:: 1.4
+
+    """  # noqa: E501
+
+    LABEL_STYLE_DISAMBIGUATE_ONLY = 2
+    """Label style indicating that columns with a name that conflicts with
+    an existing name should be labeled with a semi-anonymizing label
+    when generating the columns clause of a SELECT statement.
+
+    Below, most column names are left unaffected, except for the second
+    occurrence of the name ``columna``, which is labeled using the
+    label ``columna_1`` to disambiguate it from that of ``tablea.columna``:
+
+    .. sourcecode:: pycon+sql
+
+        >>> from sqlalchemy import (
+        ...     table,
+        ...     column,
+        ...     select,
+        ...     true,
+        ...     LABEL_STYLE_DISAMBIGUATE_ONLY,
+        ... )
+        >>> table1 = table("table1", column("columna"), column("columnb"))
+        >>> table2 = table("table2", column("columna"), column("columnc"))
+        >>> print(
+        ...     select(table1, table2)
+        ...     .join(table2, true())
+        ...     .set_label_style(LABEL_STYLE_DISAMBIGUATE_ONLY)
+        ... )
+        {printsql}SELECT table1.columna, table1.columnb, table2.columna AS columna_1, table2.columnc
+        FROM table1 JOIN table2 ON true
+
+    Used with the :meth:`_sql.GenerativeSelect.set_label_style` method,
+    :data:`_sql.LABEL_STYLE_DISAMBIGUATE_ONLY` is the default labeling style
+    for all SELECT statements outside of :term:`1.x style` ORM queries.
+
+    .. versionadded:: 1.4
+
+    """  # noqa: E501
+
+    LABEL_STYLE_DEFAULT = LABEL_STYLE_DISAMBIGUATE_ONLY
+    """The default label style, refers to
+    :data:`_sql.LABEL_STYLE_DISAMBIGUATE_ONLY`.
+
+    .. versionadded:: 1.4
+
+    """
+
+    LABEL_STYLE_LEGACY_ORM = 3
+
+
+(
+    LABEL_STYLE_NONE,
+    LABEL_STYLE_TABLENAME_PLUS_COL,
+    LABEL_STYLE_DISAMBIGUATE_ONLY,
+    _,
+) = list(SelectLabelStyle)
+
+LABEL_STYLE_DEFAULT = LABEL_STYLE_DISAMBIGUATE_ONLY
+
+
+class Join(roles.DMLTableRole, FromClause):
+    """Represent a ``JOIN`` construct between two
+    :class:`_expression.FromClause`
+    elements.
+
+    The public constructor function for :class:`_expression.Join`
+    is the module-level
+    :func:`_expression.join()` function, as well as the
+    :meth:`_expression.FromClause.join` method
+    of any :class:`_expression.FromClause` (e.g. such as
+    :class:`_schema.Table`).
+
+    .. seealso::
+
+        :func:`_expression.join`
+
+        :meth:`_expression.FromClause.join`
+
+    """
+
+    __visit_name__ = "join"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("left", InternalTraversal.dp_clauseelement),
+        ("right", InternalTraversal.dp_clauseelement),
+        ("onclause", InternalTraversal.dp_clauseelement),
+        ("isouter", InternalTraversal.dp_boolean),
+        ("full", InternalTraversal.dp_boolean),
+    ]
+
+    _is_join = True
+
+    left: FromClause
+    right: FromClause
+    onclause: Optional[ColumnElement[bool]]
+    isouter: bool
+    full: bool
+
+    def __init__(
+        self,
+        left: _FromClauseArgument,
+        right: _FromClauseArgument,
+        onclause: Optional[_OnClauseArgument] = None,
+        isouter: bool = False,
+        full: bool = False,
+    ):
+        """Construct a new :class:`_expression.Join`.
+
+        The usual entrypoint here is the :func:`_expression.join`
+        function or the :meth:`_expression.FromClause.join` method of any
+        :class:`_expression.FromClause` object.
+
+        """
+
+        # when deannotate was removed here, callcounts went up for ORM
+        # compilation of eager joins, since there were more comparisons of
+        # annotated objects.   test_orm.py -> test_fetch_results
+        # was therefore changed to show a more real-world use case, where the
+        # compilation is cached; there's no change in post-cache callcounts.
+        # callcounts for a single compilation in that particular test,
+        # which includes about eight joins, went up by about 1100 extra
+        # fn calls, from 29200 -> 30373
+
+        self.left = coercions.expect(
+            roles.FromClauseRole,
+            left,
+        )
+        self.right = coercions.expect(
+            roles.FromClauseRole,
+            right,
+        ).self_group()
+
+        if onclause is None:
+            self.onclause = self._match_primaries(self.left, self.right)
+        else:
+            # note: taken from If91f61527236fd4d7ae3cad1f24c38be921c90ba
+            # not merged yet
+            self.onclause = coercions.expect(
+                roles.OnClauseRole, onclause
+            ).self_group(against=operators._asbool)
+
+        self.isouter = isouter
+        self.full = full
+
+    @util.ro_non_memoized_property
+    def description(self) -> str:
+        return "Join object on %s(%d) and %s(%d)" % (
+            self.left.description,
+            id(self.left),
+            self.right.description,
+            id(self.right),
+        )
+
+    def is_derived_from(self, fromclause: Optional[FromClause]) -> bool:
+        return (
+            # use hash() to ensure direct comparison to annotated works
+            # as well
+            hash(fromclause) == hash(self)
+            or self.left.is_derived_from(fromclause)
+            or self.right.is_derived_from(fromclause)
+        )
+
+    def self_group(
+        self, against: Optional[OperatorType] = None
+    ) -> FromGrouping:
+        return FromGrouping(self)
+
+    @util.preload_module("sqlalchemy.sql.util")
+    def _populate_column_collection(
+        self,
+        columns: ColumnCollection[str, KeyedColumnElement[Any]],
+        primary_key: ColumnSet,
+        foreign_keys: Set[KeyedColumnElement[Any]],
+    ) -> None:
+        sqlutil = util.preloaded.sql_util
+        _columns: List[KeyedColumnElement[Any]] = [c for c in self.left.c] + [
+            c for c in self.right.c
+        ]
+
+        primary_key.extend(  # type: ignore
+            sqlutil.reduce_columns(
+                (c for c in _columns if c.primary_key), self.onclause
+            )
+        )
+        columns._populate_separate_keys(
+            (col._tq_key_label, col) for col in _columns  # type: ignore
+        )
+        foreign_keys.update(
+            itertools.chain(*[col.foreign_keys for col in _columns])  # type: ignore  # noqa: E501
+        )
+
+    def _copy_internals(
+        self, clone: _CloneCallableType = _clone, **kw: Any
+    ) -> None:
+        # see Select._copy_internals() for similar concept
+
+        # here we pre-clone "left" and "right" so that we can
+        # determine the new FROM clauses
+        all_the_froms = set(
+            itertools.chain(
+                _from_objects(self.left),
+                _from_objects(self.right),
+            )
+        )
+
+        # run the clone on those.  these will be placed in the
+        # cache used by the clone function
+        new_froms = {f: clone(f, **kw) for f in all_the_froms}
+
+        # set up a special replace function that will replace for
+        # ColumnClause with parent table referring to those
+        # replaced FromClause objects
+        def replace(
+            obj: Union[BinaryExpression[Any], ColumnClause[Any]],
+            **kw: Any,
+        ) -> Optional[KeyedColumnElement[Any]]:
+            if isinstance(obj, ColumnClause) and obj.table in new_froms:
+                newelem = new_froms[obj.table].corresponding_column(obj)
+                return newelem
+            return None
+
+        kw["replace"] = replace
+
+        # run normal _copy_internals.  the clones for
+        # left and right will come from the clone function's
+        # cache
+        super()._copy_internals(clone=clone, **kw)
+
+        self._reset_memoizations()
+
+    def _refresh_for_new_column(self, column: ColumnElement[Any]) -> None:
+        super()._refresh_for_new_column(column)
+        self.left._refresh_for_new_column(column)
+        self.right._refresh_for_new_column(column)
+
+    def _match_primaries(
+        self,
+        left: FromClause,
+        right: FromClause,
+    ) -> ColumnElement[bool]:
+        if isinstance(left, Join):
+            left_right = left.right
+        else:
+            left_right = None
+        return self._join_condition(left, right, a_subset=left_right)
+
+    @classmethod
+    def _join_condition(
+        cls,
+        a: FromClause,
+        b: FromClause,
+        *,
+        a_subset: Optional[FromClause] = None,
+        consider_as_foreign_keys: Optional[
+            AbstractSet[ColumnClause[Any]]
+        ] = None,
+    ) -> ColumnElement[bool]:
+        """Create a join condition between two tables or selectables.
+
+        See sqlalchemy.sql.util.join_condition() for full docs.
+
+        """
+        constraints = cls._joincond_scan_left_right(
+            a, a_subset, b, consider_as_foreign_keys
+        )
+
+        if len(constraints) > 1:
+            cls._joincond_trim_constraints(
+                a, b, constraints, consider_as_foreign_keys
+            )
+
+        if len(constraints) == 0:
+            if isinstance(b, FromGrouping):
+                hint = (
+                    " Perhaps you meant to convert the right side to a "
+                    "subquery using alias()?"
+                )
+            else:
+                hint = ""
+            raise exc.NoForeignKeysError(
+                "Can't find any foreign key relationships "
+                "between '%s' and '%s'.%s"
+                % (a.description, b.description, hint)
+            )
+
+        crit = [(x == y) for x, y in list(constraints.values())[0]]
+        if len(crit) == 1:
+            return crit[0]
+        else:
+            return and_(*crit)
+
+    @classmethod
+    def _can_join(
+        cls,
+        left: FromClause,
+        right: FromClause,
+        *,
+        consider_as_foreign_keys: Optional[
+            AbstractSet[ColumnClause[Any]]
+        ] = None,
+    ) -> bool:
+        if isinstance(left, Join):
+            left_right = left.right
+        else:
+            left_right = None
+
+        constraints = cls._joincond_scan_left_right(
+            a=left,
+            b=right,
+            a_subset=left_right,
+            consider_as_foreign_keys=consider_as_foreign_keys,
+        )
+
+        return bool(constraints)
+
+    @classmethod
+    @util.preload_module("sqlalchemy.sql.util")
+    def _joincond_scan_left_right(
+        cls,
+        a: FromClause,
+        a_subset: Optional[FromClause],
+        b: FromClause,
+        consider_as_foreign_keys: Optional[AbstractSet[ColumnClause[Any]]],
+    ) -> collections.defaultdict[
+        Optional[ForeignKeyConstraint],
+        List[Tuple[ColumnClause[Any], ColumnClause[Any]]],
+    ]:
+        sql_util = util.preloaded.sql_util
+
+        a = coercions.expect(roles.FromClauseRole, a)
+        b = coercions.expect(roles.FromClauseRole, b)
+
+        constraints: collections.defaultdict[
+            Optional[ForeignKeyConstraint],
+            List[Tuple[ColumnClause[Any], ColumnClause[Any]]],
+        ] = collections.defaultdict(list)
+
+        for left in (a_subset, a):
+            if left is None:
+                continue
+            for fk in sorted(
+                b.foreign_keys,
+                key=lambda fk: fk.parent._creation_order,
+            ):
+                if (
+                    consider_as_foreign_keys is not None
+                    and fk.parent not in consider_as_foreign_keys
+                ):
+                    continue
+                try:
+                    col = fk.get_referent(left)
+                except exc.NoReferenceError as nrte:
+                    table_names = {t.name for t in sql_util.find_tables(left)}
+                    if nrte.table_name in table_names:
+                        raise
+                    else:
+                        continue
+
+                if col is not None:
+                    constraints[fk.constraint].append((col, fk.parent))
+            if left is not b:
+                for fk in sorted(
+                    left.foreign_keys,
+                    key=lambda fk: fk.parent._creation_order,
+                ):
+                    if (
+                        consider_as_foreign_keys is not None
+                        and fk.parent not in consider_as_foreign_keys
+                    ):
+                        continue
+                    try:
+                        col = fk.get_referent(b)
+                    except exc.NoReferenceError as nrte:
+                        table_names = {t.name for t in sql_util.find_tables(b)}
+                        if nrte.table_name in table_names:
+                            raise
+                        else:
+                            continue
+
+                    if col is not None:
+                        constraints[fk.constraint].append((col, fk.parent))
+            if constraints:
+                break
+        return constraints
+
+    @classmethod
+    def _joincond_trim_constraints(
+        cls,
+        a: FromClause,
+        b: FromClause,
+        constraints: Dict[Any, Any],
+        consider_as_foreign_keys: Optional[Any],
+    ) -> None:
+        # more than one constraint matched.  narrow down the list
+        # to include just those FKCs that match exactly to
+        # "consider_as_foreign_keys".
+        if consider_as_foreign_keys:
+            for const in list(constraints):
+                if {f.parent for f in const.elements} != set(
+                    consider_as_foreign_keys
+                ):
+                    del constraints[const]
+
+        # if still multiple constraints, but
+        # they all refer to the exact same end result, use it.
+        if len(constraints) > 1:
+            dedupe = {tuple(crit) for crit in constraints.values()}
+            if len(dedupe) == 1:
+                key = list(constraints)[0]
+                constraints = {key: constraints[key]}
+
+        if len(constraints) != 1:
+            raise exc.AmbiguousForeignKeysError(
+                "Can't determine join between '%s' and '%s'; "
+                "tables have more than one foreign key "
+                "constraint relationship between them. "
+                "Please specify the 'onclause' of this "
+                "join explicitly." % (a.description, b.description)
+            )
+
+    def select(self) -> Select[Any]:
+        r"""Create a :class:`_expression.Select` from this
+        :class:`_expression.Join`.
+
+        E.g.::
+
+            stmt = table_a.join(table_b, table_a.c.id == table_b.c.a_id)
+
+            stmt = stmt.select()
+
+        The above will produce a SQL string resembling:
+
+        .. sourcecode:: sql
+
+            SELECT table_a.id, table_a.col, table_b.id, table_b.a_id
+            FROM table_a JOIN table_b ON table_a.id = table_b.a_id
+
+        """
+        return Select(self.left, self.right).select_from(self)
+
+    @util.preload_module("sqlalchemy.sql.util")
+    def _anonymous_fromclause(
+        self, name: Optional[str] = None, flat: bool = False
+    ) -> TODO_Any:
+        sqlutil = util.preloaded.sql_util
+        if flat:
+            if isinstance(self.left, (FromGrouping, Join)):
+                left_name = name  # will recurse
+            else:
+                if name and isinstance(self.left, NamedFromClause):
+                    left_name = f"{name}_{self.left.name}"
+                else:
+                    left_name = name
+            if isinstance(self.right, (FromGrouping, Join)):
+                right_name = name  # will recurse
+            else:
+                if name and isinstance(self.right, NamedFromClause):
+                    right_name = f"{name}_{self.right.name}"
+                else:
+                    right_name = name
+            left_a, right_a = (
+                self.left._anonymous_fromclause(name=left_name, flat=flat),
+                self.right._anonymous_fromclause(name=right_name, flat=flat),
+            )
+            adapter = sqlutil.ClauseAdapter(left_a).chain(
+                sqlutil.ClauseAdapter(right_a)
+            )
+
+            return left_a.join(
+                right_a,
+                adapter.traverse(self.onclause),
+                isouter=self.isouter,
+                full=self.full,
+            )
+        else:
+            return (
+                self.select()
+                .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
+                .correlate(None)
+                .alias(name)
+            )
+
+    @util.ro_non_memoized_property
+    def _hide_froms(self) -> Iterable[FromClause]:
+        return itertools.chain(
+            *[_from_objects(x.left, x.right) for x in self._cloned_set]
+        )
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        self_list: List[FromClause] = [self]
+        return self_list + self.left._from_objects + self.right._from_objects
+
+
+class NoInit:
+    def __init__(self, *arg: Any, **kw: Any):
+        raise NotImplementedError(
+            "The %s class is not intended to be constructed "
+            "directly.  Please use the %s() standalone "
+            "function or the %s() method available from appropriate "
+            "selectable objects."
+            % (
+                self.__class__.__name__,
+                self.__class__.__name__.lower(),
+                self.__class__.__name__.lower(),
+            )
+        )
+
+
+class LateralFromClause(NamedFromClause):
+    """mark a FROM clause as being able to render directly as LATERAL"""
+
+
+# FromClause ->
+#   AliasedReturnsRows
+#        -> Alias   only for FromClause
+#        -> Subquery  only for SelectBase
+#        -> CTE only for HasCTE -> SelectBase, DML
+#        -> Lateral -> FromClause, but we accept SelectBase
+#           w/ non-deprecated coercion
+#        -> TableSample -> only for FromClause
+
+
+class AliasedReturnsRows(NoInit, NamedFromClause):
+    """Base class of aliases against tables, subqueries, and other
+    selectables."""
+
+    _is_from_container = True
+
+    _supports_derived_columns = False
+
+    element: ReturnsRows
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("element", InternalTraversal.dp_clauseelement),
+        ("name", InternalTraversal.dp_anon_name),
+    ]
+
+    @classmethod
+    def _construct(
+        cls,
+        selectable: Any,
+        *,
+        name: Optional[str] = None,
+        **kw: Any,
+    ) -> Self:
+        obj = cls.__new__(cls)
+        obj._init(selectable, name=name, **kw)
+        return obj
+
+    def _init(self, selectable: Any, *, name: Optional[str] = None) -> None:
+        self.element = coercions.expect(
+            roles.ReturnsRowsRole, selectable, apply_propagate_attrs=self
+        )
+        self.element = selectable
+        self._orig_name = name
+        if name is None:
+            if (
+                isinstance(selectable, FromClause)
+                and selectable.named_with_column
+            ):
+                name = getattr(selectable, "name", None)
+                if isinstance(name, _anonymous_label):
+                    name = None
+            name = _anonymous_label.safe_construct(id(self), name or "anon")
+        self.name = name
+
+    def _refresh_for_new_column(self, column: ColumnElement[Any]) -> None:
+        super()._refresh_for_new_column(column)
+        self.element._refresh_for_new_column(column)
+
+    def _populate_column_collection(
+        self,
+        columns: ColumnCollection[str, KeyedColumnElement[Any]],
+        primary_key: ColumnSet,
+        foreign_keys: Set[KeyedColumnElement[Any]],
+    ) -> None:
+        self.element._generate_fromclause_column_proxies(
+            self, columns, primary_key=primary_key, foreign_keys=foreign_keys
+        )
+
+    @util.ro_non_memoized_property
+    def description(self) -> str:
+        name = self.name
+        if isinstance(name, _anonymous_label):
+            name = "anon_1"
+
+        return name
+
+    @util.ro_non_memoized_property
+    def implicit_returning(self) -> bool:
+        return self.element.implicit_returning  # type: ignore
+
+    @property
+    def original(self) -> ReturnsRows:
+        """Legacy for dialects that are referring to Alias.original."""
+        return self.element
+
+    def is_derived_from(self, fromclause: Optional[FromClause]) -> bool:
+        if fromclause in self._cloned_set:
+            return True
+        return self.element.is_derived_from(fromclause)
+
+    def _copy_internals(
+        self, clone: _CloneCallableType = _clone, **kw: Any
+    ) -> None:
+        existing_element = self.element
+
+        super()._copy_internals(clone=clone, **kw)
+
+        # the element clone is usually against a Table that returns the
+        # same object.  don't reset exported .c. collections and other
+        # memoized details if it was not changed.  this saves a lot on
+        # performance.
+        if existing_element is not self.element:
+            self._reset_column_collection()
+
+    @property
+    def _from_objects(self) -> List[FromClause]:
+        return [self]
+
+
+class FromClauseAlias(AliasedReturnsRows):
+    element: FromClause
+
+
+class Alias(roles.DMLTableRole, FromClauseAlias):
+    """Represents an table or selectable alias (AS).
+
+    Represents an alias, as typically applied to any table or
+    sub-select within a SQL statement using the ``AS`` keyword (or
+    without the keyword on certain databases such as Oracle Database).
+
+    This object is constructed from the :func:`_expression.alias` module
+    level function as well as the :meth:`_expression.FromClause.alias`
+    method available
+    on all :class:`_expression.FromClause` subclasses.
+
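+    E.g., a minimal sketch, assuming a hypothetical ``user`` table::
+
+        from sqlalchemy import table, column, select
+
+        user = table("user", column("id"), column("name"))
+        user_alias = user.alias("u")
+        stmt = select(user_alias.c.name).where(user_alias.c.id > 5)
+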
+    .. seealso::
+
+        :meth:`_expression.FromClause.alias`
+
+    """
+
+    __visit_name__ = "alias"
+
+    inherit_cache = True
+
+    element: FromClause
+
+    @classmethod
+    def _factory(
+        cls,
+        selectable: FromClause,
+        name: Optional[str] = None,
+        flat: bool = False,
+    ) -> NamedFromClause:
+        return coercions.expect(
+            roles.FromClauseRole, selectable, allow_select=True
+        ).alias(name=name, flat=flat)
+
+
+class TableValuedAlias(LateralFromClause, Alias):
+    """An alias against a "table valued" SQL function.
+
+    This construct provides for a SQL function that returns columns
+    to be used in the FROM clause of a SELECT statement.   The
+    object is generated using the :meth:`_functions.FunctionElement.table_valued`
+    method, e.g.:
+
+    .. sourcecode:: pycon+sql
+
+        >>> from sqlalchemy import select, func
+        >>> fn = func.json_array_elements_text('["one", "two", "three"]').table_valued(
+        ...     "value"
+        ... )
+        >>> print(select(fn.c.value))
+        {printsql}SELECT anon_1.value
+        FROM json_array_elements_text(:json_array_elements_text_1) AS anon_1
+
+    .. versionadded:: 1.4.0b2
+
+    .. seealso::
+
+        :ref:`tutorial_functions_table_valued` - in the :ref:`unified_tutorial`
+
+    """  # noqa: E501
+
+    __visit_name__ = "table_valued_alias"
+
+    _supports_derived_columns = True
+    _render_derived = False
+    _render_derived_w_types = False
+    joins_implicitly = False
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("element", InternalTraversal.dp_clauseelement),
+        ("name", InternalTraversal.dp_anon_name),
+        ("_tableval_type", InternalTraversal.dp_type),
+        ("_render_derived", InternalTraversal.dp_boolean),
+        ("_render_derived_w_types", InternalTraversal.dp_boolean),
+    ]
+
+    def _init(
+        self,
+        selectable: Any,
+        *,
+        name: Optional[str] = None,
+        table_value_type: Optional[TableValueType] = None,
+        joins_implicitly: bool = False,
+    ) -> None:
+        super()._init(selectable, name=name)
+
+        self.joins_implicitly = joins_implicitly
+        self._tableval_type = (
+            type_api.TABLEVALUE
+            if table_value_type is None
+            else table_value_type
+        )
+
+    @HasMemoized.memoized_attribute
+    def column(self) -> TableValuedColumn[Any]:
+        """Return a column expression representing this
+        :class:`_sql.TableValuedAlias`.
+
+        This accessor is used to implement the
+        :meth:`_functions.FunctionElement.column_valued` method. See that
+        method for further details.
+
+        E.g.:
+
+        .. sourcecode:: pycon+sql
+
+            >>> print(select(func.some_func().table_valued("value").column))
+            {printsql}SELECT anon_1 FROM some_func() AS anon_1
+
+        .. seealso::
+
+            :meth:`_functions.FunctionElement.column_valued`
+
+        """
+
+        return TableValuedColumn(self, self._tableval_type)
+
+    def alias(
+        self, name: Optional[str] = None, flat: bool = False
+    ) -> TableValuedAlias:
+        """Return a new alias of this :class:`_sql.TableValuedAlias`.
+
+        This creates a distinct FROM object that will be distinguished
+        from the original one when used in a SQL statement.
+
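+        E.g., a sketch using a hypothetical PostgreSQL-style
+        ``generate_series`` function (illustrative only)::
+
+            from sqlalchemy import func
+
+            fn = func.generate_series(1, 5).table_valued("value")
+            fn_alias = fn.alias("gs")
+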
+        """
+
+        tva: TableValuedAlias = TableValuedAlias._construct(
+            self,
+            name=name,
+            table_value_type=self._tableval_type,
+            joins_implicitly=self.joins_implicitly,
+        )
+
+        if self._render_derived:
+            tva._render_derived = True
+            tva._render_derived_w_types = self._render_derived_w_types
+
+        return tva
+
+    def lateral(self, name: Optional[str] = None) -> LateralFromClause:
+        """Return a new :class:`_sql.TableValuedAlias` with the lateral flag
+        set, so that it renders as LATERAL.
+
+        .. seealso::
+
+            :func:`_expression.lateral`
+
+        """
+        tva = self.alias(name=name)
+        tva._is_lateral = True
+        return tva
+
+    def render_derived(
+        self,
+        name: Optional[str] = None,
+        with_types: bool = False,
+    ) -> TableValuedAlias:
+        """Apply "render derived" to this :class:`_sql.TableValuedAlias`.
+
+        This has the effect that the individual column names are listed
+        out after the alias name in the "AS" sequence, e.g.:
+
+        .. sourcecode:: pycon+sql
+
+            >>> print(
+            ...     select(
+            ...         func.unnest(array(["one", "two", "three"]))
+            ...         .table_valued("x", with_ordinality="o")
+            ...         .render_derived()
+            ...     )
+            ... )
+            {printsql}SELECT anon_1.x, anon_1.o
+            FROM unnest(ARRAY[%(param_1)s, %(param_2)s, %(param_3)s]) WITH ORDINALITY AS anon_1(x, o)
+
+        The ``with_types`` keyword will render column types inline within
+        the alias expression (this syntax currently applies to the
+        PostgreSQL database):
+
+        .. sourcecode:: pycon+sql
+
+            >>> print(
+            ...     select(
+            ...         func.json_to_recordset('[{"a":1,"b":"foo"},{"a":"2","c":"bar"}]')
+            ...         .table_valued(column("a", Integer), column("b", String))
+            ...         .render_derived(with_types=True)
+            ...     )
+            ... )
+            {printsql}SELECT anon_1.a, anon_1.b FROM json_to_recordset(:json_to_recordset_1)
+            AS anon_1(a INTEGER, b VARCHAR)
+
+        :param name: optional string name that will be applied to the alias
+         generated.  If left as None, a unique anonymizing name will be used.
+
+        :param with_types: if True, the derived columns will include the
+         datatype specification with each column. This is a special syntax
+         currently known to be required by PostgreSQL for some SQL functions.
+
+        """  # noqa: E501
+
+        # note: don't use the @_generative system here, keep a reference
+        # to the original object.  otherwise you can have re-use of the
+        # python id() of the original which can cause name conflicts if
+        # a new anon-name grabs the same identifier as the local anon-name
+        # (just saw it happen on CI)
+
+        # construct against original to prevent memory growth
+        # for repeated generations
+        new_alias: TableValuedAlias = TableValuedAlias._construct(
+            self.element,
+            name=name,
+            table_value_type=self._tableval_type,
+            joins_implicitly=self.joins_implicitly,
+        )
+        new_alias._render_derived = True
+        new_alias._render_derived_w_types = with_types
+        return new_alias
+
+
+class Lateral(FromClauseAlias, LateralFromClause):
+    """Represent a LATERAL subquery.
+
+    This object is constructed from the :func:`_expression.lateral` module
+    level function as well as the :meth:`_expression.FromClause.lateral`
+    method available
+    on all :class:`_expression.FromClause` subclasses.
+
+    While LATERAL is part of the SQL standard, currently only more recent
+    PostgreSQL versions provide support for this keyword.
+
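+    E.g., a minimal sketch, assuming hypothetical ``user`` and ``order``
+    tables::
+
+        from sqlalchemy import select, true
+
+        subq = (
+            select(order.c.amount)
+            .where(order.c.user_id == user.c.id)
+            .lateral("o")
+        )
+        stmt = select(user.c.name, subq.c.amount).join(subq, true())
+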
+    .. seealso::
+
+        :ref:`tutorial_lateral_correlation` -  overview of usage.
+
+    """
+
+    __visit_name__ = "lateral"
+    _is_lateral = True
+
+    inherit_cache = True
+
+    @classmethod
+    def _factory(
+        cls,
+        selectable: Union[SelectBase, _FromClauseArgument],
+        name: Optional[str] = None,
+    ) -> LateralFromClause:
+        return coercions.expect(
+            roles.FromClauseRole, selectable, explicit_subquery=True
+        ).lateral(name=name)
+
+
+class TableSample(FromClauseAlias):
+    """Represent a TABLESAMPLE clause.
+
+    This object is constructed from the :func:`_expression.tablesample` module
+    level function as well as the :meth:`_expression.FromClause.tablesample`
+    method
+    available on all :class:`_expression.FromClause` subclasses.
+
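+    E.g., a minimal sketch, assuming a hypothetical ``people`` table::
+
+        from sqlalchemy import tablesample, select
+
+        sampled = tablesample(people, 10.5, name="alias")
+        stmt = select(sampled.c.id)
+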
+    .. seealso::
+
+        :func:`_expression.tablesample`
+
+    """
+
+    __visit_name__ = "tablesample"
+
+    _traverse_internals: _TraverseInternalsType = (
+        AliasedReturnsRows._traverse_internals
+        + [
+            ("sampling", InternalTraversal.dp_clauseelement),
+            ("seed", InternalTraversal.dp_clauseelement),
+        ]
+    )
+
+    @classmethod
+    def _factory(
+        cls,
+        selectable: _FromClauseArgument,
+        sampling: Union[float, Function[Any]],
+        name: Optional[str] = None,
+        seed: Optional[roles.ExpressionElementRole[Any]] = None,
+    ) -> TableSample:
+        return coercions.expect(roles.FromClauseRole, selectable).tablesample(
+            sampling, name=name, seed=seed
+        )
+
+    @util.preload_module("sqlalchemy.sql.functions")
+    def _init(  # type: ignore[override]
+        self,
+        selectable: Any,
+        *,
+        name: Optional[str] = None,
+        sampling: Union[float, Function[Any]],
+        seed: Optional[roles.ExpressionElementRole[Any]] = None,
+    ) -> None:
+        assert sampling is not None
+        functions = util.preloaded.sql_functions
+        if not isinstance(sampling, functions.Function):
+            sampling = functions.func.system(sampling)
+
+        self.sampling: Function[Any] = sampling
+        self.seed = seed
+        super()._init(selectable, name=name)
+
+    def _get_method(self) -> Function[Any]:
+        return self.sampling
+
+
+class CTE(
+    roles.DMLTableRole,
+    roles.IsCTERole,
+    Generative,
+    HasPrefixes,
+    HasSuffixes,
+    AliasedReturnsRows,
+):
+    """Represent a Common Table Expression.
+
+    The :class:`_expression.CTE` object is obtained using the
+    :meth:`_sql.SelectBase.cte` method from any SELECT statement. A less
+    common syntax also allows use of the :meth:`_sql.HasCTE.cte` method
+    present on :term:`DML` constructs such as :class:`_sql.Insert`,
+    :class:`_sql.Update` and
+    :class:`_sql.Delete`.   See the :meth:`_sql.HasCTE.cte` method for
+    usage details on CTEs.
+
+    .. seealso::
+
+        :ref:`tutorial_subqueries_ctes` - in the 2.0 tutorial
+
+        :meth:`_sql.HasCTE.cte` - examples of calling styles
+
+    """
+
+    __visit_name__ = "cte"
+
+    _traverse_internals: _TraverseInternalsType = (
+        AliasedReturnsRows._traverse_internals
+        + [
+            ("_cte_alias", InternalTraversal.dp_clauseelement),
+            ("_restates", InternalTraversal.dp_clauseelement),
+            ("recursive", InternalTraversal.dp_boolean),
+            ("nesting", InternalTraversal.dp_boolean),
+        ]
+        + HasPrefixes._has_prefixes_traverse_internals
+        + HasSuffixes._has_suffixes_traverse_internals
+    )
+
+    element: HasCTE
+
+    @classmethod
+    def _factory(
+        cls,
+        selectable: HasCTE,
+        name: Optional[str] = None,
+        recursive: bool = False,
+    ) -> CTE:
+        r"""Return a new :class:`_expression.CTE`,
+        or Common Table Expression instance.
+
+        Please see :meth:`_expression.HasCTE.cte` for detail on CTE usage.
+
+        """
+        return coercions.expect(roles.HasCTERole, selectable).cte(
+            name=name, recursive=recursive
+        )
+
+    def _init(
+        self,
+        selectable: Select[Any],
+        *,
+        name: Optional[str] = None,
+        recursive: bool = False,
+        nesting: bool = False,
+        _cte_alias: Optional[CTE] = None,
+        _restates: Optional[CTE] = None,
+        _prefixes: Optional[Tuple[()]] = None,
+        _suffixes: Optional[Tuple[()]] = None,
+    ) -> None:
+        self.recursive = recursive
+        self.nesting = nesting
+        self._cte_alias = _cte_alias
+        # keep a reference to the CTE that this one restates via
+        # union() / union_all()
+        self._restates = _restates
+        if _prefixes:
+            self._prefixes = _prefixes
+        if _suffixes:
+            self._suffixes = _suffixes
+        super()._init(selectable, name=name)
+
+    def _populate_column_collection(
+        self,
+        columns: ColumnCollection[str, KeyedColumnElement[Any]],
+        primary_key: ColumnSet,
+        foreign_keys: Set[KeyedColumnElement[Any]],
+    ) -> None:
+        if self._cte_alias is not None:
+            self._cte_alias._generate_fromclause_column_proxies(
+                self,
+                columns,
+                primary_key=primary_key,
+                foreign_keys=foreign_keys,
+            )
+        else:
+            self.element._generate_fromclause_column_proxies(
+                self,
+                columns,
+                primary_key=primary_key,
+                foreign_keys=foreign_keys,
+            )
+
+    def alias(self, name: Optional[str] = None, flat: bool = False) -> CTE:
+        """Return an :class:`_expression.Alias` of this
+        :class:`_expression.CTE`.
+
+        This method is a CTE-specific specialization of the
+        :meth:`_expression.FromClause.alias` method.
+
+        .. seealso::
+
+            :ref:`tutorial_using_aliases`
+
+            :func:`_expression.alias`
+
+        """
+        return CTE._construct(
+            self.element,
+            name=name,
+            recursive=self.recursive,
+            nesting=self.nesting,
+            _cte_alias=self,
+            _prefixes=self._prefixes,
+            _suffixes=self._suffixes,
+        )
+
+    def union(self, *other: _SelectStatementForCompoundArgument[Any]) -> CTE:
+        r"""Return a new :class:`_expression.CTE` with a SQL ``UNION``
+        of the original CTE against the given selectables provided
+        as positional arguments.
+
+        :param \*other: one or more elements with which to create a
+         UNION.
+
+         .. versionchanged:: 1.4.28 multiple elements are now accepted.
+
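+        E.g., a minimal sketch of the typical recursive pattern::
+
+            from sqlalchemy import select, literal
+
+            nodes = select(literal(1).label("n")).cte("nodes", recursive=True)
+            nodes = nodes.union(
+                select(nodes.c.n + 1).where(nodes.c.n < 10)
+            )
+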
+        .. seealso::
+
+            :meth:`_sql.HasCTE.cte` - examples of calling styles
+
+        """
+        assert is_select_statement(
+            self.element
+        ), f"CTE element f{self.element} does not support union()"
+
+        return CTE._construct(
+            self.element.union(*other),
+            name=self.name,
+            recursive=self.recursive,
+            nesting=self.nesting,
+            _restates=self,
+            _prefixes=self._prefixes,
+            _suffixes=self._suffixes,
+        )
+
+    def union_all(
+        self, *other: _SelectStatementForCompoundArgument[Any]
+    ) -> CTE:
+        r"""Return a new :class:`_expression.CTE` with a SQL ``UNION ALL``
+        of the original CTE against the given selectables provided
+        as positional arguments.
+
+        :param \*other: one or more elements with which to create a
+         UNION ALL.
+
+         .. versionchanged:: 1.4.28 multiple elements are now accepted.
+
+        .. seealso::
+
+            :meth:`_sql.HasCTE.cte` - examples of calling styles
+
+        """
+
+        assert is_select_statement(
+            self.element
+        ), f"CTE element f{self.element} does not support union_all()"
+
+        return CTE._construct(
+            self.element.union_all(*other),
+            name=self.name,
+            recursive=self.recursive,
+            nesting=self.nesting,
+            _restates=self,
+            _prefixes=self._prefixes,
+            _suffixes=self._suffixes,
+        )
+
+    def _get_reference_cte(self) -> CTE:
+        """
+        A recursive CTE is updated to attach the recursive part.
+        Updated CTEs should still refer to the original CTE.
+        This method returns that original, referenced CTE.
+        """
+        return self._restates if self._restates is not None else self
+
+
+class _CTEOpts(NamedTuple):
+    nesting: bool
+
+
+class _ColumnsPlusNames(NamedTuple):
+    required_label_name: Optional[str]
+    """
+    string label name, if non-None, must be rendered as a
+    label, i.e. "AS <name>"
+    """
+
+    proxy_key: Optional[str]
+    """
+    proxy_key that is to be part of the result map for this
+    col.  this is also the key in a fromclause.c or
+    select.selected_columns collection
+    """
+
+    fallback_label_name: Optional[str]
+    """
+    name that can be used to render an "AS <name>" when
+    we have to render a label even though
+    required_label_name was not given
+    """
+
+    column: Union[ColumnElement[Any], TextClause]
+    """
+    the ColumnElement itself
+    """
+
+    repeated: bool
+    """
+    True if this is a duplicate of a previous column
+    in the list of columns
+    """
+
+
+class SelectsRows(ReturnsRows):
+    """Sub-base of ReturnsRows for elements that deliver rows
+    directly, namely SELECT and INSERT/UPDATE/DELETE..RETURNING"""
+
+    _label_style: SelectLabelStyle = LABEL_STYLE_NONE
+
+    def _generate_columns_plus_names(
+        self,
+        anon_for_dupe_key: bool,
+        cols: Optional[_SelectIterable] = None,
+    ) -> List[_ColumnsPlusNames]:
+        """Generate column names as rendered in a SELECT statement by
+        the compiler.
+
+        This is distinct from the _column_naming_convention generator that's
+        intended for population of .c collections and similar, which has
+        different rules.  The collection returned here calls upon the
+        _column_naming_convention as well.
+
+        """
+
+        if cols is None:
+            cols = self._all_selected_columns
+
+        key_naming_convention = SelectState._column_naming_convention(
+            self._label_style
+        )
+
+        names = {}
+
+        result: List[_ColumnsPlusNames] = []
+        result_append = result.append
+
+        table_qualified = self._label_style is LABEL_STYLE_TABLENAME_PLUS_COL
+        label_style_none = self._label_style is LABEL_STYLE_NONE
+
+        # a counter used for "dedupe" labels, which have double underscores
+        # in them and are never referred by name; they only act
+        # as positional placeholders.  they need only be unique within
+        # the single columns clause they're rendered within (required by
+        # some dbs such as mysql).  So their anon identity is tracked against
+        # a fixed counter rather than hash() identity.
+        dedupe_hash = 1
+
+        for c in cols:
+            repeated = False
+
+            if not c._render_label_in_columns_clause:
+                effective_name = required_label_name = fallback_label_name = (
+                    None
+                )
+            elif label_style_none:
+                if TYPE_CHECKING:
+                    assert is_column_element(c)
+
+                effective_name = required_label_name = None
+                fallback_label_name = c._non_anon_label or c._anon_name_label
+            else:
+                if TYPE_CHECKING:
+                    assert is_column_element(c)
+
+                if table_qualified:
+                    required_label_name = effective_name = (
+                        fallback_label_name
+                    ) = c._tq_label
+                else:
+                    effective_name = fallback_label_name = c._non_anon_label
+                    required_label_name = None
+
+                if effective_name is None:
+                    # it seems like this could be _proxy_key and we would
+                    # not need _expression_label but it isn't
+                    # giving us a clue when to use anon_label instead
+                    expr_label = c._expression_label
+                    if expr_label is None:
+                        repeated = c._anon_name_label in names
+                        names[c._anon_name_label] = c
+                        effective_name = required_label_name = None
+
+                        if repeated:
+                            # here, "required_label_name" is sent as
+                            # "None" and "fallback_label_name" is sent.
+                            if table_qualified:
+                                fallback_label_name = (
+                                    c._dedupe_anon_tq_label_idx(dedupe_hash)
+                                )
+                                dedupe_hash += 1
+                            else:
+                                fallback_label_name = c._dedupe_anon_label_idx(
+                                    dedupe_hash
+                                )
+                                dedupe_hash += 1
+                        else:
+                            fallback_label_name = c._anon_name_label
+                    else:
+                        required_label_name = effective_name = (
+                            fallback_label_name
+                        ) = expr_label
+
+            if effective_name is not None:
+                if TYPE_CHECKING:
+                    assert is_column_element(c)
+
+                if effective_name in names:
+                    # when looking to see if names[name] is the same column as
+                    # c, use hash(), so that an annotated version of the column
+                    # is seen as the same as the non-annotated
+                    if hash(names[effective_name]) != hash(c):
+                        # different column under the same name.  apply
+                        # disambiguating label
+                        if table_qualified:
+                            required_label_name = fallback_label_name = (
+                                c._anon_tq_label
+                            )
+                        else:
+                            required_label_name = fallback_label_name = (
+                                c._anon_name_label
+                            )
+
+                        if anon_for_dupe_key and required_label_name in names:
+                            # here, c._anon_tq_label is definitely unique to
+                            # that column identity (or annotated version), so
+                            # this should always be true.
+                            # this is also an infrequent codepath because
+                            # you need two levels of duplication to be here
+                            assert hash(names[required_label_name]) == hash(c)
+
+                            # the column under the disambiguating label is
+                            # already present.  apply the "dedupe" label to
+                            # subsequent occurrences of the column so that the
+                            # original stays non-ambiguous
+                            if table_qualified:
+                                required_label_name = fallback_label_name = (
+                                    c._dedupe_anon_tq_label_idx(dedupe_hash)
+                                )
+                                dedupe_hash += 1
+                            else:
+                                required_label_name = fallback_label_name = (
+                                    c._dedupe_anon_label_idx(dedupe_hash)
+                                )
+                                dedupe_hash += 1
+                            repeated = True
+                        else:
+                            names[required_label_name] = c
+                    elif anon_for_dupe_key:
+                        # same column under the same name. apply the "dedupe"
+                        # label so that the original stays non-ambiguous
+                        if table_qualified:
+                            required_label_name = fallback_label_name = (
+                                c._dedupe_anon_tq_label_idx(dedupe_hash)
+                            )
+                            dedupe_hash += 1
+                        else:
+                            required_label_name = fallback_label_name = (
+                                c._dedupe_anon_label_idx(dedupe_hash)
+                            )
+                            dedupe_hash += 1
+                        repeated = True
+                else:
+                    names[effective_name] = c
+
+            result_append(
+                _ColumnsPlusNames(
+                    required_label_name,
+                    key_naming_convention(c),
+                    fallback_label_name,
+                    c,
+                    repeated,
+                )
+            )
+
+        return result
+
+
+class HasCTE(roles.HasCTERole, SelectsRows):
+    """Mixin that declares a class to include CTE support."""
+
+    _has_ctes_traverse_internals: _TraverseInternalsType = [
+        ("_independent_ctes", InternalTraversal.dp_clauseelement_list),
+        ("_independent_ctes_opts", InternalTraversal.dp_plain_obj),
+    ]
+
+    _independent_ctes: Tuple[CTE, ...] = ()
+    _independent_ctes_opts: Tuple[_CTEOpts, ...] = ()
+
+    @_generative
+    def add_cte(self, *ctes: CTE, nest_here: bool = False) -> Self:
+        r"""Add one or more :class:`_sql.CTE` constructs to this statement.
+
+        This method will associate the given :class:`_sql.CTE` constructs with
+        the parent statement such that they will each be unconditionally
+        rendered in the WITH clause of the final statement, even if not
+        referenced elsewhere within the statement or any sub-selects.
+
+        The optional :paramref:`.HasCTE.add_cte.nest_here` parameter when set
+        to True will have the effect that each given :class:`_sql.CTE` will
+        render in a WITH clause rendered directly along with this statement,
+        rather than being moved to the top of the ultimate rendered statement,
+        even if this statement is rendered as a subquery within a larger
+        statement.
+
+        This method has two general uses. One is to embed CTE statements that
+        serve some purpose without being referenced explicitly, such as the use
+        case of embedding a DML statement such as an INSERT or UPDATE as a CTE
+        inline with a primary statement that may draw from its results
+        indirectly.  The other is to provide control over the exact placement
+        of a particular series of CTE constructs that should remain rendered
+        directly in terms of a particular statement that may be nested in a
+        larger statement.
+
+        E.g.::
+
+            from sqlalchemy import table, column, select
+
+            t = table("t", column("c1"), column("c2"))
+
+            ins = t.insert().values({"c1": "x", "c2": "y"}).cte()
+
+            stmt = select(t).add_cte(ins)
+
+        Would render:
+
+        .. sourcecode:: sql
+
+            WITH anon_1 AS (
+                INSERT INTO t (c1, c2) VALUES (:param_1, :param_2)
+            )
+            SELECT t.c1, t.c2
+            FROM t
+
+        Above, the "anon_1" CTE is not referenced in the SELECT
+        statement, however still accomplishes the task of running an INSERT
+        statement.
+
+        Similarly in a DML-related context, using the PostgreSQL
+        :class:`_postgresql.Insert` construct to generate an "upsert"::
+
+            from sqlalchemy import table, column
+            from sqlalchemy.dialects.postgresql import insert
+
+            t = table("t", column("c1"), column("c2"))
+
+            delete_statement_cte = t.delete().where(t.c.c1 < 1).cte("deletions")
+
+            insert_stmt = insert(t).values({"c1": 1, "c2": 2})
+            update_statement = insert_stmt.on_conflict_do_update(
+                index_elements=[t.c.c1],
+                set_={
+                    "c1": insert_stmt.excluded.c1,
+                    "c2": insert_stmt.excluded.c2,
+                },
+            ).add_cte(delete_statement_cte)
+
+            print(update_statement)
+
+        The above statement renders as:
+
+        .. sourcecode:: sql
+
+            WITH deletions AS (
+                DELETE FROM t WHERE t.c1 < %(c1_1)s
+            )
+            INSERT INTO t (c1, c2) VALUES (%(c1)s, %(c2)s)
+            ON CONFLICT (c1) DO UPDATE SET c1 = excluded.c1, c2 = excluded.c2
+
+        .. versionadded:: 1.4.21
+
+        :param \*ctes: zero or more :class:`.CTE` constructs.
+
+         .. versionchanged:: 2.0  Multiple CTE instances are accepted
+
+        :param nest_here: if True, the given CTE or CTEs will be rendered
+         as though they specified the :paramref:`.HasCTE.cte.nesting` flag
+         to ``True`` when they were added to this :class:`.HasCTE`.
+         Assuming the given CTEs are not referenced in an outer-enclosing
+         statement as well, the CTEs given should render at the level of
+         this statement when this flag is given.
+
+         .. versionadded:: 2.0
+
+         .. seealso::
+
+            :paramref:`.HasCTE.cte.nesting`
+
+
+        """  # noqa: E501
+        opt = _CTEOpts(nest_here)
+        for cte in ctes:
+            cte = coercions.expect(roles.IsCTERole, cte)
+            self._independent_ctes += (cte,)
+            self._independent_ctes_opts += (opt,)
+        return self
+
+    def cte(
+        self,
+        name: Optional[str] = None,
+        recursive: bool = False,
+        nesting: bool = False,
+    ) -> CTE:
+        r"""Return a new :class:`_expression.CTE`,
+        or Common Table Expression instance.
+
+        Common table expressions are a SQL standard whereby SELECT
+        statements can draw upon secondary statements specified along
+        with the primary statement, using a clause called "WITH".
+        Special semantics regarding UNION can also be employed to
+        allow "recursive" queries, where a SELECT statement can draw
+        upon the set of rows that have previously been selected.
+
+        CTEs can also be applied to DML constructs UPDATE, INSERT
+        and DELETE on some databases, both as a source of CTE rows
+        when combined with RETURNING, as well as a consumer of
+        CTE rows.
+
+        SQLAlchemy detects :class:`_expression.CTE` objects, which are treated
+        similarly to :class:`_expression.Alias` objects, as special elements
+        to be delivered to the FROM clause of the statement as well
+        as to a WITH clause at the top of the statement.
+
+        For special prefixes such as PostgreSQL "MATERIALIZED" and
+        "NOT MATERIALIZED", the :meth:`_expression.CTE.prefix_with`
+        method may be
+        used to establish these.
+
+        .. versionchanged:: 1.3.13 Added support for prefixes.
+           In particular - MATERIALIZED and NOT MATERIALIZED.
+
+        :param name: name given to the common table expression.  Like
+         :meth:`_expression.FromClause.alias`, the name can be left as
+         ``None`` in which case an anonymous symbol will be used at query
+         compile time.
+        :param recursive: if ``True``, will render ``WITH RECURSIVE``.
+         A recursive common table expression is intended to be used in
+         conjunction with UNION ALL in order to derive rows
+         from those already selected.
+        :param nesting: if ``True``, will render the CTE locally to the
+         statement in which it is referenced.   For more complex scenarios,
+         the :meth:`.HasCTE.add_cte` method using the
+         :paramref:`.HasCTE.add_cte.nest_here`
+         parameter may also be used to more carefully
+         control the exact placement of a particular CTE.
+
+         .. versionadded:: 1.4.24
+
+         .. seealso::
+
+            :meth:`.HasCTE.add_cte`
+
+        The following examples include two from PostgreSQL's documentation at
+        https://www.postgresql.org/docs/current/static/queries-with.html,
+        as well as additional examples.
+
+        Example 1, non-recursive::
+
+            from sqlalchemy import (
+                Table,
+                Column,
+                String,
+                Integer,
+                MetaData,
+                select,
+                func,
+            )
+
+            metadata = MetaData()
+
+            orders = Table(
+                "orders",
+                metadata,
+                Column("region", String),
+                Column("amount", Integer),
+                Column("product", String),
+                Column("quantity", Integer),
+            )
+
+            regional_sales = (
+                select(orders.c.region, func.sum(orders.c.amount).label("total_sales"))
+                .group_by(orders.c.region)
+                .cte("regional_sales")
+            )
+
+
+            top_regions = (
+                select(regional_sales.c.region)
+                .where(
+                    regional_sales.c.total_sales
+                    > select(func.sum(regional_sales.c.total_sales) / 10)
+                )
+                .cte("top_regions")
+            )
+
+            statement = (
+                select(
+                    orders.c.region,
+                    orders.c.product,
+                    func.sum(orders.c.quantity).label("product_units"),
+                    func.sum(orders.c.amount).label("product_sales"),
+                )
+                .where(orders.c.region.in_(select(top_regions.c.region)))
+                .group_by(orders.c.region, orders.c.product)
+            )
+
+            result = conn.execute(statement).fetchall()
+
+        Example 2, WITH RECURSIVE::
+
+            from sqlalchemy import (
+                Table,
+                Column,
+                String,
+                Integer,
+                MetaData,
+                select,
+                func,
+            )
+
+            metadata = MetaData()
+
+            parts = Table(
+                "parts",
+                metadata,
+                Column("part", String),
+                Column("sub_part", String),
+                Column("quantity", Integer),
+            )
+
+            included_parts = (
+                select(parts.c.sub_part, parts.c.part, parts.c.quantity)
+                .where(parts.c.part == "our part")
+                .cte(recursive=True)
+            )
+
+
+            incl_alias = included_parts.alias()
+            parts_alias = parts.alias()
+            included_parts = included_parts.union_all(
+                select(
+                    parts_alias.c.sub_part, parts_alias.c.part, parts_alias.c.quantity
+                ).where(parts_alias.c.part == incl_alias.c.sub_part)
+            )
+
+            statement = select(
+                included_parts.c.sub_part,
+                func.sum(included_parts.c.quantity).label("total_quantity"),
+            ).group_by(included_parts.c.sub_part)
+
+            result = conn.execute(statement).fetchall()
+
+        Example 3, an upsert using UPDATE and INSERT with CTEs::
+
+            from datetime import date
+            from sqlalchemy import (
+                MetaData,
+                Table,
+                Column,
+                Integer,
+                Date,
+                select,
+                literal,
+                and_,
+                exists,
+            )
+
+            metadata = MetaData()
+
+            visitors = Table(
+                "visitors",
+                metadata,
+                Column("product_id", Integer, primary_key=True),
+                Column("date", Date, primary_key=True),
+                Column("count", Integer),
+            )
+
+            # add 5 visitors for the product_id == 1
+            product_id = 1
+            day = date.today()
+            count = 5
+
+            update_cte = (
+                visitors.update()
+                .where(
+                    and_(visitors.c.product_id == product_id, visitors.c.date == day)
+                )
+                .values(count=visitors.c.count + count)
+                .returning(literal(1))
+                .cte("update_cte")
+            )
+
+            upsert = visitors.insert().from_select(
+                [visitors.c.product_id, visitors.c.date, visitors.c.count],
+                select(literal(product_id), literal(day), literal(count)).where(
+                    ~exists(update_cte.select())
+                ),
+            )
+
+            connection.execute(upsert)
+
+        Example 4, Nesting CTE (SQLAlchemy 1.4.24 and above)::
+
+            value_a = select(literal("root").label("n")).cte("value_a")
+
+            # A nested CTE with the same name as the root one
+            value_a_nested = select(literal("nesting").label("n")).cte(
+                "value_a", nesting=True
+            )
+
+            # Nested CTEs take precedence locally
+            # over the CTEs at a higher level
+            value_b = select(value_a_nested.c.n).cte("value_b")
+
+            value_ab = select(value_a.c.n.label("a"), value_b.c.n.label("b"))
+
+        The above query will render the second CTE nested inside the first,
+        shown with inline parameters below as:
+
+        .. sourcecode:: sql
+
+            WITH
+                value_a AS
+                    (SELECT 'root' AS n),
+                value_b AS
+                    (WITH value_a AS
+                        (SELECT 'nesting' AS n)
+                    SELECT value_a.n AS n FROM value_a)
+            SELECT value_a.n AS a, value_b.n AS b
+            FROM value_a, value_b
+
+        The same CTE can be set up using the :meth:`.HasCTE.add_cte` method
+        as follows (SQLAlchemy 2.0 and above)::
+
+            value_a = select(literal("root").label("n")).cte("value_a")
+
+            # A nested CTE with the same name as the root one
+            value_a_nested = select(literal("nesting").label("n")).cte("value_a")
+
+            # Nested CTEs take precedence locally
+            # over the CTEs at a higher level
+            value_b = (
+                select(value_a_nested.c.n)
+                .add_cte(value_a_nested, nest_here=True)
+                .cte("value_b")
+            )
+
+            value_ab = select(value_a.c.n.label("a"), value_b.c.n.label("b"))
+
+        Example 5, Non-Linear CTE (SQLAlchemy 1.4.28 and above)::
+
+            edge = Table(
+                "edge",
+                metadata,
+                Column("id", Integer, primary_key=True),
+                Column("left", Integer),
+                Column("right", Integer),
+            )
+
+            root_node = select(literal(1).label("node")).cte("nodes", recursive=True)
+
+            left_edge = select(edge.c.left).join(
+                root_node, edge.c.right == root_node.c.node
+            )
+            right_edge = select(edge.c.right).join(
+                root_node, edge.c.left == root_node.c.node
+            )
+
+            subgraph_cte = root_node.union(left_edge, right_edge)
+
+            subgraph = select(subgraph_cte)
+
+        The above query will render 2 UNIONs inside the recursive CTE:
+
+        .. sourcecode:: sql
+
+            WITH RECURSIVE nodes(node) AS (
+                    SELECT 1 AS node
+                UNION
+                    SELECT edge."left" AS "left"
+                    FROM edge JOIN nodes ON edge."right" = nodes.node
+                UNION
+                    SELECT edge."right" AS "right"
+                    FROM edge JOIN nodes ON edge."left" = nodes.node
+            )
+            SELECT nodes.node FROM nodes
+
+        .. seealso::
+
+            :meth:`_orm.Query.cte` - ORM version of
+            :meth:`_expression.HasCTE.cte`.
+
+        """  # noqa: E501
+        return CTE._construct(
+            self, name=name, recursive=recursive, nesting=nesting
+        )
+
+
+class Subquery(AliasedReturnsRows):
+    """Represent a subquery of a SELECT.
+
+    A :class:`.Subquery` is created by invoking the
+    :meth:`_expression.SelectBase.subquery` method, or for convenience the
+    :meth:`_expression.SelectBase.alias` method, on any
+    :class:`_expression.SelectBase` subclass
+    which includes :class:`_expression.Select`,
+    :class:`_expression.CompoundSelect`, and
+    :class:`_expression.TextualSelect`.  As rendered in a FROM clause,
+    it represents the
+    body of the SELECT statement inside parentheses, followed by the usual
+    "AS <somename>" that defines all "alias" objects.
+
+    The :class:`.Subquery` object is very similar to the
+    :class:`_expression.Alias`
+    object and can be used in an equivalent way.    The difference between
+    :class:`_expression.Alias` and :class:`.Subquery` is that
+    :class:`_expression.Alias` always
+    contains a :class:`_expression.FromClause` object whereas
+    :class:`.Subquery`
+    always contains a :class:`_expression.SelectBase` object.
+
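+    E.g., a minimal sketch, assuming a hypothetical ``user`` table::
+
+        from sqlalchemy import select
+
+        subq = select(user.c.id, user.c.name).subquery("u")
+        stmt = select(subq.c.name).where(subq.c.id == 5)
+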
+    .. versionadded:: 1.4 The :class:`.Subquery` class was added which now
+       serves the purpose of providing an aliased version of a SELECT
+       statement.
+
+    """
+
+    __visit_name__ = "subquery"
+
+    _is_subquery = True
+
+    inherit_cache = True
+
+    element: SelectBase
+
+    @classmethod
+    def _factory(
+        cls, selectable: SelectBase, name: Optional[str] = None
+    ) -> Subquery:
+        """Return a :class:`.Subquery` object."""
+
+        return coercions.expect(
+            roles.SelectStatementRole, selectable
+        ).subquery(name=name)
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`.Subquery.as_scalar` method, which was previously "
+        "``Alias.as_scalar()`` prior to version 1.4, is deprecated and "
+        "will be removed in a future release; Please use the "
+        ":meth:`_expression.Select.scalar_subquery` method of the "
+        ":func:`_expression.select` "
+        "construct before constructing a subquery object, or with the ORM "
+        "use the :meth:`_query.Query.scalar_subquery` method.",
+    )
+    def as_scalar(self) -> ScalarSelect[Any]:
+        return self.element.set_label_style(LABEL_STYLE_NONE).scalar_subquery()
+
+
+class FromGrouping(GroupedElement, FromClause):
+    """Represent a grouping of a FROM clause"""
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("element", InternalTraversal.dp_clauseelement)
+    ]
+
+    element: FromClause
+
+    def __init__(self, element: FromClause):
+        self.element = coercions.expect(roles.FromClauseRole, element)
+
+    @util.ro_non_memoized_property
+    def columns(
+        self,
+    ) -> ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]:
+        return self.element.columns
+
+    @util.ro_non_memoized_property
+    def c(self) -> ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]:
+        return self.element.columns
+
+    @property
+    def primary_key(self) -> Iterable[NamedColumn[Any]]:
+        return self.element.primary_key
+
+    @property
+    def foreign_keys(self) -> Iterable[ForeignKey]:
+        return self.element.foreign_keys
+
+    def is_derived_from(self, fromclause: Optional[FromClause]) -> bool:
+        return self.element.is_derived_from(fromclause)
+
+    def alias(
+        self, name: Optional[str] = None, flat: bool = False
+    ) -> NamedFromGrouping:
+        return NamedFromGrouping(self.element.alias(name=name, flat=flat))
+
+    def _anonymous_fromclause(self, **kw: Any) -> FromGrouping:
+        return FromGrouping(self.element._anonymous_fromclause(**kw))
+
+    @util.ro_non_memoized_property
+    def _hide_froms(self) -> Iterable[FromClause]:
+        return self.element._hide_froms
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return self.element._from_objects
+
+    def __getstate__(self) -> Dict[str, FromClause]:
+        return {"element": self.element}
+
+    def __setstate__(self, state: Dict[str, FromClause]) -> None:
+        self.element = state["element"]
+
+    if TYPE_CHECKING:
+
+        def self_group(
+            self, against: Optional[OperatorType] = None
+        ) -> Self: ...
+
+
+class NamedFromGrouping(FromGrouping, NamedFromClause):
+    """represent a grouping of a named FROM clause
+
+    .. versionadded:: 2.0
+
+    """
+
+    inherit_cache = True
+
+    if TYPE_CHECKING:
+
+        def self_group(
+            self, against: Optional[OperatorType] = None
+        ) -> Self: ...
+
+
+class TableClause(roles.DMLTableRole, Immutable, NamedFromClause):
+    """Represents a minimal "table" construct.
+
+    This is a lightweight table object that has only a name, a
+    collection of columns, which are typically produced
+    by the :func:`_expression.column` function, and a schema::
+
+        from sqlalchemy import table, column
+
+        user = table(
+            "user",
+            column("id"),
+            column("name"),
+            column("description"),
+        )
+
+    The :class:`_expression.TableClause` construct serves as the base for
+    the more commonly used :class:`_schema.Table` object, providing
+    the usual set of :class:`_expression.FromClause` services including
+    the ``.c.`` collection and statement generation methods.
+
+    It does **not** provide all the additional schema-level services
+    of :class:`_schema.Table`, including constraints, references to other
+    tables, or support for :class:`_schema.MetaData`-level services.
+    It's useful
+    on its own as an ad-hoc construct used to generate quick SQL
+    statements when a more fully fledged :class:`_schema.Table`
+    is not on hand.
+
+    """
+
+    __visit_name__ = "table"
+
+    _traverse_internals: _TraverseInternalsType = [
+        (
+            "columns",
+            InternalTraversal.dp_fromclause_canonical_column_collection,
+        ),
+        ("name", InternalTraversal.dp_string),
+        ("schema", InternalTraversal.dp_string),
+    ]
+
+    _is_table = True
+
+    fullname: str
+
+    implicit_returning = False
+    """:class:`_expression.TableClause`
+    doesn't support having a primary key or
+    column-level defaults, so implicit returning doesn't apply."""
+
+    @util.ro_memoized_property
+    def _autoincrement_column(self) -> Optional[ColumnClause[Any]]:
+        """No PK or default support so no autoincrement column."""
+        return None
+
+    def __init__(self, name: str, *columns: ColumnClause[Any], **kw: Any):
+        super().__init__()
+        self.name = name
+        self._columns = DedupeColumnCollection()
+        self.primary_key = ColumnSet()  # type: ignore
+        self.foreign_keys = set()  # type: ignore
+        for c in columns:
+            self.append_column(c)
+
+        schema = kw.pop("schema", None)
+        if schema is not None:
+            self.schema = schema
+        if self.schema is not None:
+            self.fullname = "%s.%s" % (self.schema, self.name)
+        else:
+            self.fullname = self.name
+        if kw:
+            raise exc.ArgumentError("Unsupported argument(s): %s" % list(kw))
+
+    if TYPE_CHECKING:
+
+        @util.ro_non_memoized_property
+        def columns(
+            self,
+        ) -> ReadOnlyColumnCollection[str, ColumnClause[Any]]: ...
+
+        @util.ro_non_memoized_property
+        def c(self) -> ReadOnlyColumnCollection[str, ColumnClause[Any]]: ...
+
+    def __str__(self) -> str:
+        if self.schema is not None:
+            return self.schema + "." + self.name
+        else:
+            return self.name
+
+    def _refresh_for_new_column(self, column: ColumnElement[Any]) -> None:
+        pass
+
+    @util.ro_memoized_property
+    def description(self) -> str:
+        return self.name
+
+    def append_column(self, c: ColumnClause[Any]) -> None:
+        existing = c.table
+        if existing is not None and existing is not self:
+            raise exc.ArgumentError(
+                "column object '%s' already assigned to table '%s'"
+                % (c.key, existing)
+            )
+
+        self._columns.add(c)
+        c.table = self
+
+    @util.preload_module("sqlalchemy.sql.dml")
+    def insert(self) -> util.preloaded.sql_dml.Insert:
+        """Generate an :class:`_sql.Insert` construct against this
+        :class:`_expression.TableClause`.
+
+        E.g.::
+
+            table.insert().values(name="foo")
+
+        See :func:`_expression.insert` for argument and usage information.
+
+        """
+
+        return util.preloaded.sql_dml.Insert(self)
+
+    @util.preload_module("sqlalchemy.sql.dml")
+    def update(self) -> Update:
+        """Generate an :func:`_expression.update` construct against this
+        :class:`_expression.TableClause`.
+
+        E.g.::
+
+            table.update().where(table.c.id == 7).values(name="foo")
+
+        See :func:`_expression.update` for argument and usage information.
+
+        """
+        return util.preloaded.sql_dml.Update(
+            self,
+        )
+
+    @util.preload_module("sqlalchemy.sql.dml")
+    def delete(self) -> Delete:
+        """Generate a :func:`_expression.delete` construct against this
+        :class:`_expression.TableClause`.
+
+        E.g.::
+
+            table.delete().where(table.c.id == 7)
+
+        See :func:`_expression.delete` for argument and usage information.
+
+        """
+        return util.preloaded.sql_dml.Delete(self)
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return [self]
+
+
+ForUpdateParameter = Union["ForUpdateArg", None, bool, Dict[str, Any]]
+
+
+class ForUpdateArg(ClauseElement):
+    _traverse_internals: _TraverseInternalsType = [
+        ("of", InternalTraversal.dp_clauseelement_list),
+        ("nowait", InternalTraversal.dp_boolean),
+        ("read", InternalTraversal.dp_boolean),
+        ("skip_locked", InternalTraversal.dp_boolean),
+        ("key_share", InternalTraversal.dp_boolean),
+    ]
+
+    of: Optional[Sequence[ClauseElement]]
+    nowait: bool
+    read: bool
+    skip_locked: bool
+    key_share: bool
+
+    @classmethod
+    def _from_argument(
+        cls, with_for_update: ForUpdateParameter
+    ) -> Optional[ForUpdateArg]:
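+        # Normalize the public "with_for_update" argument: an existing
+        # ForUpdateArg passes through, None/False mean no FOR UPDATE clause,
+        # True means a bare FOR UPDATE, and a dict supplies keyword arguments.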
+        if isinstance(with_for_update, ForUpdateArg):
+            return with_for_update
+        elif with_for_update in (None, False):
+            return None
+        elif with_for_update is True:
+            return ForUpdateArg()
+        else:
+            return ForUpdateArg(**cast("Dict[str, Any]", with_for_update))
+
+    def __eq__(self, other: Any) -> bool:
+        return (
+            isinstance(other, ForUpdateArg)
+            and other.nowait == self.nowait
+            and other.read == self.read
+            and other.skip_locked == self.skip_locked
+            and other.key_share == self.key_share
+            and other.of is self.of
+        )
+
+    def __ne__(self, other: Any) -> bool:
+        return not self.__eq__(other)
+
+    def __hash__(self) -> int:
+        return id(self)
+
+    def __init__(
+        self,
+        *,
+        nowait: bool = False,
+        read: bool = False,
+        of: Optional[_ForUpdateOfArgument] = None,
+        skip_locked: bool = False,
+        key_share: bool = False,
+    ):
+        """Represents arguments specified to
+        :meth:`_expression.GenerativeSelect.with_for_update`.
+
+        """
+
+        self.nowait = nowait
+        self.read = read
+        self.skip_locked = skip_locked
+        self.key_share = key_share
+        if of is not None:
+            self.of = [
+                coercions.expect(roles.ColumnsClauseRole, elem)
+                for elem in util.to_list(of)
+            ]
+        else:
+            self.of = None
+
+
+class Values(roles.InElementRole, Generative, LateralFromClause):
+    """Represent a ``VALUES`` construct that can be used as a FROM element
+    in a statement.
+
+    The :class:`_expression.Values` object is created from the
+    :func:`_expression.values` function.
+
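+    A minimal construction sketch (the column names ``num`` and ``txt``
+    are illustrative assumptions)::
+
+        from sqlalchemy import Integer, String, column, values
+
+        my_values = values(
+            column("num", Integer),
+            column("txt", String),
+            name="my_values",
+        ).data([(1, "a"), (2, "b")])
+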
+    .. versionadded:: 1.4
+
+    """
+
+    __visit_name__ = "values"
+
+    _data: Tuple[Sequence[Tuple[Any, ...]], ...] = ()
+
+    _unnamed: bool
+    _traverse_internals: _TraverseInternalsType = [
+        ("_column_args", InternalTraversal.dp_clauseelement_list),
+        ("_data", InternalTraversal.dp_dml_multi_values),
+        ("name", InternalTraversal.dp_string),
+        ("literal_binds", InternalTraversal.dp_boolean),
+    ]
+
+    def __init__(
+        self,
+        *columns: ColumnClause[Any],
+        name: Optional[str] = None,
+        literal_binds: bool = False,
+    ):
+        super().__init__()
+        self._column_args = columns
+
+        if name is None:
+            self._unnamed = True
+            self.name = _anonymous_label.safe_construct(id(self), "anon")
+        else:
+            self._unnamed = False
+            self.name = name
+        self.literal_binds = literal_binds
+        self.named_with_column = not self._unnamed
+
+    @property
+    def _column_types(self) -> List[TypeEngine[Any]]:
+        return [col.type for col in self._column_args]
+
+    @_generative
+    def alias(self, name: Optional[str] = None, flat: bool = False) -> Self:
+        """Return a new :class:`_expression.Values`
+        construct that is a copy of this
+        one with the given name.
+
+        This method is a VALUES-specific specialization of the
+        :meth:`_expression.FromClause.alias` method.
+
+        .. seealso::
+
+            :ref:`tutorial_using_aliases`
+
+            :func:`_expression.alias`
+
+        """
+        non_none_name: str
+
+        if name is None:
+            non_none_name = _anonymous_label.safe_construct(id(self), "anon")
+        else:
+            non_none_name = name
+
+        self.name = non_none_name
+        self.named_with_column = True
+        self._unnamed = False
+        return self
+
+    @_generative
+    def lateral(self, name: Optional[str] = None) -> LateralFromClause:
+        """Return a new :class:`_expression.Values` with the lateral flag set,
+        so that
+        it renders as LATERAL.
+
+        .. seealso::
+
+            :func:`_expression.lateral`
+
+        """
+        non_none_name: str
+
+        if name is None:
+            non_none_name = self.name
+        else:
+            non_none_name = name
+
+        self._is_lateral = True
+        self.name = non_none_name
+        self._unnamed = False
+        return self
+
+    @_generative
+    def data(self, values: Sequence[Tuple[Any, ...]]) -> Self:
+        """Return a new :class:`_expression.Values` construct,
+        adding the given data to the data list.
+
+        E.g.::
+
+            my_values = my_values.data([(1, "value 1"), (2, "value 2")])
+
+        :param values: a sequence (i.e. list) of tuples that map to the
+         column expressions given in the :class:`_expression.Values`
+         constructor.
+
+        """
+
+        self._data += (values,)
+        return self
+
+    def scalar_values(self) -> ScalarValues:
+        """Returns a scalar ``VALUES`` construct that can be used as a
+        COLUMN element in a statement.
+
+        .. versionadded:: 2.0.0b4
+
+        """
+        return ScalarValues(self._column_args, self._data, self.literal_binds)
+
+    def _populate_column_collection(
+        self,
+        columns: ColumnCollection[str, KeyedColumnElement[Any]],
+        primary_key: ColumnSet,
+        foreign_keys: Set[KeyedColumnElement[Any]],
+    ) -> None:
+        for c in self._column_args:
+            if c.table is not None and c.table is not self:
+                _, c = c._make_proxy(
+                    self, primary_key=primary_key, foreign_keys=foreign_keys
+                )
+            else:
+                # if the column was used in other contexts, ensure
+                # no memoizations of other FROM clauses.
+                # see test_values.py -> test_auto_proxy_select_direct_col
+                c._reset_memoizations()
+            columns.add(c)
+            c.table = self
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return [self]
+
+
+class ScalarValues(roles.InElementRole, GroupedElement, ColumnElement[Any]):
+    """Represent a scalar ``VALUES`` construct that can be used as a
+    COLUMN element in a statement.
+
+    The :class:`_expression.ScalarValues` object is created from the
+    :meth:`_expression.Values.scalar_values` method. It's also
+    automatically generated when a :class:`_expression.Values` is used in
+    an ``IN`` or ``NOT IN`` condition.
+
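+    E.g., a sketch (``tbl`` and ``my_values`` are illustrative
+    assumptions)::
+
+        stmt = select(tbl).where(tbl.c.id.in_(my_values.scalar_values()))
+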
+    .. versionadded:: 2.0.0b4
+
+    """
+
+    __visit_name__ = "scalar_values"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("_column_args", InternalTraversal.dp_clauseelement_list),
+        ("_data", InternalTraversal.dp_dml_multi_values),
+        ("literal_binds", InternalTraversal.dp_boolean),
+    ]
+
+    def __init__(
+        self,
+        columns: Sequence[ColumnClause[Any]],
+        data: Tuple[Sequence[Tuple[Any, ...]], ...],
+        literal_binds: bool,
+    ):
+        super().__init__()
+        self._column_args = columns
+        self._data = data
+        self.literal_binds = literal_binds
+
+    @property
+    def _column_types(self) -> List[TypeEngine[Any]]:
+        return [col.type for col in self._column_args]
+
+    def __clause_element__(self) -> ScalarValues:
+        return self
+
+    if TYPE_CHECKING:
+
+        def self_group(
+            self, against: Optional[OperatorType] = None
+        ) -> Self: ...
+
+
+class SelectBase(
+    roles.SelectStatementRole,
+    roles.DMLSelectRole,
+    roles.CompoundElementRole,
+    roles.InElementRole,
+    HasCTE,
+    SupportsCloneAnnotations,
+    Selectable,
+):
+    """Base class for SELECT statements.
+
+    This includes :class:`_expression.Select`,
+    :class:`_expression.CompoundSelect` and
+    :class:`_expression.TextualSelect`.
+
+    """
+
+    _is_select_base = True
+    is_select = True
+
+    _label_style: SelectLabelStyle = LABEL_STYLE_NONE
+
+    def _refresh_for_new_column(self, column: ColumnElement[Any]) -> None:
+        self._reset_memoizations()
+
+    @util.ro_non_memoized_property
+    def selected_columns(
+        self,
+    ) -> ColumnCollection[str, ColumnElement[Any]]:
+        """A :class:`_expression.ColumnCollection`
+        representing the columns that
+        this SELECT statement or similar construct returns in its result set.
+
+        This collection differs from the :attr:`_expression.FromClause.columns`
+        collection of a :class:`_expression.FromClause` in that the columns
+        within this collection cannot be directly nested inside another SELECT
+        statement; a subquery must be applied first which provides for the
+        necessary parenthesization required by SQL.
+
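+        E.g., an illustrative sketch (``table`` stands in for any
+        :class:`_schema.Table`)::
+
+            stmt = select(table.c.id, table.c.name)
+            id_col = stmt.selected_columns.id
+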
+        .. note::
+
+            The :attr:`_sql.SelectBase.selected_columns` collection does not
+            include expressions established in the columns clause using the
+            :func:`_sql.text` construct; these are silently omitted from the
+            collection. To use plain textual column expressions inside of a
+            :class:`_sql.Select` construct, use the :func:`_sql.literal_column`
+            construct.
+
+        .. seealso::
+
+            :attr:`_sql.Select.selected_columns`
+
+        .. versionadded:: 1.4
+
+        """
+        raise NotImplementedError()
+
+    def _generate_fromclause_column_proxies(
+        self,
+        subquery: FromClause,
+        columns: ColumnCollection[str, KeyedColumnElement[Any]],
+        primary_key: ColumnSet,
+        foreign_keys: Set[KeyedColumnElement[Any]],
+        *,
+        proxy_compound_columns: Optional[
+            Iterable[Sequence[ColumnElement[Any]]]
+        ] = None,
+    ) -> None:
+        raise NotImplementedError()
+
+    @util.ro_non_memoized_property
+    def _all_selected_columns(self) -> _SelectIterable:
+        """A sequence of expressions that correspond to what is rendered
+        in the columns clause, including :class:`_sql.TextClause`
+        constructs.
+
+        .. versionadded:: 1.4.12
+
+        .. seealso::
+
+            :attr:`_sql.SelectBase.exported_columns`
+
+        """
+        raise NotImplementedError()
+
+    @property
+    def exported_columns(
+        self,
+    ) -> ReadOnlyColumnCollection[str, ColumnElement[Any]]:
+        """A :class:`_expression.ColumnCollection`
+        that represents the "exported"
+        columns of this :class:`_expression.Selectable`, not including
+        :class:`_sql.TextClause` constructs.
+
+        The "exported" columns for a :class:`_expression.SelectBase`
+        object are synonymous
+        with the :attr:`_expression.SelectBase.selected_columns` collection.
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :attr:`_expression.Select.exported_columns`
+
+            :attr:`_expression.Selectable.exported_columns`
+
+            :attr:`_expression.FromClause.exported_columns`
+
+
+        """
+        return self.selected_columns.as_readonly()
+
+    @property
+    @util.deprecated(
+        "1.4",
+        "The :attr:`_expression.SelectBase.c` and "
+        ":attr:`_expression.SelectBase.columns` attributes "
+        "are deprecated and will be removed in a future release; these "
+        "attributes implicitly create a subquery that should be explicit.  "
+        "Please call :meth:`_expression.SelectBase.subquery` "
+        "first in order to create "
+        "a subquery, which then contains this attribute.  To access the "
+        "columns that this SELECT object SELECTs "
+        "from, use the :attr:`_expression.SelectBase.selected_columns` "
+        "attribute.",
+    )
+    def c(self) -> ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]:
+        return self._implicit_subquery.columns
+
+    @property
+    def columns(
+        self,
+    ) -> ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]:
+        return self.c
+
+    def get_label_style(self) -> SelectLabelStyle:
+        """
+        Retrieve the current label style.
+
+        Implemented by subclasses.
+
+        """
+        raise NotImplementedError()
+
+    def set_label_style(self, style: SelectLabelStyle) -> Self:
+        """Return a new selectable with the specified label style.
+
+        Implemented by subclasses.
+
+        """
+
+        raise NotImplementedError()
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`_expression.SelectBase.select` method is deprecated "
+        "and will be removed in a future release; this method implicitly "
+        "creates a subquery that should be explicit.  "
+        "Please call :meth:`_expression.SelectBase.subquery` "
+        "first in order to create "
+        "a subquery, which then can be selected.",
+    )
+    def select(self, *arg: Any, **kw: Any) -> Select[Any]:
+        return self._implicit_subquery.select(*arg, **kw)
+
+    @HasMemoized.memoized_attribute
+    def _implicit_subquery(self) -> Subquery:
+        return self.subquery()
+
+    def _scalar_type(self) -> TypeEngine[Any]:
+        raise NotImplementedError()
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`_expression.SelectBase.as_scalar` "
+        "method is deprecated and will be "
+        "removed in a future release.  Please refer to "
+        ":meth:`_expression.SelectBase.scalar_subquery`.",
+    )
+    def as_scalar(self) -> ScalarSelect[Any]:
+        return self.scalar_subquery()
+
+    def exists(self) -> Exists:
+        """Return an :class:`_sql.Exists` representation of this selectable,
+        which can be used as a column expression.
+
+        The returned object is an instance of :class:`_sql.Exists`.
+
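+        E.g., a sketch (``table`` stands in for any :class:`_schema.Table`)::
+
+            exists_criteria = select(table.c.id).where(table.c.id > 5).exists()
+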
+        .. seealso::
+
+            :func:`_sql.exists`
+
+            :ref:`tutorial_exists` - in the :term:`2.0 style` tutorial.
+
+        .. versionadded:: 1.4
+
+        """
+        return Exists(self)
+
+    def scalar_subquery(self) -> ScalarSelect[Any]:
+        """Return a 'scalar' representation of this selectable, which can be
+        used as a column expression.
+
+        The returned object is an instance of :class:`_sql.ScalarSelect`.
+
+        Typically, a select statement which has only one column in its columns
+        clause is eligible to be used as a scalar expression.  The scalar
+        subquery can then be used in the WHERE clause or columns clause of
+        an enclosing SELECT.
+
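+        E.g., a correlated count as a sketch (the ``users`` and
+        ``addresses`` tables are illustrative assumptions)::
+
+            subq = (
+                select(func.count(addresses.c.id))
+                .where(addresses.c.user_id == users.c.id)
+                .scalar_subquery()
+            )
+            stmt = select(users.c.name, subq)
+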
+        Note that the scalar subquery differs from the FROM-level
+        subquery that can be produced using the
+        :meth:`_expression.SelectBase.subquery`
+        method.
+
+        .. versionchanged:: 1.4 - the ``.as_scalar()`` method was renamed to
+           :meth:`_expression.SelectBase.scalar_subquery`.
+
+        .. seealso::
+
+            :ref:`tutorial_scalar_subquery` - in the 2.0 tutorial
+
+        """
+        if self._label_style is not LABEL_STYLE_NONE:
+            self = self.set_label_style(LABEL_STYLE_NONE)
+
+        return ScalarSelect(self)
+
+    def label(self, name: Optional[str]) -> Label[Any]:
+        """Return a 'scalar' representation of this selectable, embedded as a
+        subquery with a label.
+
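+        E.g., a sketch (``table`` stands in for any :class:`_schema.Table`)::
+
+            max_id = select(func.max(table.c.id)).label("max_id")
+            stmt = select(table.c.name, max_id)
+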
+        .. seealso::
+
+            :meth:`_expression.SelectBase.scalar_subquery`.
+
+        """
+        return self.scalar_subquery().label(name)
+
+    def lateral(self, name: Optional[str] = None) -> LateralFromClause:
+        """Return a LATERAL alias of this :class:`_expression.Selectable`.
+
+        The return value is the :class:`_expression.Lateral` construct also
+        provided by the top-level :func:`_expression.lateral` function.
+
+        .. seealso::
+
+            :ref:`tutorial_lateral_correlation` - overview of usage.
+
+        """
+        return Lateral._factory(self, name)
+
+    def subquery(self, name: Optional[str] = None) -> Subquery:
+        """Return a subquery of this :class:`_expression.SelectBase`.
+
+        From a SQL perspective, a subquery is a parenthesized, named
+        construct that can be placed in the FROM clause of another
+        SELECT statement.
+
+        Given a SELECT statement such as::
+
+            stmt = select(table.c.id, table.c.name)
+
+        The above statement might look like:
+
+        .. sourcecode:: sql
+
+            SELECT table.id, table.name FROM table
+
+        The subquery form by itself renders the same way; however, when
+        embedded into the FROM clause of another SELECT statement, it becomes
+        a named sub-element::
+
+            subq = stmt.subquery()
+            new_stmt = select(subq)
+
+        The above renders as:
+
+        .. sourcecode:: sql
+
+            SELECT anon_1.id, anon_1.name
+            FROM (SELECT table.id, table.name FROM table) AS anon_1
+
+        Historically, :meth:`_expression.SelectBase.subquery`
+        is equivalent to calling
+        the :meth:`_expression.FromClause.alias`
+        method on a FROM object; however,
+        as a :class:`_expression.SelectBase`
+        object is not directly a FROM object,
+        the :meth:`_expression.SelectBase.subquery`
+        method provides clearer semantics.
+
+        .. versionadded:: 1.4
+
+        """
+
+        return Subquery._construct(
+            self._ensure_disambiguated_names(), name=name
+        )
+
+    def _ensure_disambiguated_names(self) -> Self:
+        """Ensure that the names generated by this selectbase will be
+        disambiguated in some way, if possible.
+
+        """
+
+        raise NotImplementedError()
+
+    def alias(
+        self, name: Optional[str] = None, flat: bool = False
+    ) -> Subquery:
+        """Return a named subquery against this
+        :class:`_expression.SelectBase`.
+
+        For a :class:`_expression.SelectBase` (as opposed to a
+        :class:`_expression.FromClause`),
+        this returns a :class:`.Subquery` object which behaves mostly the
+        same as the :class:`_expression.Alias` object that is used with a
+        :class:`_expression.FromClause`.
+
+        .. versionchanged:: 1.4 The :meth:`_expression.SelectBase.alias`
+           method is now
+           a synonym for the :meth:`_expression.SelectBase.subquery` method.
+
+        """
+        return self.subquery(name=name)
+
+
+_SB = TypeVar("_SB", bound=SelectBase)
+
+
+class SelectStatementGrouping(GroupedElement, SelectBase, Generic[_SB]):
+    """Represent a grouping of a :class:`_expression.SelectBase`.
+
+    This differs from :class:`.Subquery` in that we are still
+    an "inner" SELECT statement; it is used strictly for grouping inside of
+    compound selects.
+
+    """
+
+    __visit_name__ = "select_statement_grouping"
+    _traverse_internals: _TraverseInternalsType = [
+        ("element", InternalTraversal.dp_clauseelement)
+    ] + SupportsCloneAnnotations._clone_annotations_traverse_internals
+
+    _is_select_container = True
+
+    element: _SB
+
+    def __init__(self, element: _SB) -> None:
+        self.element = cast(
+            _SB, coercions.expect(roles.SelectStatementRole, element)
+        )
+
+    def _ensure_disambiguated_names(self) -> SelectStatementGrouping[_SB]:
+        new_element = self.element._ensure_disambiguated_names()
+        if new_element is not self.element:
+            return SelectStatementGrouping(new_element)
+        else:
+            return self
+
+    def get_label_style(self) -> SelectLabelStyle:
+        return self.element.get_label_style()
+
+    def set_label_style(
+        self, label_style: SelectLabelStyle
+    ) -> SelectStatementGrouping[_SB]:
+        return SelectStatementGrouping(
+            self.element.set_label_style(label_style)
+        )
+
+    @property
+    def select_statement(self) -> _SB:
+        return self.element
+
+    def self_group(self, against: Optional[OperatorType] = None) -> Self:
+        return self
+
+    if TYPE_CHECKING:
+
+        def _ungroup(self) -> _SB: ...
+
+    # def _generate_columns_plus_names(
+    #    self, anon_for_dupe_key: bool
+    # ) -> List[Tuple[str, str, str, ColumnElement[Any], bool]]:
+    #    return self.element._generate_columns_plus_names(anon_for_dupe_key)
+
+    def _generate_fromclause_column_proxies(
+        self,
+        subquery: FromClause,
+        columns: ColumnCollection[str, KeyedColumnElement[Any]],
+        primary_key: ColumnSet,
+        foreign_keys: Set[KeyedColumnElement[Any]],
+        *,
+        proxy_compound_columns: Optional[
+            Iterable[Sequence[ColumnElement[Any]]]
+        ] = None,
+    ) -> None:
+        self.element._generate_fromclause_column_proxies(
+            subquery,
+            columns,
+            proxy_compound_columns=proxy_compound_columns,
+            primary_key=primary_key,
+            foreign_keys=foreign_keys,
+        )
+
+    @util.ro_non_memoized_property
+    def _all_selected_columns(self) -> _SelectIterable:
+        return self.element._all_selected_columns
+
+    @util.ro_non_memoized_property
+    def selected_columns(self) -> ColumnCollection[str, ColumnElement[Any]]:
+        """A :class:`_expression.ColumnCollection`
+        representing the columns that
+        the embedded SELECT statement returns in its result set, not including
+        :class:`_sql.TextClause` constructs.
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :attr:`_sql.Select.selected_columns`
+
+        """
+        return self.element.selected_columns
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return self.element._from_objects
+
+    def add_cte(self, *ctes: CTE, nest_here: bool = False) -> Self:
+        # SelectStatementGrouping not generative: has no attribute '_generate'
+        raise NotImplementedError
+
+
+class GenerativeSelect(SelectBase, Generative):
+    """Base class for SELECT statements where additional elements can be
+    added.
+
+    This serves as the base for :class:`_expression.Select` and
+    :class:`_expression.CompoundSelect`
+    where elements such as ORDER BY, GROUP BY can be added and column
+    rendering can be controlled.  Compare to
+    :class:`_expression.TextualSelect`, which,
+    while it subclasses :class:`_expression.SelectBase`
+    and is also a SELECT construct,
+    represents a fixed textual string which cannot be altered at this level,
+    only wrapped as a subquery.
+
+    """
+
+    _order_by_clauses: Tuple[ColumnElement[Any], ...] = ()
+    _group_by_clauses: Tuple[ColumnElement[Any], ...] = ()
+    _limit_clause: Optional[ColumnElement[Any]] = None
+    _offset_clause: Optional[ColumnElement[Any]] = None
+    _fetch_clause: Optional[ColumnElement[Any]] = None
+    _fetch_clause_options: Optional[Dict[str, bool]] = None
+    _for_update_arg: Optional[ForUpdateArg] = None
+
+    def __init__(self, _label_style: SelectLabelStyle = LABEL_STYLE_DEFAULT):
+        self._label_style = _label_style
+
+    @_generative
+    def with_for_update(
+        self,
+        *,
+        nowait: bool = False,
+        read: bool = False,
+        of: Optional[_ForUpdateOfArgument] = None,
+        skip_locked: bool = False,
+        key_share: bool = False,
+    ) -> Self:
+        """Specify a ``FOR UPDATE`` clause for this
+        :class:`_expression.GenerativeSelect`.
+
+        E.g.::
+
+            stmt = select(table).with_for_update(nowait=True)
+
+        On a database like PostgreSQL or Oracle Database, the above would
+        render a statement like:
+
+        .. sourcecode:: sql
+
+            SELECT table.a, table.b FROM table FOR UPDATE NOWAIT
+
+        on other backends, the ``nowait`` option is ignored and instead
+        would produce:
+
+        .. sourcecode:: sql
+
+            SELECT table.a, table.b FROM table FOR UPDATE
+
+        When called with no arguments, the statement will render with
+        the suffix ``FOR UPDATE``.   Additional arguments can then be
+        provided which allow for common database-specific
+        variants.
+
+        :param nowait: boolean; will render ``FOR UPDATE NOWAIT`` on Oracle
+         Database and PostgreSQL dialects.
+
+        :param read: boolean; will render ``LOCK IN SHARE MODE`` on MySQL,
+         ``FOR SHARE`` on PostgreSQL.  On PostgreSQL, when combined with
+         ``nowait``, will render ``FOR SHARE NOWAIT``.
+
+        :param of: SQL expression or list of SQL expression elements,
+         (typically :class:`_schema.Column` objects or a compatible expression,
+         for some backends may also be a table expression) which will render
+         into a ``FOR UPDATE OF`` clause; supported by PostgreSQL, Oracle
+         Database, some MySQL versions and possibly others. May render as a
+         table or as a column depending on backend.
+
+        :param skip_locked: boolean, will render ``FOR UPDATE SKIP LOCKED`` on
+         Oracle Database and PostgreSQL dialects or ``FOR SHARE SKIP LOCKED``
+         if ``read=True`` is also specified.
+
+        :param key_share: boolean, will render ``FOR NO KEY UPDATE``,
+         or if combined with ``read=True`` will render ``FOR KEY SHARE``,
+         on the PostgreSQL dialect.
+
+        """
+        self._for_update_arg = ForUpdateArg(
+            nowait=nowait,
+            read=read,
+            of=of,
+            skip_locked=skip_locked,
+            key_share=key_share,
+        )
+        return self
+
+    def get_label_style(self) -> SelectLabelStyle:
+        """
+        Retrieve the current label style.
+
+        .. versionadded:: 1.4
+
+        """
+        return self._label_style
+
+    def set_label_style(self, style: SelectLabelStyle) -> Self:
+        """Return a new selectable with the specified label style.
+
+        There are three "label styles" available,
+        :attr:`_sql.SelectLabelStyle.LABEL_STYLE_DISAMBIGUATE_ONLY`,
+        :attr:`_sql.SelectLabelStyle.LABEL_STYLE_TABLENAME_PLUS_COL`, and
+        :attr:`_sql.SelectLabelStyle.LABEL_STYLE_NONE`.   The default style is
+        :attr:`_sql.SelectLabelStyle.LABEL_STYLE_DISAMBIGUATE_ONLY`.
+
+        In modern SQLAlchemy, there is not generally a need to change the
+        labeling style, as per-expression labels are more effectively used by
+        making use of the :meth:`_sql.ColumnElement.label` method. In past
+        versions, :data:`_sql.LABEL_STYLE_TABLENAME_PLUS_COL` was used to
+        disambiguate same-named columns from different tables, aliases, or
+        subqueries; the newer :data:`_sql.LABEL_STYLE_DISAMBIGUATE_ONLY` now
+        applies labels only to names that conflict with an existing name so
+        that the impact of this labeling is minimal.
+
+        The rationale for disambiguation is mostly so that all column
+        expressions are available from a given :attr:`_sql.FromClause.c`
+        collection when a subquery is created.
+
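+        E.g., a sketch (``table`` stands in for any :class:`_schema.Table`)::
+
+            from sqlalchemy import LABEL_STYLE_TABLENAME_PLUS_COL
+
+            stmt = select(table).set_label_style(
+                LABEL_STYLE_TABLENAME_PLUS_COL
+            )
+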
+        .. versionadded:: 1.4 - the
+            :meth:`_sql.GenerativeSelect.set_label_style` method replaces the
+            previous combination of ``.apply_labels()``, ``.with_labels()`` and
+            ``use_labels=True`` methods and/or parameters.
+
+        .. seealso::
+
+            :data:`_sql.LABEL_STYLE_DISAMBIGUATE_ONLY`
+
+            :data:`_sql.LABEL_STYLE_TABLENAME_PLUS_COL`
+
+            :data:`_sql.LABEL_STYLE_NONE`
+
+            :data:`_sql.LABEL_STYLE_DEFAULT`
+
+        """
+        if self._label_style is not style:
+            self = self._generate()
+            self._label_style = style
+        return self
+
+    @property
+    def _group_by_clause(self) -> ClauseList:
+        """ClauseList access to group_by_clauses for legacy dialects"""
+        return ClauseList._construct_raw(
+            operators.comma_op, self._group_by_clauses
+        )
+
+    @property
+    def _order_by_clause(self) -> ClauseList:
+        """ClauseList access to order_by_clauses for legacy dialects"""
+        return ClauseList._construct_raw(
+            operators.comma_op, self._order_by_clauses
+        )
+
+    def _offset_or_limit_clause(
+        self,
+        element: _LimitOffsetType,
+        name: Optional[str] = None,
+        type_: Optional[_TypeEngineArgument[int]] = None,
+    ) -> ColumnElement[Any]:
+        """Convert the given value to an "offset or limit" clause.
+
+        This handles incoming integers and converts to an expression; if
+        an expression is already given, it is passed through.
+
+        """
+        return coercions.expect(
+            roles.LimitOffsetRole, element, name=name, type_=type_
+        )
+
+    @overload
+    def _offset_or_limit_clause_asint(
+        self, clause: ColumnElement[Any], attrname: str
+    ) -> NoReturn: ...
+
+    @overload
+    def _offset_or_limit_clause_asint(
+        self, clause: Optional[_OffsetLimitParam], attrname: str
+    ) -> Optional[int]: ...
+
+    def _offset_or_limit_clause_asint(
+        self, clause: Optional[ColumnElement[Any]], attrname: str
+    ) -> Union[NoReturn, Optional[int]]:
+        """Convert the "offset or limit" clause of a select construct to an
+        integer.
+
+        This is only possible if the value is stored as a simple bound
+        parameter. Otherwise, a compilation error is raised.
+
+        """
+        if clause is None:
+            return None
+        try:
+            value = clause._limit_offset_value
+        except AttributeError as err:
+            raise exc.CompileError(
+                "This SELECT structure does not use a simple "
+                "integer value for %s" % attrname
+            ) from err
+        else:
+            return util.asint(value)
+
+    @property
+    def _limit(self) -> Optional[int]:
+        """Get an integer value for the limit.  This should only be used
+        by code that cannot support a limit as a BindParameter or
+        other custom clause as it will throw an exception if the limit
+        isn't currently set to an integer.
+
+        """
+        return self._offset_or_limit_clause_asint(self._limit_clause, "limit")
+
+    def _simple_int_clause(self, clause: ClauseElement) -> bool:
+        """True if the clause is a simple integer, False
+        if it is not present or is a SQL expression.
+        """
+        return isinstance(clause, _OffsetLimitParam)
+
+    @property
+    def _offset(self) -> Optional[int]:
+        """Get an integer value for the offset.  This should only be used
+        by code that cannot support an offset as a BindParameter or
+        other custom clause as it will throw an exception if the
+        offset isn't currently set to an integer.
+
+        """
+        return self._offset_or_limit_clause_asint(
+            self._offset_clause, "offset"
+        )
+
+    @property
+    def _has_row_limiting_clause(self) -> bool:
+        return (
+            self._limit_clause is not None
+            or self._offset_clause is not None
+            or self._fetch_clause is not None
+        )
+
+    @_generative
+    def limit(self, limit: _LimitOffsetType) -> Self:
+        """Return a new selectable with the given LIMIT criterion
+        applied.
+
+        This is a numerical value which usually renders as a ``LIMIT``
+        expression in the resulting select.  Backends that don't
+        support ``LIMIT`` will attempt to provide similar
+        functionality.
+
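+        E.g., a sketch (``table`` stands in for any :class:`_schema.Table`)::
+
+            stmt = select(table).order_by(table.c.id).limit(10)
+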
+        .. note::
+
+           The :meth:`_sql.GenerativeSelect.limit` method will replace
+           any clause applied with :meth:`_sql.GenerativeSelect.fetch`.
+
+        :param limit: an integer LIMIT parameter, or a SQL expression
+         that provides an integer result. Pass ``None`` to reset it.
+
+        .. seealso::
+
+           :meth:`_sql.GenerativeSelect.fetch`
+
+           :meth:`_sql.GenerativeSelect.offset`
+
+        """
+
+        self._fetch_clause = self._fetch_clause_options = None
+        self._limit_clause = self._offset_or_limit_clause(limit)
+        return self
+
+    @_generative
+    def fetch(
+        self,
+        count: _LimitOffsetType,
+        with_ties: bool = False,
+        percent: bool = False,
+    ) -> Self:
+        """Return a new selectable with the given FETCH FIRST criterion
+        applied.
+
+        This is a numeric value which usually renders as a
+        ``FETCH {FIRST | NEXT} [ count ] {ROW | ROWS} {ONLY | WITH TIES}``
+        expression in the resulting select. This functionality is currently
+        implemented for Oracle Database, PostgreSQL, and MSSQL.
+
+        Use :meth:`_sql.GenerativeSelect.offset` to specify the offset.
+
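+        E.g., a sketch (``table`` stands in for any :class:`_schema.Table`)::
+
+            stmt = select(table).order_by(table.c.id).fetch(10, with_ties=True)
+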
+        .. note::
+
+           The :meth:`_sql.GenerativeSelect.fetch` method will replace
+           any clause applied with :meth:`_sql.GenerativeSelect.limit`.
+
+        .. versionadded:: 1.4
+
+        :param count: an integer COUNT parameter, or a SQL expression
+         that provides an integer result. When ``percent=True`` this will
+         represent the percentage of rows to return, not the absolute value.
+         Pass ``None`` to reset it.
+
+        :param with_ties: When ``True``, the WITH TIES option is used
+         to return any additional rows that tie for the last place in the
+         result set according to the ``ORDER BY`` clause. The
+         ``ORDER BY`` may be mandatory in this case. Defaults to ``False``.
+
+        :param percent: When ``True``, ``count`` represents the percentage
+         of the total number of selected rows to return. Defaults to ``False``.
+
+        .. seealso::
+
+           :meth:`_sql.GenerativeSelect.limit`
+
+           :meth:`_sql.GenerativeSelect.offset`
+
+        """
+
+        self._limit_clause = None
+        if count is None:
+            self._fetch_clause = self._fetch_clause_options = None
+        else:
+            self._fetch_clause = self._offset_or_limit_clause(count)
+            self._fetch_clause_options = {
+                "with_ties": with_ties,
+                "percent": percent,
+            }
+        return self
+
+    @_generative
+    def offset(self, offset: _LimitOffsetType) -> Self:
+        """Return a new selectable with the given OFFSET criterion
+        applied.
+
+
+        This is a numeric value which usually renders as an ``OFFSET``
+        expression in the resulting select.  Backends that don't
+        support ``OFFSET`` will attempt to provide similar
+        functionality.
+
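+        E.g., a sketch (``table`` stands in for any :class:`_schema.Table`)::
+
+            stmt = select(table).order_by(table.c.id).limit(10).offset(20)
+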
+        :param offset: an integer OFFSET parameter, or a SQL expression
+         that provides an integer result. Pass ``None`` to reset it.
+
+        .. seealso::
+
+           :meth:`_sql.GenerativeSelect.limit`
+
+           :meth:`_sql.GenerativeSelect.fetch`
+
+        """
+
+        self._offset_clause = self._offset_or_limit_clause(offset)
+        return self
+
+    @_generative
+    @util.preload_module("sqlalchemy.sql.util")
+    def slice(
+        self,
+        start: int,
+        stop: int,
+    ) -> Self:
+        """Apply LIMIT / OFFSET to this statement based on a slice.
+
+        The start and stop indices behave like the argument to Python's
+        built-in :func:`range` function. This method provides an
+        alternative to using ``LIMIT``/``OFFSET`` to get a slice of the
+        query.
+
+        For example::
+
+            stmt = select(User).order_by(User.id).slice(1, 3)
+
+        renders as
+
+        .. sourcecode:: sql
+
+           SELECT users.id AS users_id,
+                  users.name AS users_name
+           FROM users ORDER BY users.id
+           LIMIT ? OFFSET ?
+           (2, 1)
+
+        .. note::
+
+           The :meth:`_sql.GenerativeSelect.slice` method will replace
+           any clause applied with :meth:`_sql.GenerativeSelect.fetch`.
+
+        .. versionadded:: 1.4  Added the :meth:`_sql.GenerativeSelect.slice`
+           method generalized from the ORM.
+
+        .. seealso::
+
+           :meth:`_sql.GenerativeSelect.limit`
+
+           :meth:`_sql.GenerativeSelect.offset`
+
+           :meth:`_sql.GenerativeSelect.fetch`
+
+        """
+        sql_util = util.preloaded.sql_util
+        self._fetch_clause = self._fetch_clause_options = None
+        self._limit_clause, self._offset_clause = sql_util._make_slice(
+            self._limit_clause, self._offset_clause, start, stop
+        )
+        return self
+
+    @_generative
+    def order_by(
+        self,
+        __first: Union[
+            Literal[None, _NoArg.NO_ARG],
+            _ColumnExpressionOrStrLabelArgument[Any],
+        ] = _NoArg.NO_ARG,
+        *clauses: _ColumnExpressionOrStrLabelArgument[Any],
+    ) -> Self:
+        r"""Return a new selectable with the given list of ORDER BY
+        criteria applied.
+
+        e.g.::
+
+            stmt = select(table).order_by(table.c.id, table.c.name)
+
+        Calling this method multiple times is equivalent to calling it once
+        with all the clauses concatenated. All existing ORDER BY criteria may
+        be cancelled by passing ``None`` by itself.  New ORDER BY criteria may
+        then be added by invoking
+        :meth:`_sql.GenerativeSelect.order_by` again, e.g.::
+
+            # will erase all ORDER BY and ORDER BY new_col alone
+            stmt = stmt.order_by(None).order_by(new_col)
+
+        :param \*clauses: a series of :class:`_expression.ColumnElement`
+         constructs
+         which will be used to generate an ORDER BY clause.
+
+        .. seealso::
+
+            :ref:`tutorial_order_by` - in the :ref:`unified_tutorial`
+
+            :ref:`tutorial_order_by_label` - in the :ref:`unified_tutorial`
+
+        """
+
+        if not clauses and __first is None:
+            self._order_by_clauses = ()
+        elif __first is not _NoArg.NO_ARG:
+            self._order_by_clauses += tuple(
+                coercions.expect(
+                    roles.OrderByRole, clause, apply_propagate_attrs=self
+                )
+                for clause in (__first,) + clauses
+            )
+        return self
+
+    @_generative
+    def group_by(
+        self,
+        __first: Union[
+            Literal[None, _NoArg.NO_ARG],
+            _ColumnExpressionOrStrLabelArgument[Any],
+        ] = _NoArg.NO_ARG,
+        *clauses: _ColumnExpressionOrStrLabelArgument[Any],
+    ) -> Self:
+        r"""Return a new selectable with the given list of GROUP BY
+        criteria applied.
+
+        All existing GROUP BY settings can be suppressed by passing ``None``.
+
+        e.g.::
+
+            stmt = select(table.c.name, func.max(table.c.stat)).group_by(table.c.name)
+
+        :param \*clauses: a series of :class:`_expression.ColumnElement`
+         constructs
+         which will be used to generate a GROUP BY clause.
+
+        .. seealso::
+
+            :ref:`tutorial_group_by_w_aggregates` - in the
+            :ref:`unified_tutorial`
+
+            :ref:`tutorial_order_by_label` - in the :ref:`unified_tutorial`
+
+        """  # noqa: E501
+
+        if not clauses and __first is None:
+            self._group_by_clauses = ()
+        elif __first is not _NoArg.NO_ARG:
+            self._group_by_clauses += tuple(
+                coercions.expect(
+                    roles.GroupByRole, clause, apply_propagate_attrs=self
+                )
+                for clause in (__first,) + clauses
+            )
+        return self
+
+
+@CompileState.plugin_for("default", "compound_select")
+class CompoundSelectState(CompileState):
+    @util.memoized_property
+    def _label_resolve_dict(
+        self,
+    ) -> Tuple[
+        Dict[str, ColumnElement[Any]],
+        Dict[str, ColumnElement[Any]],
+        Dict[str, ColumnElement[Any]],
+    ]:
+        # TODO: this is hacky and slow
+        hacky_subquery = self.statement.subquery()
+        hacky_subquery.named_with_column = False
+        d = {c.key: c for c in hacky_subquery.c}
+        return d, d, d
+
+
+class _CompoundSelectKeyword(Enum):
+    UNION = "UNION"
+    UNION_ALL = "UNION ALL"
+    EXCEPT = "EXCEPT"
+    EXCEPT_ALL = "EXCEPT ALL"
+    INTERSECT = "INTERSECT"
+    INTERSECT_ALL = "INTERSECT ALL"
+
+
+class CompoundSelect(HasCompileState, GenerativeSelect, TypedReturnsRows[_TP]):
+    """Forms the basis of ``UNION``, ``UNION ALL``, and other
+    SELECT-based set operations.
+
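+    E.g., via the :func:`_expression.union` function, as a sketch assuming
+    two union-compatible SELECT statements::
+
+        from sqlalchemy import union
+
+        stmt = union(select(table_a), select(table_b))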
+
+    .. seealso::
+
+        :func:`_expression.union`
+
+        :func:`_expression.union_all`
+
+        :func:`_expression.intersect`
+
+        :func:`_expression.intersect_all`
+
+        :func:`_expression.except_`
+
+        :func:`_expression.except_all`
+
+    """
+
+    __visit_name__ = "compound_select"
+
+    _traverse_internals: _TraverseInternalsType = (
+        [
+            ("selects", InternalTraversal.dp_clauseelement_list),
+            ("_limit_clause", InternalTraversal.dp_clauseelement),
+            ("_offset_clause", InternalTraversal.dp_clauseelement),
+            ("_fetch_clause", InternalTraversal.dp_clauseelement),
+            ("_fetch_clause_options", InternalTraversal.dp_plain_dict),
+            ("_order_by_clauses", InternalTraversal.dp_clauseelement_list),
+            ("_group_by_clauses", InternalTraversal.dp_clauseelement_list),
+            ("_for_update_arg", InternalTraversal.dp_clauseelement),
+            ("keyword", InternalTraversal.dp_string),
+        ]
+        + SupportsCloneAnnotations._clone_annotations_traverse_internals
+        + HasCTE._has_ctes_traverse_internals
+    )
+
+    selects: List[SelectBase]
+
+    _is_from_container = True
+    _auto_correlate = False
+
+    def __init__(
+        self,
+        keyword: _CompoundSelectKeyword,
+        *selects: _SelectStatementForCompoundArgument[_TP],
+    ):
+        self.keyword = keyword
+        self.selects = [
+            coercions.expect(
+                roles.CompoundElementRole, s, apply_propagate_attrs=self
+            ).self_group(against=self)
+            for s in selects
+        ]
+
+        GenerativeSelect.__init__(self)
+
+    @classmethod
+    def _create_union(
+        cls, *selects: _SelectStatementForCompoundArgument[_TP]
+    ) -> CompoundSelect[_TP]:
+        return CompoundSelect(_CompoundSelectKeyword.UNION, *selects)
+
+    @classmethod
+    def _create_union_all(
+        cls, *selects: _SelectStatementForCompoundArgument[_TP]
+    ) -> CompoundSelect[_TP]:
+        return CompoundSelect(_CompoundSelectKeyword.UNION_ALL, *selects)
+
+    @classmethod
+    def _create_except(
+        cls, *selects: _SelectStatementForCompoundArgument[_TP]
+    ) -> CompoundSelect[_TP]:
+        return CompoundSelect(_CompoundSelectKeyword.EXCEPT, *selects)
+
+    @classmethod
+    def _create_except_all(
+        cls, *selects: _SelectStatementForCompoundArgument[_TP]
+    ) -> CompoundSelect[_TP]:
+        return CompoundSelect(_CompoundSelectKeyword.EXCEPT_ALL, *selects)
+
+    @classmethod
+    def _create_intersect(
+        cls, *selects: _SelectStatementForCompoundArgument[_TP]
+    ) -> CompoundSelect[_TP]:
+        return CompoundSelect(_CompoundSelectKeyword.INTERSECT, *selects)
+
+    @classmethod
+    def _create_intersect_all(
+        cls, *selects: _SelectStatementForCompoundArgument[_TP]
+    ) -> CompoundSelect[_TP]:
+        return CompoundSelect(_CompoundSelectKeyword.INTERSECT_ALL, *selects)
+
+    def _scalar_type(self) -> TypeEngine[Any]:
+        return self.selects[0]._scalar_type()
+
+    def self_group(
+        self, against: Optional[OperatorType] = None
+    ) -> GroupedElement:
+        return SelectStatementGrouping(self)
+
+    def is_derived_from(self, fromclause: Optional[FromClause]) -> bool:
+        for s in self.selects:
+            if s.is_derived_from(fromclause):
+                return True
+        return False
+
+    def set_label_style(self, style: SelectLabelStyle) -> Self:
+        if self._label_style is not style:
+            self = self._generate()
+            select_0 = self.selects[0].set_label_style(style)
+            self.selects = [select_0] + self.selects[1:]
+
+        return self
+
+    def _ensure_disambiguated_names(self) -> Self:
+        new_select = self.selects[0]._ensure_disambiguated_names()
+        if new_select is not self.selects[0]:
+            self = self._generate()
+            self.selects = [new_select] + self.selects[1:]
+
+        return self
+
+    def _generate_fromclause_column_proxies(
+        self,
+        subquery: FromClause,
+        columns: ColumnCollection[str, KeyedColumnElement[Any]],
+        primary_key: ColumnSet,
+        foreign_keys: Set[KeyedColumnElement[Any]],
+        *,
+        proxy_compound_columns: Optional[
+            Iterable[Sequence[ColumnElement[Any]]]
+        ] = None,
+    ) -> None:
+        # this is a slightly hacky thing - the union exports a
+        # column that resembles just that of the *first* selectable.
+        # to get at a "composite" column, particularly foreign keys,
+        # you have to dig through the proxies collection which we
+        # generate below.
+        select_0 = self.selects[0]
+
+        if self._label_style is not LABEL_STYLE_DEFAULT:
+            select_0 = select_0.set_label_style(self._label_style)
+
+        # hand-construct the "_proxies" collection to include all
+        # derived columns; place a 'weight' annotation corresponding
+        # to how low in the list of select()s the column occurs, so
+        # that the corresponding_column() operation can resolve
+        # conflicts
+        extra_col_iterator = zip(
+            *[
+                [
+                    c._annotate(dd)
+                    for c in stmt._all_selected_columns
+                    if is_column_element(c)
+                ]
+                for dd, stmt in [
+                    ({"weight": i + 1}, stmt)
+                    for i, stmt in enumerate(self.selects)
+                ]
+            ]
+        )
+
+        # the incoming proxy_compound_columns can also be present if this is
+        # a compound embedded in a compound.  it's probably more appropriate
+        # that we generate new weights local to this nested compound, though
+        # it's unclear what that means for a compound nested in a compound
+        select_0._generate_fromclause_column_proxies(
+            subquery,
+            columns,
+            proxy_compound_columns=extra_col_iterator,
+            primary_key=primary_key,
+            foreign_keys=foreign_keys,
+        )
+
+    def _refresh_for_new_column(self, column: ColumnElement[Any]) -> None:
+        super()._refresh_for_new_column(column)
+        for select in self.selects:
+            select._refresh_for_new_column(column)
+
+    @util.ro_non_memoized_property
+    def _all_selected_columns(self) -> _SelectIterable:
+        return self.selects[0]._all_selected_columns
+
+    @util.ro_non_memoized_property
+    def selected_columns(
+        self,
+    ) -> ColumnCollection[str, ColumnElement[Any]]:
+        """A :class:`_expression.ColumnCollection`
+        representing the columns that
+        this SELECT statement or similar construct returns in its result set,
+        not including :class:`_sql.TextClause` constructs.
+
+        For a :class:`_expression.CompoundSelect`, the
+        :attr:`_expression.CompoundSelect.selected_columns`
+        attribute returns the selected
+        columns of the first SELECT statement contained within the series of
+        statements within the set operation.
+
+        .. seealso::
+
+            :attr:`_sql.Select.selected_columns`
+
+        .. versionadded:: 1.4
+
+        """
+        return self.selects[0].selected_columns
+
+
+# backwards compat
+for elem in _CompoundSelectKeyword:
+    setattr(CompoundSelect, elem.name, elem)
+
+
+@CompileState.plugin_for("default", "select")
+class SelectState(util.MemoizedSlots, CompileState):
+    __slots__ = (
+        "from_clauses",
+        "froms",
+        "columns_plus_names",
+        "_label_resolve_dict",
+    )
+
+    if TYPE_CHECKING:
+        default_select_compile_options: CacheableOptions
+    else:
+
+        class default_select_compile_options(CacheableOptions):
+            _cache_key_traversal = []
+
+    if TYPE_CHECKING:
+
+        @classmethod
+        def get_plugin_class(
+            cls, statement: Executable
+        ) -> Type[SelectState]: ...
+
+    def __init__(
+        self,
+        statement: Select[Any],
+        compiler: SQLCompiler,
+        **kw: Any,
+    ):
+        self.statement = statement
+        self.from_clauses = statement._from_obj
+
+        for memoized_entities in statement._memoized_select_entities:
+            self._setup_joins(
+                memoized_entities._setup_joins, memoized_entities._raw_columns
+            )
+
+        if statement._setup_joins:
+            self._setup_joins(statement._setup_joins, statement._raw_columns)
+
+        self.froms = self._get_froms(statement)
+
+        self.columns_plus_names = statement._generate_columns_plus_names(True)
+
+    @classmethod
+    def _plugin_not_implemented(cls) -> NoReturn:
+        raise NotImplementedError(
+            "The default SELECT construct without plugins does not "
+            "implement this method."
+        )
+
+    @classmethod
+    def get_column_descriptions(
+        cls, statement: Select[Any]
+    ) -> List[Dict[str, Any]]:
+        return [
+            {
+                "name": name,
+                "type": element.type,
+                "expr": element,
+            }
+            for _, name, _, element, _ in (
+                statement._generate_columns_plus_names(False)
+            )
+        ]
+
+    @classmethod
+    def from_statement(
+        cls, statement: Select[Any], from_statement: roles.ReturnsRowsRole
+    ) -> ExecutableReturnsRows:
+        cls._plugin_not_implemented()
+
+    @classmethod
+    def get_columns_clause_froms(
+        cls, statement: Select[Any]
+    ) -> List[FromClause]:
+        return cls._normalize_froms(
+            itertools.chain.from_iterable(
+                element._from_objects for element in statement._raw_columns
+            )
+        )
+
+    @classmethod
+    def _column_naming_convention(
+        cls, label_style: SelectLabelStyle
+    ) -> _LabelConventionCallable:
+        table_qualified = label_style is LABEL_STYLE_TABLENAME_PLUS_COL
+
+        dedupe = label_style is not LABEL_STYLE_NONE
+
+        pa = prefix_anon_map()
+        names = set()
+
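+        # the returned callable maps each column expression to a label
+        # string; with deduplication enabled, repeated names within a
+        # single statement receive anonymized labels so collection keys
+        # remain unique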
+        def go(
+            c: Union[ColumnElement[Any], TextClause],
+            col_name: Optional[str] = None,
+        ) -> Optional[str]:
+            if is_text_clause(c):
+                return None
+            elif TYPE_CHECKING:
+                assert is_column_element(c)
+
+            if not dedupe:
+                name = c._proxy_key
+                if name is None:
+                    name = "_no_label"
+                return name
+
+            name = c._tq_key_label if table_qualified else c._proxy_key
+
+            if name is None:
+                name = "_no_label"
+                if name in names:
+                    return c._anon_label(name) % pa
+                else:
+                    names.add(name)
+                    return name
+
+            elif name in names:
+                return (
+                    c._anon_tq_key_label % pa
+                    if table_qualified
+                    else c._anon_key_label % pa
+                )
+            else:
+                names.add(name)
+                return name
+
+        return go
+
+    def _get_froms(self, statement: Select[Any]) -> List[FromClause]:
+        ambiguous_table_name_map: _AmbiguousTableNameMap
+        self._ambiguous_table_name_map = ambiguous_table_name_map = {}
+
+        return self._normalize_froms(
+            itertools.chain(
+                self.from_clauses,
+                itertools.chain.from_iterable(
+                    [
+                        element._from_objects
+                        for element in statement._raw_columns
+                    ]
+                ),
+                itertools.chain.from_iterable(
+                    [
+                        element._from_objects
+                        for element in statement._where_criteria
+                    ]
+                ),
+            ),
+            check_statement=statement,
+            ambiguous_table_name_map=ambiguous_table_name_map,
+        )
+
+    @classmethod
+    def _normalize_froms(
+        cls,
+        iterable_of_froms: Iterable[FromClause],
+        check_statement: Optional[Select[Any]] = None,
+        ambiguous_table_name_map: Optional[_AmbiguousTableNameMap] = None,
+    ) -> List[FromClause]:
+        """given an iterable of things to select FROM, reduce them to what
+        would actually render in the FROM clause of a SELECT.
+
+        This does the job of checking for JOINs, tables, etc. that in fact
+        overlap due to cloning, adaptation, or presence in overlapping joins.
+
+        """
+        seen: Set[FromClause] = set()
+        froms: List[FromClause] = []
+
+        for item in iterable_of_froms:
+            if is_subquery(item) and item.element is check_statement:
+                raise exc.InvalidRequestError(
+                    "select() construct refers to itself as a FROM"
+                )
+
+            if not seen.intersection(item._cloned_set):
+                froms.append(item)
+                seen.update(item._cloned_set)
+
+        if froms:
+            toremove = set(
+                itertools.chain.from_iterable(
+                    [_expand_cloned(f._hide_froms) for f in froms]
+                )
+            )
+            if toremove:
+                # filter out to FROM clauses not in the list,
+                # using a list to maintain ordering
+                froms = [f for f in froms if f not in toremove]
+
+            if ambiguous_table_name_map is not None:
+                ambiguous_table_name_map.update(
+                    (
+                        fr.name,
+                        _anonymous_label.safe_construct(
+                            hash(fr.name), fr.name
+                        ),
+                    )
+                    for item in froms
+                    for fr in item._from_objects
+                    if is_table(fr)
+                    and fr.schema
+                    and fr.name not in ambiguous_table_name_map
+                )
+
+        return froms
+
+    def _get_display_froms(
+        self,
+        explicit_correlate_froms: Optional[Sequence[FromClause]] = None,
+        implicit_correlate_froms: Optional[Sequence[FromClause]] = None,
+    ) -> List[FromClause]:
+        """Return the full list of 'from' clauses to be displayed.
+
+        Takes into account a set of existing froms which may be
+        rendered in the FROM clause of enclosing selects; this Select
+        may want to leave those absent if it is automatically
+        correlating.
+
+        """
+
+        froms = self.froms
+
+        if self.statement._correlate:
+            to_correlate = self.statement._correlate
+            if to_correlate:
+                froms = [
+                    f
+                    for f in froms
+                    if f
+                    not in _cloned_intersection(
+                        _cloned_intersection(
+                            froms, explicit_correlate_froms or ()
+                        ),
+                        to_correlate,
+                    )
+                ]
+
+        if self.statement._correlate_except is not None:
+            froms = [
+                f
+                for f in froms
+                if f
+                not in _cloned_difference(
+                    _cloned_intersection(
+                        froms, explicit_correlate_froms or ()
+                    ),
+                    self.statement._correlate_except,
+                )
+            ]
+
+        if (
+            self.statement._auto_correlate
+            and implicit_correlate_froms
+            and len(froms) > 1
+        ):
+            froms = [
+                f
+                for f in froms
+                if f
+                not in _cloned_intersection(froms, implicit_correlate_froms)
+            ]
+
+            if not len(froms):
+                raise exc.InvalidRequestError(
+                    "Select statement '%r"
+                    "' returned no FROM clauses "
+                    "due to auto-correlation; "
+                    "specify correlate(<tables>) "
+                    "to control correlation "
+                    "manually." % self.statement
+                )
+
+        return froms
+
+    def _memoized_attr__label_resolve_dict(
+        self,
+    ) -> Tuple[
+        Dict[str, ColumnElement[Any]],
+        Dict[str, ColumnElement[Any]],
+        Dict[str, ColumnElement[Any]],
+    ]:
+        with_cols: Dict[str, ColumnElement[Any]] = {
+            c._tq_label or c.key: c
+            for c in self.statement._all_selected_columns
+            if c._allow_label_resolve
+        }
+        only_froms: Dict[str, ColumnElement[Any]] = {
+            c.key: c  # type: ignore
+            for c in _select_iterables(self.froms)
+            if c._allow_label_resolve
+        }
+        only_cols: Dict[str, ColumnElement[Any]] = with_cols.copy()
+        for key, value in only_froms.items():
+            with_cols.setdefault(key, value)
+
+        return with_cols, only_froms, only_cols
+
+    @classmethod
+    def determine_last_joined_entity(
+        cls, stmt: Select[Any]
+    ) -> Optional[_JoinTargetElement]:
+        if stmt._setup_joins:
+            return stmt._setup_joins[-1][0]
+        else:
+            return None
+
+    @classmethod
+    def all_selected_columns(cls, statement: Select[Any]) -> _SelectIterable:
+        return list(_select_iterables(statement._raw_columns))
+
+    def _setup_joins(
+        self,
+        args: Tuple[_SetupJoinsElement, ...],
+        raw_columns: List[_ColumnsClauseElement],
+    ) -> None:
+        for right, onclause, left, flags in args:
+            if TYPE_CHECKING:
+                if onclause is not None:
+                    assert isinstance(onclause, ColumnElement)
+
+            isouter = flags["isouter"]
+            full = flags["full"]
+
+            if left is None:
+                (
+                    left,
+                    replace_from_obj_index,
+                ) = self._join_determine_implicit_left_side(
+                    raw_columns, left, right, onclause
+                )
+            else:
+                replace_from_obj_index = self._join_place_explicit_left_side(
+                    left
+                )
+
+            # these assertions can be made here, as if the right/onclause
+            # contained ORM elements, the select() statement would have been
+            # upgraded to an ORM select, and this method would not be called;
+            # orm.context.ORMSelectCompileState._join() would be
+            # used instead.
+            if TYPE_CHECKING:
+                assert isinstance(right, FromClause)
+                if onclause is not None:
+                    assert isinstance(onclause, ColumnElement)
+
+            if replace_from_obj_index is not None:
+                # splice into an existing element in the
+                # self._from_obj list
+                left_clause = self.from_clauses[replace_from_obj_index]
+
+                self.from_clauses = (
+                    self.from_clauses[:replace_from_obj_index]
+                    + (
+                        Join(
+                            left_clause,
+                            right,
+                            onclause,
+                            isouter=isouter,
+                            full=full,
+                        ),
+                    )
+                    + self.from_clauses[replace_from_obj_index + 1 :]
+                )
+            else:
+                assert left is not None
+                self.from_clauses = self.from_clauses + (
+                    Join(left, right, onclause, isouter=isouter, full=full),
+                )
+
+    @util.preload_module("sqlalchemy.sql.util")
+    def _join_determine_implicit_left_side(
+        self,
+        raw_columns: List[_ColumnsClauseElement],
+        left: Optional[FromClause],
+        right: _JoinTargetElement,
+        onclause: Optional[ColumnElement[Any]],
+    ) -> Tuple[Optional[FromClause], Optional[int]]:
+        """When join conditions don't express the left side explicitly,
+        determine if an existing FROM or entity in this query
+        can serve as the left hand side.
+
+        """
+
+        sql_util = util.preloaded.sql_util
+
+        replace_from_obj_index: Optional[int] = None
+
+        from_clauses = self.from_clauses
+
+        if from_clauses:
+            indexes: List[int] = sql_util.find_left_clause_to_join_from(
+                from_clauses, right, onclause
+            )
+
+            if len(indexes) == 1:
+                replace_from_obj_index = indexes[0]
+                left = from_clauses[replace_from_obj_index]
+        else:
+            potential = {}
+            statement = self.statement
+
+            for from_clause in itertools.chain(
+                itertools.chain.from_iterable(
+                    [element._from_objects for element in raw_columns]
+                ),
+                itertools.chain.from_iterable(
+                    [
+                        element._from_objects
+                        for element in statement._where_criteria
+                    ]
+                ),
+            ):
+                potential[from_clause] = ()
+
+            all_clauses = list(potential.keys())
+            indexes = sql_util.find_left_clause_to_join_from(
+                all_clauses, right, onclause
+            )
+
+            if len(indexes) == 1:
+                left = all_clauses[indexes[0]]
+
+        if len(indexes) > 1:
+            raise exc.InvalidRequestError(
+                "Can't determine which FROM clause to join "
+                "from, there are multiple FROMS which can "
+                "join to this entity. Please use the .select_from() "
+                "method to establish an explicit left side, as well as "
+                "providing an explicit ON clause if not present already to "
+                "help resolve the ambiguity."
+            )
+        elif not indexes:
+            raise exc.InvalidRequestError(
+                "Don't know how to join to %r. "
+                "Please use the .select_from() "
+                "method to establish an explicit left side, as well as "
+                "providing an explicit ON clause if not present already to "
+                "help resolve the ambiguity." % (right,)
+            )
+        return left, replace_from_obj_index
+
+    @util.preload_module("sqlalchemy.sql.util")
+    def _join_place_explicit_left_side(
+        self, left: FromClause
+    ) -> Optional[int]:
+        replace_from_obj_index: Optional[int] = None
+
+        sql_util = util.preloaded.sql_util
+
+        from_clauses = list(self.statement._iterate_from_elements())
+
+        if from_clauses:
+            indexes: List[int] = sql_util.find_left_clause_that_matches_given(
+                self.from_clauses, left
+            )
+        else:
+            indexes = []
+
+        if len(indexes) > 1:
+            raise exc.InvalidRequestError(
+                "Can't identify which entity in which to assign the "
+                "left side of this join.   Please use a more specific "
+                "ON clause."
+            )
+
+        # have an index, means the left side is already present in
+        # an existing FROM in the self._from_obj tuple
+        if indexes:
+            replace_from_obj_index = indexes[0]
+
+        # no index, means we need to add a new element to the
+        # self._from_obj tuple
+
+        return replace_from_obj_index
+
+
+class _SelectFromElements:
+    __slots__ = ()
+
+    _raw_columns: List[_ColumnsClauseElement]
+    _where_criteria: Tuple[ColumnElement[Any], ...]
+    _from_obj: Tuple[FromClause, ...]
+
+    def _iterate_from_elements(self) -> Iterator[FromClause]:
+        # note this does not include elements
+        # in _setup_joins
+
+        seen = set()
+        for element in self._raw_columns:
+            for fr in element._from_objects:
+                if fr in seen:
+                    continue
+                seen.add(fr)
+                yield fr
+        for element in self._where_criteria:
+            for fr in element._from_objects:
+                if fr in seen:
+                    continue
+                seen.add(fr)
+                yield fr
+        for element in self._from_obj:
+            if element in seen:
+                continue
+            seen.add(element)
+            yield element
+
+
+class _MemoizedSelectEntities(
+    cache_key.HasCacheKey, traversals.HasCopyInternals, visitors.Traversible
+):
+    """represents partial state from a Select object, for the case
+    where Select.columns() has redefined the set of columns/entities the
+    statement will be SELECTing from.  This object represents
+    the entities from the SELECT before that transformation was applied,
+    so that transformations that were made in terms of the SELECT at that
+    time, such as join() as well as options(), can access the correct context.
+
+    In previous SQLAlchemy versions, this wasn't needed because these
+    constructs calculated everything up front, like when you called join()
+    or options(), it did everything to figure out how that would translate
+    into specific SQL constructs that would be ready to send directly to the
+    SQL compiler when needed.  But as of
+    1.4, all of that stuff is done in the compilation phase, during the
+    "compile state" portion of the process, so that the work can all be
+    cached.  So it needs to be able to resolve joins/options2 based on what
+    the list of entities was when those methods were called.
+
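+    A minimal sketch of the scenario this supports (``user_table`` and
+    ``address_table`` are hypothetical)::
+
+        stmt = select(user_table).join(address_table)
+
+        # the entities are replaced; the prior entities, along with the
+        # join() recorded above, are memoized in a _MemoizedSelectEntities
+        stmt = stmt.with_only_columns(user_table.c.id)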
+
+    """
+
+    __visit_name__ = "memoized_select_entities"
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("_raw_columns", InternalTraversal.dp_clauseelement_list),
+        ("_setup_joins", InternalTraversal.dp_setup_join_tuple),
+        ("_with_options", InternalTraversal.dp_executable_options),
+    ]
+
+    _is_clone_of: Optional[ClauseElement]
+    _raw_columns: List[_ColumnsClauseElement]
+    _setup_joins: Tuple[_SetupJoinsElement, ...]
+    _with_options: Tuple[ExecutableOption, ...]
+
+    _annotations = util.EMPTY_DICT
+
+    def _clone(self, **kw: Any) -> Self:
+        c = self.__class__.__new__(self.__class__)
+        c.__dict__ = dict(self.__dict__)
+
+        c._is_clone_of = self.__dict__.get("_is_clone_of", self)
+        return c
+
+    @classmethod
+    def _generate_for_statement(cls, select_stmt: Select[Any]) -> None:
+        if select_stmt._setup_joins or select_stmt._with_options:
+            self = _MemoizedSelectEntities()
+            self._raw_columns = select_stmt._raw_columns
+            self._setup_joins = select_stmt._setup_joins
+            self._with_options = select_stmt._with_options
+
+            select_stmt._memoized_select_entities += (self,)
+            select_stmt._raw_columns = []
+            select_stmt._setup_joins = select_stmt._with_options = ()
+
+
+class Select(
+    HasPrefixes,
+    HasSuffixes,
+    HasHints,
+    HasCompileState,
+    _SelectFromElements,
+    GenerativeSelect,
+    TypedReturnsRows[_TP],
+):
+    """Represents a ``SELECT`` statement.
+
+    The :class:`_sql.Select` object is normally constructed using the
+    :func:`_sql.select` function.  See that function for details.
+
+    .. seealso::
+
+        :func:`_sql.select`
+
+        :ref:`tutorial_selecting_data` - in the 2.0 tutorial
+
+    """
+
+    __visit_name__ = "select"
+
+    _setup_joins: Tuple[_SetupJoinsElement, ...] = ()
+    _memoized_select_entities: Tuple[TODO_Any, ...] = ()
+
+    _raw_columns: List[_ColumnsClauseElement]
+
+    _distinct: bool = False
+    _distinct_on: Tuple[ColumnElement[Any], ...] = ()
+    _correlate: Tuple[FromClause, ...] = ()
+    _correlate_except: Optional[Tuple[FromClause, ...]] = None
+    _where_criteria: Tuple[ColumnElement[Any], ...] = ()
+    _having_criteria: Tuple[ColumnElement[Any], ...] = ()
+    _from_obj: Tuple[FromClause, ...] = ()
+    _auto_correlate = True
+    _is_select_statement = True
+    _compile_options: CacheableOptions = (
+        SelectState.default_select_compile_options
+    )
+
+    _traverse_internals: _TraverseInternalsType = (
+        [
+            ("_raw_columns", InternalTraversal.dp_clauseelement_list),
+            (
+                "_memoized_select_entities",
+                InternalTraversal.dp_memoized_select_entities,
+            ),
+            ("_from_obj", InternalTraversal.dp_clauseelement_list),
+            ("_where_criteria", InternalTraversal.dp_clauseelement_tuple),
+            ("_having_criteria", InternalTraversal.dp_clauseelement_tuple),
+            ("_order_by_clauses", InternalTraversal.dp_clauseelement_tuple),
+            ("_group_by_clauses", InternalTraversal.dp_clauseelement_tuple),
+            ("_setup_joins", InternalTraversal.dp_setup_join_tuple),
+            ("_correlate", InternalTraversal.dp_clauseelement_tuple),
+            ("_correlate_except", InternalTraversal.dp_clauseelement_tuple),
+            ("_limit_clause", InternalTraversal.dp_clauseelement),
+            ("_offset_clause", InternalTraversal.dp_clauseelement),
+            ("_fetch_clause", InternalTraversal.dp_clauseelement),
+            ("_fetch_clause_options", InternalTraversal.dp_plain_dict),
+            ("_for_update_arg", InternalTraversal.dp_clauseelement),
+            ("_distinct", InternalTraversal.dp_boolean),
+            ("_distinct_on", InternalTraversal.dp_clauseelement_tuple),
+            ("_label_style", InternalTraversal.dp_plain_obj),
+        ]
+        + HasCTE._has_ctes_traverse_internals
+        + HasPrefixes._has_prefixes_traverse_internals
+        + HasSuffixes._has_suffixes_traverse_internals
+        + HasHints._has_hints_traverse_internals
+        + SupportsCloneAnnotations._clone_annotations_traverse_internals
+        + Executable._executable_traverse_internals
+    )
+
+    _cache_key_traversal: _CacheKeyTraversalType = _traverse_internals + [
+        ("_compile_options", InternalTraversal.dp_has_cache_key)
+    ]
+
+    _compile_state_factory: Type[SelectState]
+
+    @classmethod
+    def _create_raw_select(cls, **kw: Any) -> Select[Any]:
+        """Create a :class:`.Select` using raw ``__new__`` with no coercions.
+
+        Used internally to build up :class:`.Select` constructs with
+        pre-established state.
+
+        """
+
+        stmt = Select.__new__(Select)
+        stmt.__dict__.update(kw)
+        return stmt
+
+    def __init__(self, *entities: _ColumnsClauseArgument[Any]):
+        r"""Construct a new :class:`_expression.Select`.
+
+        The public constructor for :class:`_expression.Select` is the
+        :func:`_sql.select` function.
+
+        """
+        self._raw_columns = [
+            coercions.expect(
+                roles.ColumnsClauseRole, ent, apply_propagate_attrs=self
+            )
+            for ent in entities
+        ]
+
+        GenerativeSelect.__init__(self)
+
+    def _scalar_type(self) -> TypeEngine[Any]:
+        if not self._raw_columns:
+            return NULLTYPE
+        elem = self._raw_columns[0]
+        cols = list(elem._select_iterable)
+        return cols[0].type
+
+    def filter(self, *criteria: _ColumnExpressionArgument[bool]) -> Self:
+        """A synonym for the :meth:`_sql.Select.where` method."""
+
+        return self.where(*criteria)
+
+    def _filter_by_zero(
+        self,
+    ) -> Union[
+        FromClause, _JoinTargetProtocol, ColumnElement[Any], TextClause
+    ]:
+        if self._setup_joins:
+            meth = SelectState.get_plugin_class(
+                self
+            ).determine_last_joined_entity
+            _last_joined_entity = meth(self)
+            if _last_joined_entity is not None:
+                return _last_joined_entity
+
+        if self._from_obj:
+            return self._from_obj[0]
+
+        return self._raw_columns[0]
+
+    if TYPE_CHECKING:
+
+        @overload
+        def scalar_subquery(
+            self: Select[Tuple[_MAYBE_ENTITY]],
+        ) -> ScalarSelect[Any]: ...
+
+        @overload
+        def scalar_subquery(
+            self: Select[Tuple[_NOT_ENTITY]],
+        ) -> ScalarSelect[_NOT_ENTITY]: ...
+
+        @overload
+        def scalar_subquery(self) -> ScalarSelect[Any]: ...
+
+        def scalar_subquery(self) -> ScalarSelect[Any]: ...
+
+    def filter_by(self, **kwargs: Any) -> Self:
+        r"""apply the given filtering criterion as a WHERE clause
+        to this select.
+
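+        E.g., a sketch assuming a hypothetical ``user_table``; keyword
+        names are resolved against the entity namespace of the statement's
+        leading (or most recently joined) entity::
+
+            stmt = select(user_table).filter_by(name="ed")
+
+            # roughly equivalent to:
+            # select(user_table).where(user_table.c.name == "ed")
+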
+        """
+        from_entity = self._filter_by_zero()
+
+        clauses = [
+            _entity_namespace_key(from_entity, key) == value
+            for key, value in kwargs.items()
+        ]
+        return self.filter(*clauses)
+
+    @property
+    def column_descriptions(self) -> Any:
+        """Return a :term:`plugin-enabled` 'column descriptions' structure
+        referring to the columns which are SELECTed by this statement.
+
+        This attribute is generally useful when using the ORM, as an
+        extended structure which includes information about mapped
+        entities is returned.  The section :ref:`queryguide_inspection`
+        contains more background.
+
+        For a Core-only statement, the structure returned by this accessor
+        is derived from the same objects that are returned by the
+        :attr:`.Select.selected_columns` accessor, formatted as a list of
+        dictionaries which contain the keys ``name``, ``type`` and ``expr``,
+        which indicate the column expressions to be selected::
+
+            >>> stmt = select(user_table)
+            >>> stmt.column_descriptions
+            [
+                {
+                    'name': 'id',
+                    'type': Integer(),
+                    'expr': Column('id', Integer(), ...)},
+                {
+                    'name': 'name',
+                    'type': String(length=30),
+                    'expr': Column('name', String(length=30), ...)}
+            ]
+
+        .. versionchanged:: 1.4.33 The :attr:`.Select.column_descriptions`
+           attribute returns a structure for a Core-only set of entities,
+           not just ORM-only entities.
+
+        .. seealso::
+
+            :attr:`.UpdateBase.entity_description` - entity information for
+            an :func:`.insert`, :func:`.update`, or :func:`.delete`
+
+            :ref:`queryguide_inspection` - ORM background
+
+        """
+        meth = SelectState.get_plugin_class(self).get_column_descriptions
+        return meth(self)
+
+    def from_statement(
+        self, statement: roles.ReturnsRowsRole
+    ) -> ExecutableReturnsRows:
+        """Apply the columns which this :class:`.Select` would select
+        onto another statement.
+
+        This operation is :term:`plugin-specific` and will raise a "not
+        supported" exception if this :class:`_sql.Select` does not select
+        from plugin-enabled entities.
+
+        The statement is typically either a :func:`_expression.text` or
+        :func:`_expression.select` construct, and should return the set of
+        columns appropriate to the entities represented by this
+        :class:`.Select`.
+
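+        E.g., a sketch assuming a hypothetical ORM-mapped class ``User``::
+
+            from sqlalchemy import text
+
+            stmt = select(User).from_statement(
+                text("SELECT * FROM user_account WHERE name = :name")
+            )
+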
+        .. seealso::
+
+            :ref:`orm_queryguide_selecting_text` - usage examples in the
+            ORM Querying Guide
+
+        """
+        meth = SelectState.get_plugin_class(self).from_statement
+        return meth(self, statement)
+
+    @_generative
+    def join(
+        self,
+        target: _JoinTargetArgument,
+        onclause: Optional[_OnClauseArgument] = None,
+        *,
+        isouter: bool = False,
+        full: bool = False,
+    ) -> Self:
+        r"""Create a SQL JOIN against this :class:`_expression.Select`
+        object's criterion
+        and apply generatively, returning the newly resulting
+        :class:`_expression.Select`.
+
+        E.g.::
+
+            stmt = select(user_table).join(
+                address_table, user_table.c.id == address_table.c.user_id
+            )
+
+        The above statement generates SQL similar to:
+
+        .. sourcecode:: sql
+
+            SELECT user.id, user.name
+            FROM user
+            JOIN address ON user.id = address.user_id
+
+        .. versionchanged:: 1.4 :meth:`_expression.Select.join` now creates
+           a :class:`_sql.Join` object between a :class:`_sql.FromClause`
+           source that is within the FROM clause of the existing SELECT,
+           and a given target :class:`_sql.FromClause`, and then adds
+           this :class:`_sql.Join` to the FROM clause of the newly generated
+           SELECT statement.    This is completely reworked from the behavior
+           in 1.3, which would instead create a subquery of the entire
+           :class:`_expression.Select` and then join that subquery to the
+           target.
+
+           This is a **backwards incompatible change** as the previous behavior
+           was mostly useless, producing an unnamed subquery rejected by
+           most databases in any case.   The new behavior is modeled after
+           that of the very successful :meth:`_orm.Query.join` method in the
+           ORM, in order to support the functionality of :class:`_orm.Query`
+           being available by using a :class:`_sql.Select` object with an
+           :class:`_orm.Session`.
+
+           See the notes for this change at :ref:`change_select_join`.
+
+
+        :param target: target table to join towards
+
+        :param onclause: ON clause of the join.  If omitted, an ON clause
+         is generated automatically based on the :class:`_schema.ForeignKey`
+         linkages between the two tables, if one can be unambiguously
+         determined, otherwise an error is raised.
+
+        :param isouter: if True, generate LEFT OUTER join.  Same as
+         :meth:`_expression.Select.outerjoin`.
+
+        :param full: if True, generate FULL OUTER join.
+
+        .. seealso::
+
+            :ref:`tutorial_select_join` - in the :doc:`/tutorial/index`
+
+            :ref:`orm_queryguide_joins` - in the :ref:`queryguide_toplevel`
+
+            :meth:`_expression.Select.join_from`
+
+            :meth:`_expression.Select.outerjoin`
+
+        """  # noqa: E501
+        join_target = coercions.expect(
+            roles.JoinTargetRole, target, apply_propagate_attrs=self
+        )
+        if onclause is not None:
+            onclause_element = coercions.expect(roles.OnClauseRole, onclause)
+        else:
+            onclause_element = None
+
+        self._setup_joins += (
+            (
+                join_target,
+                onclause_element,
+                None,
+                {"isouter": isouter, "full": full},
+            ),
+        )
+        return self
+
+    def outerjoin_from(
+        self,
+        from_: _FromClauseArgument,
+        target: _JoinTargetArgument,
+        onclause: Optional[_OnClauseArgument] = None,
+        *,
+        full: bool = False,
+    ) -> Self:
+        r"""Create a SQL LEFT OUTER JOIN against this
+        :class:`_expression.Select` object's criterion and apply generatively,
+        returning the newly resulting :class:`_expression.Select`.
+
+        Usage is the same as that of :meth:`_selectable.Select.join_from`.
+
+        """
+        return self.join_from(
+            from_, target, onclause=onclause, isouter=True, full=full
+        )
+
+    @_generative
+    def join_from(
+        self,
+        from_: _FromClauseArgument,
+        target: _JoinTargetArgument,
+        onclause: Optional[_OnClauseArgument] = None,
+        *,
+        isouter: bool = False,
+        full: bool = False,
+    ) -> Self:
+        r"""Create a SQL JOIN against this :class:`_expression.Select`
+        object's criterion
+        and apply generatively, returning the newly resulting
+        :class:`_expression.Select`.
+
+        E.g.::
+
+            stmt = select(user_table, address_table).join_from(
+                user_table, address_table, user_table.c.id == address_table.c.user_id
+            )
+
+        The above statement generates SQL similar to:
+
+        .. sourcecode:: sql
+
+            SELECT user.id, user.name, address.id, address.email, address.user_id
+            FROM user JOIN address ON user.id = address.user_id
+
+        .. versionadded:: 1.4
+
+        :param from\_: the left side of the join, will be rendered in the
+         FROM clause and is roughly equivalent to using the
+         :meth:`.Select.select_from` method.
+
+        :param target: target table to join towards
+
+        :param onclause: ON clause of the join.
+
+        :param isouter: if True, generate LEFT OUTER join.  Same as
+         :meth:`_expression.Select.outerjoin`.
+
+        :param full: if True, generate FULL OUTER join.
+
+        .. seealso::
+
+            :ref:`tutorial_select_join` - in the :doc:`/tutorial/index`
+
+            :ref:`orm_queryguide_joins` - in the :ref:`queryguide_toplevel`
+
+            :meth:`_expression.Select.join`
+
+        """  # noqa: E501
+
+        # note the order of parsing from vs. target is important here, as we
+        # are also deriving the source of the plugin (i.e. the subject mapper
+        # in an ORM query) which should favor the "from_" over the "target"
+
+        from_ = coercions.expect(
+            roles.FromClauseRole, from_, apply_propagate_attrs=self
+        )
+        join_target = coercions.expect(
+            roles.JoinTargetRole, target, apply_propagate_attrs=self
+        )
+        if onclause is not None:
+            onclause_element = coercions.expect(roles.OnClauseRole, onclause)
+        else:
+            onclause_element = None
+
+        self._setup_joins += (
+            (
+                join_target,
+                onclause_element,
+                from_,
+                {"isouter": isouter, "full": full},
+            ),
+        )
+        return self
+
+    def outerjoin(
+        self,
+        target: _JoinTargetArgument,
+        onclause: Optional[_OnClauseArgument] = None,
+        *,
+        full: bool = False,
+    ) -> Self:
+        """Create a left outer join.
+
+        Parameters are the same as those of :meth:`_expression.Select.join`.
+
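+        E.g., a sketch assuming hypothetical ``user_table`` and
+        ``address_table`` with a :class:`_schema.ForeignKey` between
+        them::
+
+            stmt = select(user_table).outerjoin(address_table)
+
+            # renders a LEFT OUTER JOIN, with the ON clause derived
+            # from the ForeignKey linkage between the two tables
+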
+        .. versionchanged:: 1.4 :meth:`_expression.Select.outerjoin` now
+           creates a :class:`_sql.Join` object between a
+           :class:`_sql.FromClause` source that is within the FROM clause of
+           the existing SELECT, and a given target :class:`_sql.FromClause`,
+           and then adds this :class:`_sql.Join` to the FROM clause of the
+           newly generated SELECT statement.    This is completely reworked
+           from the behavior in 1.3, which would instead create a subquery of
+           the entire
+           :class:`_expression.Select` and then join that subquery to the
+           target.
+
+           This is a **backwards incompatible change** as the previous behavior
+           was mostly useless, producing an unnamed subquery rejected by
+           most databases in any case.   The new behavior is modeled after
+           that of the very successful :meth:`_orm.Query.join` method in the
+           ORM, in order to support the functionality of :class:`_orm.Query`
+           being available by using a :class:`_sql.Select` object with an
+           :class:`_orm.Session`.
+
+           See the notes for this change at :ref:`change_select_join`.
+
+        .. seealso::
+
+            :ref:`tutorial_select_join` - in the :doc:`/tutorial/index`
+
+            :ref:`orm_queryguide_joins` - in the :ref:`queryguide_toplevel`
+
+            :meth:`_expression.Select.join`
+
+        """
+        return self.join(target, onclause=onclause, isouter=True, full=full)
+
+    def get_final_froms(self) -> Sequence[FromClause]:
+        """Compute the final displayed list of :class:`_expression.FromClause`
+        elements.
+
+        This method will run through the full computation required to
+        determine what FROM elements will be displayed in the resulting
+        SELECT statement, including shadowing individual tables with
+        JOIN objects, as well as full computation for ORM use cases including
+        eager loading clauses.
+
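+        E.g., a sketch assuming hypothetical ``user_table`` and
+        ``address_table``; the joined tables are shadowed by the
+        :class:`_expression.Join` itself::
+
+            stmt = select(user_table).join(address_table)
+
+            # a single Join object, rather than the two tables
+            froms = stmt.get_final_froms()
+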
+        For ORM use, this accessor returns the **post-compilation**
+        list of FROM objects; this collection will include elements such as
+        eagerly loaded tables and joins.  The objects will **not** be
+        ORM enabled and will not work as a replacement for the
+        :meth:`_sql.Select.select_froms` collection; additionally, the
+        method does not perform well for an ORM-enabled statement as it
+        will incur the full ORM construction process.
+
+        To retrieve the FROM list that's implied by the "columns" collection
+        passed to the :class:`_sql.Select` originally, use the
+        :attr:`_sql.Select.columns_clause_froms` accessor.
+
+        To select from an alternative set of columns while maintaining the
+        FROM list, use the :meth:`_sql.Select.with_only_columns` method and
+        pass the
+        :paramref:`_sql.Select.with_only_columns.maintain_column_froms`
+        parameter.
+
+        .. versionadded:: 1.4.23 - the :meth:`_sql.Select.get_final_froms`
+           method replaces the previous :attr:`_sql.Select.froms` accessor,
+           which is deprecated.
+
+        .. seealso::
+
+            :attr:`_sql.Select.columns_clause_froms`
+
+        """
+        compiler = self._default_compiler()
+
+        return self._compile_state_factory(self, compiler)._get_display_froms()
+
+    @property
+    @util.deprecated(
+        "1.4.23",
+        "The :attr:`_expression.Select.froms` attribute is moved to "
+        "the :meth:`_expression.Select.get_final_froms` method.",
+    )
+    def froms(self) -> Sequence[FromClause]:
+        """Return the displayed list of :class:`_expression.FromClause`
+        elements.
+
+        """
+        return self.get_final_froms()
+
+    @property
+    def columns_clause_froms(self) -> List[FromClause]:
+        """Return the set of :class:`_expression.FromClause` objects implied
+        by the columns clause of this SELECT statement.
+
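+        E.g., a sketch with hypothetical ``table1`` and ``table2``::
+
+            stmt = select(table1.c.a, table2.c.b)
+
+            # the two tables implied by the columns clause
+            stmt.columns_clause_froms  # [table1, table2]
+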
+        .. versionadded:: 1.4.23
+
+        .. seealso::
+
+            :attr:`_sql.Select.froms` - "final" FROM list taking the full
+            statement into account
+
+            :meth:`_sql.Select.with_only_columns` - makes use of this
+            collection to set up a new FROM list
+
+        """
+
+        return SelectState.get_plugin_class(self).get_columns_clause_froms(
+            self
+        )
+
+    @property
+    def inner_columns(self) -> _SelectIterable:
+        """An iterator of all :class:`_expression.ColumnElement`
+        expressions which would
+        be rendered into the columns clause of the resulting SELECT statement.
+
+        This attribute is legacy as of 1.4 and is superseded by the
+        :attr:`_expression.Select.exported_columns` collection.
+
+        """
+
+        return iter(self._all_selected_columns)
+
+    def is_derived_from(self, fromclause: Optional[FromClause]) -> bool:
+        if fromclause is not None and self in fromclause._cloned_set:
+            return True
+
+        for f in self._iterate_from_elements():
+            if f.is_derived_from(fromclause):
+                return True
+        return False
+
+    def _copy_internals(
+        self, clone: _CloneCallableType = _clone, **kw: Any
+    ) -> None:
+        # Select() object has been cloned and probably adapted by the
+        # given clone function.  Apply the cloning function to internal
+        # objects
+
+        # 1. keep a dictionary of the froms we've cloned, and what
+        # they've become.  This allows us to ensure the same cloned from
+        # is used when other items such as columns are "cloned"
+
+        all_the_froms = set(
+            itertools.chain(
+                _from_objects(*self._raw_columns),
+                _from_objects(*self._where_criteria),
+                _from_objects(*[elem[0] for elem in self._setup_joins]),
+            )
+        )
+
+        # do a clone for the froms we've gathered.  what is important here
+        # is if any of the things we are selecting from, like tables,
+        # were converted into Join objects.   if so, these need to be
+        # added to _from_obj explicitly, because otherwise they won't be
+        # part of the new state, as they don't associate themselves with
+        # their columns.
+        new_froms = {f: clone(f, **kw) for f in all_the_froms}
+
+        # 2. copy FROM collections, adding in joins that we've created.
+        existing_from_obj = [clone(f, **kw) for f in self._from_obj]
+        add_froms = (
+            {f for f in new_froms.values() if isinstance(f, Join)}
+            .difference(all_the_froms)
+            .difference(existing_from_obj)
+        )
+
+        self._from_obj = tuple(existing_from_obj) + tuple(add_froms)
+
+        # 3. clone everything else, making sure we use columns
+        # corresponding to the froms we just made.
+        def replace(
+            obj: Union[BinaryExpression[Any], ColumnClause[Any]],
+            **kw: Any,
+        ) -> Optional[KeyedColumnElement[Any]]:
+            if isinstance(obj, ColumnClause) and obj.table in new_froms:
+                newelem = new_froms[obj.table].corresponding_column(obj)
+                return newelem
+            return None
+
+        kw["replace"] = replace
+
+        # copy everything else.   for table-ish things like correlate,
+        # correlate_except, setup_joins, these clone normally.  For
+        # column-expression oriented things like raw_columns, where_criteria,
+        # order by, we get this from the new froms.
+        super()._copy_internals(clone=clone, omit_attrs=("_from_obj",), **kw)
+
+        self._reset_memoizations()
+
+    def get_children(self, **kw: Any) -> Iterable[ClauseElement]:
+        return itertools.chain(
+            super().get_children(
+                omit_attrs=("_from_obj", "_correlate", "_correlate_except"),
+                **kw,
+            ),
+            self._iterate_from_elements(),
+        )
+
+    @_generative
+    def add_columns(
+        self, *entities: _ColumnsClauseArgument[Any]
+    ) -> Select[Any]:
+        r"""Return a new :func:`_expression.select` construct with
+        the given entities appended to its columns clause.
+
+        E.g.::
+
+            my_select = my_select.add_columns(table.c.new_column)
+
+        The original expressions in the columns clause remain in place.
+        To replace the original expressions with new ones, see the method
+        :meth:`_expression.Select.with_only_columns`.
+
+        :param \*entities: column, table, or other entity expressions to be
+         added to the columns clause
+
+        .. seealso::
+
+            :meth:`_expression.Select.with_only_columns` - replaces existing
+            expressions rather than appending.
+
+            :ref:`orm_queryguide_select_multiple_entities` - ORM-centric
+            example
+
+        """
+        self._reset_memoizations()
+
+        self._raw_columns = self._raw_columns + [
+            coercions.expect(
+                roles.ColumnsClauseRole, column, apply_propagate_attrs=self
+            )
+            for column in entities
+        ]
+        return self
+
+    def _set_entities(
+        self, entities: Iterable[_ColumnsClauseArgument[Any]]
+    ) -> None:
+        self._raw_columns = [
+            coercions.expect(
+                roles.ColumnsClauseRole, ent, apply_propagate_attrs=self
+            )
+            for ent in util.to_list(entities)
+        ]
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`_expression.Select.column` method is deprecated and will "
+        "be removed in a future release.  Please use "
+        ":meth:`_expression.Select.add_columns`",
+    )
+    def column(self, column: _ColumnsClauseArgument[Any]) -> Select[Any]:
+        """Return a new :func:`_expression.select` construct with
+        the given column expression added to its columns clause.
+
+        E.g.::
+
+            my_select = my_select.column(table.c.new_column)
+
+        See the documentation for
+        :meth:`_expression.Select.with_only_columns`
+        for guidelines on adding /replacing the columns of a
+        :class:`_expression.Select` object.
+
+        """
+        return self.add_columns(column)
+
+    @util.preload_module("sqlalchemy.sql.util")
+    def reduce_columns(self, only_synonyms: bool = True) -> Select[Any]:
+        """Return a new :func:`_expression.select` construct with redundantly
+        named, equivalently-valued columns removed from the columns clause.
+
+        "Redundant" here means two columns where one refers to the
+        other either based on foreign key, or via a simple equality
+        comparison in the WHERE clause of the statement.   The primary purpose
+        of this method is to automatically construct a select statement
+        with all uniquely-named columns, without the need to use
+        table-qualified labels as
+        :meth:`_expression.Select.set_label_style`
+        does.
+
+        When columns are omitted based on foreign key, the referred-to
+        column is the one that's kept.  When columns are omitted based on
+        WHERE equivalence, the first column in the columns clause is the
+        one that's kept.
+
+        :param only_synonyms: when True, limit the removal of columns
+         to those which have the same name as the equivalent.   Otherwise,
+         all columns that are equivalent to another are removed.
+
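+        E.g., a sketch assuming hypothetical ``people`` and ``engineers``
+        tables, where ``engineers.c.id`` has a :class:`_schema.ForeignKey`
+        to ``people.c.id``::
+
+            stmt = select(people, engineers).where(
+                people.c.id == engineers.c.id
+            )
+
+            # engineers.id is removed; people.id, the referred-to
+            # column, is kept
+            stmt = stmt.reduce_columns()
+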
+        """
+        woc: Select[Any]
+        woc = self.with_only_columns(
+            *util.preloaded.sql_util.reduce_columns(
+                self._all_selected_columns,
+                only_synonyms=only_synonyms,
+                *(self._where_criteria + self._from_obj),
+            )
+        )
+        return woc
+
+    # START OVERLOADED FUNCTIONS self.with_only_columns Select 1-8 ", *, maintain_column_froms: bool =..." # noqa: E501
+
+    # code within this block is **programmatically,
+    # statically generated** by tools/generate_tuple_map_overloads.py
+
+    @overload
+    def with_only_columns(
+        self, __ent0: _TCCA[_T0], *, maintain_column_froms: bool = ...
+    ) -> Select[Tuple[_T0]]: ...
+
+    @overload
+    def with_only_columns(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        *,
+        maintain_column_froms: bool = ...,
+    ) -> Select[Tuple[_T0, _T1]]: ...
+
+    @overload
+    def with_only_columns(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        *,
+        maintain_column_froms: bool = ...,
+    ) -> Select[Tuple[_T0, _T1, _T2]]: ...
+
+    @overload
+    def with_only_columns(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        *,
+        maintain_column_froms: bool = ...,
+    ) -> Select[Tuple[_T0, _T1, _T2, _T3]]: ...
+
+    @overload
+    def with_only_columns(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+        *,
+        maintain_column_froms: bool = ...,
+    ) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4]]: ...
+
+    @overload
+    def with_only_columns(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+        __ent5: _TCCA[_T5],
+        *,
+        maintain_column_froms: bool = ...,
+    ) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5]]: ...
+
+    @overload
+    def with_only_columns(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+        __ent5: _TCCA[_T5],
+        __ent6: _TCCA[_T6],
+        *,
+        maintain_column_froms: bool = ...,
+    ) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]: ...
+
+    @overload
+    def with_only_columns(
+        self,
+        __ent0: _TCCA[_T0],
+        __ent1: _TCCA[_T1],
+        __ent2: _TCCA[_T2],
+        __ent3: _TCCA[_T3],
+        __ent4: _TCCA[_T4],
+        __ent5: _TCCA[_T5],
+        __ent6: _TCCA[_T6],
+        __ent7: _TCCA[_T7],
+        *,
+        maintain_column_froms: bool = ...,
+    ) -> Select[Tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]: ...
+
+    # END OVERLOADED FUNCTIONS self.with_only_columns
+
+    @overload
+    def with_only_columns(
+        self,
+        *entities: _ColumnsClauseArgument[Any],
+        maintain_column_froms: bool = False,
+        **__kw: Any,
+    ) -> Select[Any]: ...
+
+    @_generative
+    def with_only_columns(
+        self,
+        *entities: _ColumnsClauseArgument[Any],
+        maintain_column_froms: bool = False,
+        **__kw: Any,
+    ) -> Select[Any]:
+        r"""Return a new :func:`_expression.select` construct with its columns
+        clause replaced with the given entities.
+
+        By default, this method behaves exactly as if the original
+        :func:`_expression.select` had been called with the given entities.
+        E.g. a statement::
+
+            s = select(table1.c.a, table1.c.b)
+            s = s.with_only_columns(table1.c.b)
+
+        should be exactly equivalent to::
+
+            s = select(table1.c.b)
+
+        In this mode of operation, :meth:`_sql.Select.with_only_columns`
+        will also dynamically alter the FROM clause of the
+        statement if it is not explicitly stated.
+        To maintain the existing set of FROMs including those implied by the
+        current columns clause, add the
+        :paramref:`_sql.Select.with_only_columns.maintain_column_froms`
+        parameter::
+
+            s = select(table1.c.a, table2.c.b)
+            s = s.with_only_columns(table1.c.a, maintain_column_froms=True)
+
+        The above parameter performs a transfer of the effective FROMs
+        in the columns collection to the :meth:`_sql.Select.select_from`
+        method, as though the following were invoked::
+
+            s = select(table1.c.a, table2.c.b)
+            s = s.select_from(table1, table2).with_only_columns(table1.c.a)
+
+        The :paramref:`_sql.Select.with_only_columns.maintain_column_froms`
+        parameter makes use of the :attr:`_sql.Select.columns_clause_froms`
+        collection and performs an operation equivalent to the following::
+
+            s = select(table1.c.a, table2.c.b)
+            s = s.select_from(*s.columns_clause_froms).with_only_columns(table1.c.a)
+
+        :param \*entities: column expressions to be used.
+
+        :param maintain_column_froms: boolean parameter that will ensure the
+         FROM list implied from the current columns clause will be transferred
+         to the :meth:`_sql.Select.select_from` method first.
+
+         .. versionadded:: 1.4.23
+
+        """  # noqa: E501
+
+        if __kw:
+            raise _no_kw()
+
+        # memoizations should be cleared here as of
+        # I95c560ffcbfa30b26644999412fb6a385125f663, asserting this
+        # is the case for now.
+        self._assert_no_memoizations()
+
+        if maintain_column_froms:
+            self.select_from.non_generative(  # type: ignore
+                self, *self.columns_clause_froms
+            )
+
+        # then memoize the FROMs etc.
+        _MemoizedSelectEntities._generate_for_statement(self)
+
+        self._raw_columns = [
+            coercions.expect(roles.ColumnsClauseRole, c)
+            for c in coercions._expression_collection_was_a_list(
+                "entities", "Select.with_only_columns", entities
+            )
+        ]
+        return self
+
+    @property
+    def whereclause(self) -> Optional[ColumnElement[Any]]:
+        """Return the completed WHERE clause for this
+        :class:`_expression.Select` statement.
+
+        This assembles the current collection of WHERE criteria
+        into a single :class:`_expression.BooleanClauseList` construct.
+
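+        E.g., a sketch with a hypothetical ``user_table``::
+
+            stmt = select(user_table).where(user_table.c.id > 5)
+            stmt = stmt.where(user_table.c.name != "ed")
+
+            # a BooleanClauseList joining the two criteria by AND
+            criteria = stmt.whereclause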
+
+        .. versionadded:: 1.4
+
+        """
+
+        return BooleanClauseList._construct_for_whereclause(
+            self._where_criteria
+        )
+
+    _whereclause = whereclause
+
+    @_generative
+    def where(self, *whereclause: _ColumnExpressionArgument[bool]) -> Self:
+        """Return a new :func:`_expression.select` construct with
+        the given expression added to
+        its WHERE clause, joined to the existing clause via AND, if any.
+
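+        E.g., a sketch assuming a hypothetical ``user_table``; successive
+        calls accumulate criteria joined by AND::
+
+            stmt = select(user_table).where(user_table.c.id > 5)
+            stmt = stmt.where(user_table.c.name.like("a%"))
+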
+        """
+
+        assert isinstance(self._where_criteria, tuple)
+
+        for criterion in whereclause:
+            where_criteria: ColumnElement[Any] = coercions.expect(
+                roles.WhereHavingRole, criterion, apply_propagate_attrs=self
+            )
+            self._where_criteria += (where_criteria,)
+        return self
+
+    @_generative
+    def having(self, *having: _ColumnExpressionArgument[bool]) -> Self:
+        """Return a new :func:`_expression.select` construct with
+        the given expression added to
+        its HAVING clause, joined to the existing clause via AND, if any.
+
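+        E.g., a sketch assuming a hypothetical ``user_table``::
+
+            from sqlalchemy import func
+
+            stmt = (
+                select(user_table.c.name, func.count(user_table.c.id))
+                .group_by(user_table.c.name)
+                .having(func.count(user_table.c.id) > 1)
+            )
+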
+        """
+
+        for criterion in having:
+            having_criteria = coercions.expect(
+                roles.WhereHavingRole, criterion, apply_propagate_attrs=self
+            )
+            self._having_criteria += (having_criteria,)
+        return self
+
+    @_generative
+    def distinct(self, *expr: _ColumnExpressionArgument[Any]) -> Self:
+        r"""Return a new :func:`_expression.select` construct which
+        will apply DISTINCT to the SELECT statement overall.
+
+        E.g.::
+
+            from sqlalchemy import select
+
+            stmt = select(users_table.c.id, users_table.c.name).distinct()
+
+        The above would produce a statement resembling:
+
+        .. sourcecode:: sql
+
+            SELECT DISTINCT user.id, user.name FROM user
+
+        The method also accepts an ``*expr`` parameter which produces the
+        PostgreSQL dialect-specific ``DISTINCT ON`` expression.  Using this
+        parameter on other backends which don't support this syntax will
+        raise an error.
+
+        :param \*expr: optional column expressions.  When present,
+         the PostgreSQL dialect will render a ``DISTINCT ON (<expressions>)``
+         construct.  A deprecation warning and/or :class:`_exc.CompileError`
+         will be raised on other backends.
+
+         .. deprecated:: 1.4 Using \*expr in other dialects is deprecated
+            and will raise :class:`_exc.CompileError` in a future version.
+
+        """
+        if expr:
+            self._distinct = True
+            self._distinct_on = self._distinct_on + tuple(
+                coercions.expect(roles.ByOfRole, e, apply_propagate_attrs=self)
+                for e in expr
+            )
+        else:
+            self._distinct = True
+        return self
+
+    @_generative
+    def select_from(self, *froms: _FromClauseArgument) -> Self:
+        r"""Return a new :func:`_expression.select` construct with the
+        given FROM expression(s)
+        merged into its list of FROM objects.
+
+        E.g.::
+
+            table1 = table("t1", column("a"))
+            table2 = table("t2", column("b"))
+            s = select(table1.c.a).select_from(
+                table1.join(table2, table1.c.a == table2.c.b)
+            )
+
+        The "from" list is a unique set on the identity of each element,
+        so adding an already present :class:`_schema.Table`
+        or other selectable
+        will have no effect.   Passing a :class:`_expression.Join` that refers
+        to an already present :class:`_schema.Table`
+        or other selectable will have
+        the effect of concealing the presence of that selectable as
+        an individual element in the rendered FROM list, instead
+        rendering it into a JOIN clause.
+
+        While the typical purpose of :meth:`_expression.Select.select_from`
+        is to
+        replace the default, derived FROM clause with a join, it can
+        also be called with individual table elements, multiple times
+        if desired, in the case that the FROM clause cannot be fully
+        derived from the columns clause::
+
+            select(func.count("*")).select_from(table1)
+
+        """
+
+        self._from_obj += tuple(
+            coercions.expect(
+                roles.FromClauseRole, fromclause, apply_propagate_attrs=self
+            )
+            for fromclause in froms
+        )
+        return self
+
+    @_generative
+    def correlate(
+        self,
+        *fromclauses: Union[Literal[None, False], _FromClauseArgument],
+    ) -> Self:
+        r"""Return a new :class:`_expression.Select`
+        which will correlate the given FROM
+        clauses to that of an enclosing :class:`_expression.Select`.
+
+        Calling this method turns off the :class:`_expression.Select` object's
+        default behavior of "auto-correlation".  Normally, FROM elements
+        which appear in a :class:`_expression.Select`
+        that encloses this one via
+        its :term:`WHERE clause`, ORDER BY, HAVING or
+        :term:`columns clause` will be omitted from this
+        :class:`_expression.Select`
+        object's :term:`FROM clause`.
+        Setting an explicit correlation collection using the
+        :meth:`_expression.Select.correlate`
+        method provides a fixed list of FROM objects
+        that can potentially take place in this process.
+
+        When :meth:`_expression.Select.correlate`
+        is used to apply specific FROM clauses
+        for correlation, the FROM elements become candidates for
+        correlation regardless of how deeply nested this
+        :class:`_expression.Select`
+        object is, relative to an enclosing :class:`_expression.Select`
+        which refers to
+        the same FROM object.  This is in contrast to the behavior of
+        "auto-correlation" which only correlates to an immediate enclosing
+        :class:`_expression.Select`.
+        Multi-level correlation ensures that the link
+        between enclosed and enclosing :class:`_expression.Select`
+        is always via
+        at least one WHERE/ORDER BY/HAVING/columns clause in order for
+        correlation to take place.
+
+        If ``None`` is passed, the :class:`_expression.Select`
+        object will correlate
+        none of its FROM entries, and all will render unconditionally
+        in the local FROM clause.
+
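+        E.g., a sketch assuming hypothetical ``user_table`` and
+        ``address_table``; naming ``user_table`` as a correlation candidate
+        causes it to be omitted from the subquery's own FROM clause when
+        used in an enclosing SELECT against ``user_table``::
+
+            from sqlalchemy import func
+
+            subq = (
+                select(func.count(address_table.c.id))
+                .where(address_table.c.user_id == user_table.c.id)
+                .correlate(user_table)
+                .scalar_subquery()
+            )
+            stmt = select(user_table.c.name, subq)
+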
+        :param \*fromclauses: one or more :class:`.FromClause` or other
+         FROM-compatible construct such as an ORM mapped entity to become part
+         of the correlate collection; alternatively pass a single value
+         ``None`` to remove all existing correlations.
+
+        .. seealso::
+
+            :meth:`_expression.Select.correlate_except`
+
+            :ref:`tutorial_scalar_subquery`
+
+        """
+
+        # tests fail when we try to change how these
+        # arguments are passed
+
+        self._auto_correlate = False
+        if not fromclauses or fromclauses[0] in {None, False}:
+            if len(fromclauses) > 1:
+                raise exc.ArgumentError(
+                    "additional FROM objects not accepted when "
+                    "passing None/False to correlate()"
+                )
+            self._correlate = ()
+        else:
+            self._correlate = self._correlate + tuple(
+                coercions.expect(roles.FromClauseRole, f) for f in fromclauses
+            )
+        return self
+
+    @_generative
+    def correlate_except(
+        self,
+        *fromclauses: Union[Literal[None, False], _FromClauseArgument],
+    ) -> Self:
+        r"""Return a new :class:`_expression.Select`
+        which will omit the given FROM
+        clauses from the auto-correlation process.
+
+        Calling :meth:`_expression.Select.correlate_except` turns off the
+        :class:`_expression.Select` object's default behavior of
+        "auto-correlation" for the given FROM elements.  An element
+        specified here will unconditionally appear in the FROM list, while
+        all other FROM elements remain subject to normal auto-correlation
+        behaviors.
+
+        If ``None`` is passed, or no arguments are passed,
+        the :class:`_expression.Select` object will correlate all of its
+        FROM entries.
+
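+        E.g., a sketch mirroring the :meth:`_expression.Select.correlate`
+        example; here ``address_table`` is excluded from auto-correlation
+        and therefore always renders in the subquery's own FROM clause::
+
+            from sqlalchemy import func
+
+            subq = (
+                select(func.count(address_table.c.id))
+                .where(address_table.c.user_id == user_table.c.id)
+                .correlate_except(address_table)
+                .scalar_subquery()
+            )
+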
+        :param \*fromclauses: a list of one or more
+         :class:`_expression.FromClause`
+         constructs, or other compatible constructs (i.e. ORM-mapped
+         classes) to become part of the correlate-exception collection.
+
+        .. seealso::
+
+            :meth:`_expression.Select.correlate`
+
+            :ref:`tutorial_scalar_subquery`
+
+        """
+
+        self._auto_correlate = False
+        if not fromclauses or fromclauses[0] in {None, False}:
+            if len(fromclauses) > 1:
+                raise exc.ArgumentError(
+                    "additional FROM objects not accepted when "
+                    "passing None/False to correlate_except()"
+                )
+            self._correlate_except = ()
+        else:
+            self._correlate_except = (self._correlate_except or ()) + tuple(
+                coercions.expect(roles.FromClauseRole, f) for f in fromclauses
+            )
+
+        return self
+
+    @HasMemoized_ro_memoized_attribute
+    def selected_columns(
+        self,
+    ) -> ColumnCollection[str, ColumnElement[Any]]:
+        """A :class:`_expression.ColumnCollection`
+        representing the columns that
+        this SELECT statement or similar construct returns in its result set,
+        not including :class:`_sql.TextClause` constructs.
+
+        This collection differs from the :attr:`_expression.FromClause.columns`
+        collection of a :class:`_expression.FromClause` in that the columns
+        within this collection cannot be directly nested inside another SELECT
+        statement; a subquery must be applied first which provides for the
+        necessary parenthesization required by SQL.
+
+        For a :func:`_expression.select` construct, the collection here is
+        exactly what would be rendered inside the "SELECT" statement, and the
+        :class:`_expression.ColumnElement` objects are directly present as they
+        were given, e.g.::
+
+            col1 = column("q", Integer)
+            col2 = column("p", Integer)
+            stmt = select(col1, col2)
+
+        Above, ``stmt.selected_columns`` would be a collection that contains
+        the ``col1`` and ``col2`` objects directly. For a statement that is
+        against a :class:`_schema.Table` or other
+        :class:`_expression.FromClause`, the collection will use the
+        :class:`_expression.ColumnElement` objects that are in the
+        :attr:`_expression.FromClause.c` collection of the from element.
+
+        A use case for the :attr:`_sql.Select.selected_columns` collection is
+        to allow the existing columns to be referenced when adding additional
+        criteria, e.g.::
+
+            def filter_on_id(my_select, id):
+                return my_select.where(my_select.selected_columns["id"] == id)
+
+
+            stmt = select(MyModel)
+
+            # adds "WHERE id=:param" to the statement
+            stmt = filter_on_id(stmt, 42)
+
+        .. note::
+
+            The :attr:`_sql.Select.selected_columns` collection does not
+            include expressions established in the columns clause using the
+            :func:`_sql.text` construct; these are silently omitted from the
+            collection. To use plain textual column expressions inside of a
+            :class:`_sql.Select` construct, use the :func:`_sql.literal_column`
+            construct.
+
+
+        .. versionadded:: 1.4
+
+        """
+
+        # compare to SelectState._generate_columns_plus_names, which
+        # generates the actual names used in the SELECT string.  that
+        # method is more complex because it also renders columns that are
+        # fully ambiguous, e.g. same column more than once.
+        conv = cast(
+            "Callable[[Any], str]",
+            SelectState._column_naming_convention(self._label_style),
+        )
+
+        cc: ColumnCollection[str, ColumnElement[Any]] = ColumnCollection(
+            [
+                (conv(c), c)
+                for c in self._all_selected_columns
+                if is_column_element(c)
+            ]
+        )
+        return cc.as_readonly()
+
+    @HasMemoized_ro_memoized_attribute
+    def _all_selected_columns(self) -> _SelectIterable:
+        meth = SelectState.get_plugin_class(self).all_selected_columns
+        return list(meth(self))
+
+    def _ensure_disambiguated_names(self) -> Select[Any]:
+        if self._label_style is LABEL_STYLE_NONE:
+            self = self.set_label_style(LABEL_STYLE_DISAMBIGUATE_ONLY)
+        return self
+
+    def _generate_fromclause_column_proxies(
+        self,
+        subquery: FromClause,
+        columns: ColumnCollection[str, KeyedColumnElement[Any]],
+        primary_key: ColumnSet,
+        foreign_keys: Set[KeyedColumnElement[Any]],
+        *,
+        proxy_compound_columns: Optional[
+            Iterable[Sequence[ColumnElement[Any]]]
+        ] = None,
+    ) -> None:
+        """Generate column proxies to place in the exported ``.c``
+        collection of a subquery."""
+
+        if proxy_compound_columns:
+            extra_col_iterator = proxy_compound_columns
+            prox = [
+                c._make_proxy(
+                    subquery,
+                    key=proxy_key,
+                    name=required_label_name,
+                    name_is_truncatable=True,
+                    compound_select_cols=extra_cols,
+                    primary_key=primary_key,
+                    foreign_keys=foreign_keys,
+                )
+                for (
+                    (
+                        required_label_name,
+                        proxy_key,
+                        fallback_label_name,
+                        c,
+                        repeated,
+                    ),
+                    extra_cols,
+                ) in (
+                    zip(
+                        self._generate_columns_plus_names(False),
+                        extra_col_iterator,
+                    )
+                )
+                if is_column_element(c)
+            ]
+        else:
+            prox = [
+                c._make_proxy(
+                    subquery,
+                    key=proxy_key,
+                    name=required_label_name,
+                    name_is_truncatable=True,
+                    primary_key=primary_key,
+                    foreign_keys=foreign_keys,
+                )
+                for (
+                    required_label_name,
+                    proxy_key,
+                    fallback_label_name,
+                    c,
+                    repeated,
+                ) in (self._generate_columns_plus_names(False))
+                if is_column_element(c)
+            ]
+
+        columns._populate_separate_keys(prox)
+
+    def _needs_parens_for_grouping(self) -> bool:
+        return self._has_row_limiting_clause or bool(
+            self._order_by_clause.clauses
+        )
+
+    def self_group(
+        self, against: Optional[OperatorType] = None
+    ) -> Union[SelectStatementGrouping[Self], Self]:
+        """Return a 'grouping' construct as per the
+        :class:`_expression.ClauseElement` specification.
+
+        This produces an element that can be embedded in an expression. Note
+        that this method is called automatically as needed when constructing
+        expressions and should not require explicit use.
+
+        """
+        if (
+            isinstance(against, CompoundSelect)
+            and not self._needs_parens_for_grouping()
+        ):
+            return self
+        else:
+            return SelectStatementGrouping(self)
+
+    def union(
+        self, *other: _SelectStatementForCompoundArgument[_TP]
+    ) -> CompoundSelect[_TP]:
+        r"""Return a SQL ``UNION`` of this select() construct against
+        the given selectables provided as positional arguments.
+
+        :param \*other: one or more elements with which to create a
+         UNION.
+
+         .. versionchanged:: 1.4.28
+
+            multiple elements are now accepted.
+
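+        E.g., an illustrative sketch, assuming ``table_a`` and ``table_b``
+        are :class:`_schema.Table` objects that each have a compatible
+        column ``x``::
+
+            stmt = select(table_a.c.x).union(select(table_b.c.x))
+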
+        """
+        return CompoundSelect._create_union(self, *other)
+
+    def union_all(
+        self, *other: _SelectStatementForCompoundArgument[_TP]
+    ) -> CompoundSelect[_TP]:
+        r"""Return a SQL ``UNION ALL`` of this select() construct against
+        the given selectables provided as positional arguments.
+
+        :param \*other: one or more elements with which to create a
+         UNION ALL.
+
+         .. versionchanged:: 1.4.28
+
+            multiple elements are now accepted.
+
+        """
+        return CompoundSelect._create_union_all(self, *other)
+
+    def except_(
+        self, *other: _SelectStatementForCompoundArgument[_TP]
+    ) -> CompoundSelect[_TP]:
+        r"""Return a SQL ``EXCEPT`` of this select() construct against
+        the given selectables provided as positional arguments.
+
+        :param \*other: one or more elements with which to create an
+         EXCEPT.
+
+         .. versionchanged:: 1.4.28
+
+            multiple elements are now accepted.
+
+        """
+        return CompoundSelect._create_except(self, *other)
+
+    def except_all(
+        self, *other: _SelectStatementForCompoundArgument[_TP]
+    ) -> CompoundSelect[_TP]:
+        r"""Return a SQL ``EXCEPT ALL`` of this select() construct against
+        the given selectables provided as positional arguments.
+
+        :param \*other: one or more elements with which to create an
+         EXCEPT ALL.
+
+         .. versionchanged:: 1.4.28
+
+            multiple elements are now accepted.
+
+        """
+        return CompoundSelect._create_except_all(self, *other)
+
+    def intersect(
+        self, *other: _SelectStatementForCompoundArgument[_TP]
+    ) -> CompoundSelect[_TP]:
+        r"""Return a SQL ``INTERSECT`` of this select() construct against
+        the given selectables provided as positional arguments.
+
+        :param \*other: one or more elements with which to create an
+         INTERSECT.
+
+         .. versionchanged:: 1.4.28
+
+            multiple elements are now accepted.
+
+        """
+        return CompoundSelect._create_intersect(self, *other)
+
+    def intersect_all(
+        self, *other: _SelectStatementForCompoundArgument[_TP]
+    ) -> CompoundSelect[_TP]:
+        r"""Return a SQL ``INTERSECT ALL`` of this select() construct
+        against the given selectables provided as positional arguments.
+
+        :param \*other: one or more elements with which to create an
+         INTERSECT ALL.
+
+         .. versionchanged:: 1.4.28
+
+            multiple elements are now accepted.
+
+        """
+        return CompoundSelect._create_intersect_all(self, *other)
+
+
+class ScalarSelect(
+    roles.InElementRole, Generative, GroupedElement, ColumnElement[_T]
+):
+    """Represent a scalar subquery.
+
+    A :class:`_sql.ScalarSelect` is created by invoking the
+    :meth:`_sql.SelectBase.scalar_subquery` method.   The object
+    then participates in other SQL expressions as a SQL column expression
+    within the :class:`_sql.ColumnElement` hierarchy.
+
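+    E.g., an illustrative sketch; ``users`` and ``orders`` are assumed
+    to be pre-existing :class:`_schema.Table` objects::
+
+        total = (
+            select(func.sum(orders.c.amount))
+            .where(orders.c.user_id == users.c.id)
+            .scalar_subquery()
+        )
+        stmt = select(users.c.id, total)
+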
+    .. seealso::
+
+        :meth:`_sql.SelectBase.scalar_subquery`
+
+        :ref:`tutorial_scalar_subquery` - in the 2.0 tutorial
+
+    """
+
+    _traverse_internals: _TraverseInternalsType = [
+        ("element", InternalTraversal.dp_clauseelement),
+        ("type", InternalTraversal.dp_type),
+    ]
+
+    _from_objects: List[FromClause] = []
+    _is_from_container = True
+    if not TYPE_CHECKING:
+        _is_implicitly_boolean = False
+    inherit_cache = True
+
+    element: SelectBase
+
+    def __init__(self, element: SelectBase) -> None:
+        self.element = element
+        self.type = element._scalar_type()
+        self._propagate_attrs = element._propagate_attrs
+
+    def __getattr__(self, attr: str) -> Any:
+        return getattr(self.element, attr)
+
+    def __getstate__(self) -> Dict[str, Any]:
+        return {"element": self.element, "type": self.type}
+
+    def __setstate__(self, state: Dict[str, Any]) -> None:
+        self.element = state["element"]
+        self.type = state["type"]
+
+    @property
+    def columns(self) -> NoReturn:
+        raise exc.InvalidRequestError(
+            "Scalar Select expression has no "
+            "columns; use this object directly "
+            "within a column-level expression."
+        )
+
+    c = columns
+
+    @_generative
+    def where(self, crit: _ColumnExpressionArgument[bool]) -> Self:
+        """Apply a WHERE clause to the SELECT statement referred to
+        by this :class:`_expression.ScalarSelect`.
+
+        """
+        self.element = cast("Select[Any]", self.element).where(crit)
+        return self
+
+    def self_group(self, against: Optional[OperatorType] = None) -> Self:
+        return self
+
+    if TYPE_CHECKING:
+
+        def _ungroup(self) -> Select[Any]: ...
+
+    @_generative
+    def correlate(
+        self,
+        *fromclauses: Union[Literal[None, False], _FromClauseArgument],
+    ) -> Self:
+        r"""Return a new :class:`_expression.ScalarSelect`
+        which will correlate the given FROM
+        clauses to that of an enclosing :class:`_expression.Select`.
+
+        This method is mirrored from the :meth:`_sql.Select.correlate` method
+        of the underlying :class:`_sql.Select`.  The method applies the
+        :meth:`_sql.Select.correlate` method, then returns a new
+        :class:`_sql.ScalarSelect` against that statement.
+
+        .. versionadded:: 1.4 Previously, the
+           :meth:`_sql.ScalarSelect.correlate`
+           method was only available from :class:`_sql.Select`.
+
+        :param \*fromclauses: a list of one or more
+         :class:`_expression.FromClause`
+         constructs, or other compatible constructs (i.e. ORM-mapped
+         classes) to become part of the correlate collection.
+
+        .. seealso::
+
+            :meth:`_expression.ScalarSelect.correlate_except`
+
+            :ref:`tutorial_scalar_subquery` - in the 2.0 tutorial
+
+
+        """
+        self.element = cast("Select[Any]", self.element).correlate(
+            *fromclauses
+        )
+        return self
+
+    @_generative
+    def correlate_except(
+        self,
+        *fromclauses: Union[Literal[None, False], _FromClauseArgument],
+    ) -> Self:
+        r"""Return a new :class:`_expression.ScalarSelect`
+        which will omit the given FROM
+        clauses from the auto-correlation process.
+
+        This method is mirrored from the
+        :meth:`_sql.Select.correlate_except` method of the underlying
+        :class:`_sql.Select`.  The method applies the
+        :meth:`_sql.Select.correlate_except` method, then returns a new
+        :class:`_sql.ScalarSelect` against that statement.
+
+        .. versionadded:: 1.4 Previously, the
+           :meth:`_sql.ScalarSelect.correlate_except`
+           method was only available from :class:`_sql.Select`.
+
+        :param \*fromclauses: a list of one or more
+         :class:`_expression.FromClause`
+         constructs, or other compatible constructs (i.e. ORM-mapped
+         classes) to become part of the correlate-exception collection.
+
+        .. seealso::
+
+            :meth:`_expression.ScalarSelect.correlate`
+
+            :ref:`tutorial_scalar_subquery` - in the 2.0 tutorial
+
+
+        """
+
+        self.element = cast("Select[Any]", self.element).correlate_except(
+            *fromclauses
+        )
+        return self
+
+
+class Exists(UnaryExpression[bool]):
+    """Represent an ``EXISTS`` clause.
+
+    See :func:`_sql.exists` for a description of usage.
+
+    An ``EXISTS`` clause can also be constructed from a :func:`_sql.select`
+    instance by calling :meth:`_sql.SelectBase.exists`.
+
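+    E.g., an illustrative sketch (``table1`` and ``table2`` are
+    placeholder :class:`_schema.Table` objects)::
+
+        exists_criteria = (
+            select(table2.c.col2)
+            .where(table1.c.col1 == table2.c.col2)
+            .exists()
+        )
+        stmt = select(table1.c.col1).where(exists_criteria)
+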
+    """
+
+    inherit_cache = True
+    element: Union[SelectStatementGrouping[Select[Any]], ScalarSelect[Any]]
+
+    def __init__(
+        self,
+        __argument: Optional[
+            Union[_ColumnsClauseArgument[Any], SelectBase, ScalarSelect[Any]]
+        ] = None,
+    ):
+        s: ScalarSelect[Any]
+
+        # TODO: this seems like we should be using coercions for this
+        if __argument is None:
+            s = Select(literal_column("*")).scalar_subquery()
+        elif isinstance(__argument, SelectBase):
+            s = __argument.scalar_subquery()
+            s._propagate_attrs = __argument._propagate_attrs
+        elif isinstance(__argument, ScalarSelect):
+            s = __argument
+        else:
+            s = Select(__argument).scalar_subquery()
+
+        UnaryExpression.__init__(
+            self,
+            s,
+            operator=operators.exists,
+            type_=type_api.BOOLEANTYPE,
+            wraps_column_expression=True,
+        )
+
+    @util.ro_non_memoized_property
+    def _from_objects(self) -> List[FromClause]:
+        return []
+
+    def _regroup(
+        self, fn: Callable[[Select[Any]], Select[Any]]
+    ) -> SelectStatementGrouping[Select[Any]]:
+        element = self.element._ungroup()
+        new_element = fn(element)
+
+        return_value = new_element.self_group(against=operators.exists)
+        assert isinstance(return_value, SelectStatementGrouping)
+        return return_value
+
+    def select(self) -> Select[Tuple[bool]]:
+        r"""Return a SELECT of this :class:`_expression.Exists`.
+
+        e.g.::
+
+            stmt = exists(some_table.c.id).where(some_table.c.id == 5).select()
+
+        This will produce a statement resembling:
+
+        .. sourcecode:: sql
+
+            SELECT EXISTS (SELECT id FROM some_table WHERE some_table.id = :param) AS anon_1
+
+        .. seealso::
+
+            :func:`_expression.select` - general purpose
+            method which allows for arbitrary column lists.
+
+        """  # noqa
+
+        return Select(self)
+
+    def correlate(
+        self,
+        *fromclauses: Union[Literal[None, False], _FromClauseArgument],
+    ) -> Self:
+        """Apply correlation to the subquery noted by this
+        :class:`_sql.Exists`.
+
+        .. seealso::
+
+            :meth:`_sql.ScalarSelect.correlate`
+
+        """
+        e = self._clone()
+        e.element = self._regroup(
+            lambda element: element.correlate(*fromclauses)
+        )
+        return e
+
+    def correlate_except(
+        self,
+        *fromclauses: Union[Literal[None, False], _FromClauseArgument],
+    ) -> Self:
+        """Apply correlation to the subquery noted by this
+        :class:`_sql.Exists`.
+
+        .. seealso::
+
+            :meth:`_sql.ScalarSelect.correlate_except`
+
+        """
+
+        e = self._clone()
+        e.element = self._regroup(
+            lambda element: element.correlate_except(*fromclauses)
+        )
+        return e
+
+    def select_from(self, *froms: _FromClauseArgument) -> Self:
+        """Return a new :class:`_expression.Exists` construct,
+        applying the given
+        expression to the :meth:`_expression.Select.select_from`
+        method of the select
+        statement contained.
+
+        .. note:: it is typically preferable to build a :class:`_sql.Select`
+           statement first, including the desired WHERE clause, then use the
+           :meth:`_sql.SelectBase.exists` method to produce an
+           :class:`_sql.Exists` object at once.
+
+        """
+        e = self._clone()
+        e.element = self._regroup(lambda element: element.select_from(*froms))
+        return e
+
+    def where(self, *clause: _ColumnExpressionArgument[bool]) -> Self:
+        """Return a new :func:`_expression.exists` construct with the
+        given expression added to
+        its WHERE clause, joined to the existing clause via AND, if any.
+
+
+        .. note:: it is typically preferable to build a :class:`_sql.Select`
+           statement first, including the desired WHERE clause, then use the
+           :meth:`_sql.SelectBase.exists` method to produce an
+           :class:`_sql.Exists` object at once.
+
+        """
+        e = self._clone()
+        e.element = self._regroup(lambda element: element.where(*clause))
+        return e
+
+
+class TextualSelect(SelectBase, ExecutableReturnsRows, Generative):
+    """Wrap a :class:`_expression.TextClause` construct within a
+    :class:`_expression.SelectBase`
+    interface.
+
+    This allows the :class:`_expression.TextClause` object to gain a
+    ``.c`` collection
+    and other FROM-like capabilities such as
+    :meth:`_expression.FromClause.alias`,
+    :meth:`_expression.SelectBase.cte`, etc.
+
+    The :class:`_expression.TextualSelect` construct is produced via the
+    :meth:`_expression.TextClause.columns`
+    method - see that method for details.
+
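+    E.g., a minimal sketch of that creation pattern (table and column
+    names are illustrative)::
+
+        from sqlalchemy import column, text
+
+        stmt = text("SELECT id, name FROM user").columns(
+            column("id"), column("name")
+        )
+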
+    .. versionchanged:: 1.4 the :class:`_expression.TextualSelect`
+       class was renamed
+       from ``TextAsFrom``, to more correctly suit its role as a
+       SELECT-oriented object and not a FROM clause.
+
+    .. seealso::
+
+        :func:`_expression.text`
+
+        :meth:`_expression.TextClause.columns` - primary creation interface.
+
+    """
+
+    __visit_name__ = "textual_select"
+
+    _label_style = LABEL_STYLE_NONE
+
+    _traverse_internals: _TraverseInternalsType = (
+        [
+            ("element", InternalTraversal.dp_clauseelement),
+            ("column_args", InternalTraversal.dp_clauseelement_list),
+        ]
+        + SupportsCloneAnnotations._clone_annotations_traverse_internals
+        + HasCTE._has_ctes_traverse_internals
+    )
+
+    _is_textual = True
+
+    is_text = True
+    is_select = True
+
+    def __init__(
+        self,
+        text: TextClause,
+        columns: List[_ColumnExpressionArgument[Any]],
+        positional: bool = False,
+    ) -> None:
+        self._init(
+            text,
+            # convert for ORM attributes->columns, etc
+            [
+                coercions.expect(roles.LabeledColumnExprRole, c)
+                for c in columns
+            ],
+            positional,
+        )
+
+    def _init(
+        self,
+        text: TextClause,
+        columns: List[NamedColumn[Any]],
+        positional: bool = False,
+    ) -> None:
+        self.element = text
+        self.column_args = columns
+        self.positional = positional
+
+    @HasMemoized_ro_memoized_attribute
+    def selected_columns(
+        self,
+    ) -> ColumnCollection[str, KeyedColumnElement[Any]]:
+        """A :class:`_expression.ColumnCollection`
+        representing the columns that
+        this SELECT statement or similar construct returns in its result set,
+        not including :class:`_sql.TextClause` constructs.
+
+        This collection differs from the :attr:`_expression.FromClause.columns`
+        collection of a :class:`_expression.FromClause` in that the columns
+        within this collection cannot be directly nested inside another SELECT
+        statement; a subquery must be applied first which provides for the
+        necessary parenthesization required by SQL.
+
+        For a :class:`_expression.TextualSelect` construct, the collection
+        contains the :class:`_expression.ColumnElement` objects that were
+        passed to the constructor, typically via the
+        :meth:`_expression.TextClause.columns` method.
+
+
+        .. versionadded:: 1.4
+
+        """
+        return ColumnCollection(
+            (c.key, c) for c in self.column_args
+        ).as_readonly()
+
+    @util.ro_non_memoized_property
+    def _all_selected_columns(self) -> _SelectIterable:
+        return self.column_args
+
+    def set_label_style(self, style: SelectLabelStyle) -> TextualSelect:
+        return self
+
+    def _ensure_disambiguated_names(self) -> TextualSelect:
+        return self
+
+    @_generative
+    def bindparams(
+        self,
+        *binds: BindParameter[Any],
+        **bind_as_values: Any,
+    ) -> Self:
+        self.element = self.element.bindparams(*binds, **bind_as_values)
+        return self
+
+    def _generate_fromclause_column_proxies(
+        self,
+        fromclause: FromClause,
+        columns: ColumnCollection[str, KeyedColumnElement[Any]],
+        primary_key: ColumnSet,
+        foreign_keys: Set[KeyedColumnElement[Any]],
+        *,
+        proxy_compound_columns: Optional[
+            Iterable[Sequence[ColumnElement[Any]]]
+        ] = None,
+    ) -> None:
+        if TYPE_CHECKING:
+            assert isinstance(fromclause, Subquery)
+
+        if proxy_compound_columns:
+            columns._populate_separate_keys(
+                c._make_proxy(
+                    fromclause,
+                    compound_select_cols=extra_cols,
+                    primary_key=primary_key,
+                    foreign_keys=foreign_keys,
+                )
+                for c, extra_cols in zip(
+                    self.column_args, proxy_compound_columns
+                )
+            )
+        else:
+            columns._populate_separate_keys(
+                c._make_proxy(
+                    fromclause,
+                    primary_key=primary_key,
+                    foreign_keys=foreign_keys,
+                )
+                for c in self.column_args
+            )
+
+    def _scalar_type(self) -> Union[TypeEngine[Any], Any]:
+        return self.column_args[0].type
+
+
+TextAsFrom = TextualSelect
+"""Backwards compatibility with the previous name"""
+
+
+class AnnotatedFromClause(Annotated):
+    def _copy_internals(self, **kw: Any) -> None:
+        super()._copy_internals(**kw)
+        if kw.get("ind_cols_on_fromclause", False):
+            ee = self._Annotated__element  # type: ignore
+
+            self.c = ee.__class__.c.fget(self)  # type: ignore
+
+    @util.ro_memoized_property
+    def c(self) -> ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]:
+        """proxy the .c collection of the underlying FromClause.
+
+        Originally implemented in 2008 as a simple load of the .c collection
+        when the annotated construct was created (see d3621ae961a); in modern
+        SQLAlchemy versions this can be expensive for statements constructed
+        with ORM aliases.   So as of #8796 in SQLAlchemy 2.0 we instead proxy
+        it, which works just as well.
+
+        Two different use cases seem to require the collection either copied
+        from the underlying one, or unique to this AnnotatedFromClause.
+
+        See test_selectable->test_annotated_corresponding_column
+
+        """
+        ee = self._Annotated__element  # type: ignore
+        return ee.c  # type: ignore
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/sqltypes.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/sqltypes.py
new file mode 100644
index 00000000..ad220356
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/sqltypes.py
@@ -0,0 +1,3844 @@
+# sql/sqltypes.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+"""SQL specific types.
+
+"""
+from __future__ import annotations
+
+import collections.abc as collections_abc
+import datetime as dt
+import decimal
+import enum
+import json
+import pickle
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import List
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+from uuid import UUID as _python_UUID
+
+from . import coercions
+from . import elements
+from . import operators
+from . import roles
+from . import type_api
+from .base import _NONE_NAME
+from .base import NO_ARG
+from .base import SchemaEventTarget
+from .cache_key import HasCacheKey
+from .elements import quoted_name
+from .elements import Slice
+from .elements import TypeCoerce as type_coerce  # noqa
+from .type_api import Emulated
+from .type_api import NativeForEmulated  # noqa
+from .type_api import to_instance as to_instance
+from .type_api import TypeDecorator as TypeDecorator
+from .type_api import TypeEngine as TypeEngine
+from .type_api import TypeEngineMixin
+from .type_api import Variant  # noqa
+from .visitors import InternalTraversal
+from .. import event
+from .. import exc
+from .. import inspection
+from .. import util
+from ..engine import processors
+from ..util import langhelpers
+from ..util import OrderedDict
+from ..util import warn_deprecated
+from ..util.typing import get_args
+from ..util.typing import is_literal
+from ..util.typing import is_pep695
+from ..util.typing import Literal
+
+if TYPE_CHECKING:
+    from ._typing import _ColumnExpressionArgument
+    from ._typing import _TypeEngineArgument
+    from .operators import OperatorType
+    from .schema import MetaData
+    from .type_api import _BindProcessorType
+    from .type_api import _ComparatorFactory
+    from .type_api import _LiteralProcessorType
+    from .type_api import _MatchedOnType
+    from .type_api import _ResultProcessorType
+    from ..engine.interfaces import Dialect
+
+_T = TypeVar("_T", bound="Any")
+_CT = TypeVar("_CT", bound=Any)
+_TE = TypeVar("_TE", bound="TypeEngine[Any]")
+
+
+class HasExpressionLookup(TypeEngineMixin):
+    """Mixin expression adaptations based on lookup tables.
+
+    These rules are currently used by the numeric, integer and date types
+    which have detailed cross-expression coercion rules.
+
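+    For instance, these lookup tables are what allow an expression such
+    as ``some_date_column + some_integer`` to resolve to a date-typed
+    expression; a hypothetical sketch, where ``sometable`` is a
+    placeholder::
+
+        # per Date._expression_adaptations, Date + Integer -> Date
+        expr = sometable.c.created_date + 5
+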
+    """
+
+    @property
+    def _expression_adaptations(self):
+        raise NotImplementedError()
+
+    class Comparator(TypeEngine.Comparator[_CT]):
+        __slots__ = ()
+
+        _blank_dict = util.EMPTY_DICT
+
+        def _adapt_expression(
+            self,
+            op: OperatorType,
+            other_comparator: TypeEngine.Comparator[Any],
+        ) -> Tuple[OperatorType, TypeEngine[Any]]:
+            othertype = other_comparator.type._type_affinity
+            if TYPE_CHECKING:
+                assert isinstance(self.type, HasExpressionLookup)
+            lookup = self.type._expression_adaptations.get(
+                op, self._blank_dict
+            ).get(othertype, self.type)
+            if lookup is othertype:
+                return (op, other_comparator.type)
+            elif lookup is self.type._type_affinity:
+                return (op, self.type)
+            else:
+                return (op, to_instance(lookup))
+
+    comparator_factory: _ComparatorFactory[Any] = Comparator
+
+
+class Concatenable(TypeEngineMixin):
+    """A mixin that marks a type as supporting 'concatenation',
+    typically strings."""
+
+    class Comparator(TypeEngine.Comparator[_T]):
+        __slots__ = ()
+
+        def _adapt_expression(
+            self,
+            op: OperatorType,
+            other_comparator: TypeEngine.Comparator[Any],
+        ) -> Tuple[OperatorType, TypeEngine[Any]]:
+            if op is operators.add and isinstance(
+                other_comparator,
+                (Concatenable.Comparator, NullType.Comparator),
+            ):
+                return operators.concat_op, self.expr.type
+            else:
+                return super()._adapt_expression(op, other_comparator)
+
+    comparator_factory: _ComparatorFactory[Any] = Comparator
+
+
+class Indexable(TypeEngineMixin):
+    """A mixin that marks a type as supporting indexing operations,
+    such as array or JSON structures.
+
+    """
+
+    class Comparator(TypeEngine.Comparator[_T]):
+        __slots__ = ()
+
+        def _setup_getitem(self, index):
+            raise NotImplementedError()
+
+        def __getitem__(self, index):
+            (
+                adjusted_op,
+                adjusted_right_expr,
+                result_type,
+            ) = self._setup_getitem(index)
+            return self.operate(
+                adjusted_op, adjusted_right_expr, result_type=result_type
+            )
+
+    comparator_factory: _ComparatorFactory[Any] = Comparator
+
+
+class String(Concatenable, TypeEngine[str]):
+    """The base for all string and character types.
+
+    In SQL, corresponds to VARCHAR.
+
+    The `length` field is usually required when the `String` type is
+    used within a CREATE TABLE statement, as VARCHAR requires a length
+    on most databases.
+
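+    E.g., an illustrative sketch using :class:`_schema.Column`::
+
+        Column("name", String(50))
+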
+    """
+
+    __visit_name__ = "string"
+
+    def __init__(
+        self,
+        length: Optional[int] = None,
+        collation: Optional[str] = None,
+    ):
+        """
+        Create a string-holding type.
+
+        :param length: optional, a length for the column for use in
+          DDL and CAST expressions.  May be safely omitted if no ``CREATE
+          TABLE`` will be issued.  Certain databases may require a
+          ``length`` for use in DDL, and will raise an exception when
+          the ``CREATE TABLE`` DDL is issued if a ``VARCHAR``
+          with no length is included.  Whether the value is
+          interpreted as bytes or characters is database specific.
+
+        :param collation: Optional, a column-level collation for
+          use in DDL and CAST expressions.  Renders using the
+          COLLATE keyword supported by SQLite, MySQL, and PostgreSQL.
+          E.g.:
+
+          .. sourcecode:: pycon+sql
+
+            >>> from sqlalchemy import cast, select, String
+            >>> print(select(cast("some string", String(collation="utf8"))))
+            {printsql}SELECT CAST(:param_1 AS VARCHAR COLLATE utf8) AS anon_1
+
+          .. note::
+
+            In most cases, the :class:`.Unicode` or :class:`.UnicodeText`
+            datatypes should be used for a :class:`_schema.Column` that expects
+            to store non-ascii data. These datatypes will ensure that the
+            correct types are used on the database.
+
+        """
+
+        self.length = length
+        self.collation = collation
+
+    def _with_collation(self, collation):
+        new_type = self.copy()
+        new_type.collation = collation
+        return new_type
+
+    def _resolve_for_literal(self, value):
+        # I was SO PROUD of my regex trick, but we don't need it.
+        # re.search(r"[^\u0000-\u007F]", value)
+
+        if value.isascii():
+            return _STRING
+        else:
+            return _UNICODE
+
+    def literal_processor(self, dialect):
+        def process(value):
+            value = value.replace("'", "''")
+
+            if dialect.identifier_preparer._double_percents:
+                value = value.replace("%", "%%")
+
+            return "'%s'" % value
+
+        return process
+
+    def bind_processor(self, dialect):
+        return None
+
+    def result_processor(self, dialect, coltype):
+        return None
+
+    @property
+    def python_type(self):
+        return str
+
+    def get_dbapi_type(self, dbapi):
+        return dbapi.STRING
+
+
+class Text(String):
+    """A variably sized string type.
+
+    In SQL, usually corresponds to CLOB or TEXT.  In general, TEXT objects
+    do not have a length; while some databases will accept a length
+    argument here, it will be rejected by others.
+
+    """
+
+    __visit_name__ = "text"
+
+
+class Unicode(String):
+    """A variable length Unicode string type.
+
+    The :class:`.Unicode` type is a :class:`.String` subclass that assumes
+    input and output strings that may contain non-ASCII characters, and for
+    some backends implies an underlying column type that is explicitly
+    supporting of non-ASCII data, such as ``NVARCHAR`` on Oracle Database and
+    SQL Server.  This will impact the output of ``CREATE TABLE`` statements and
+    ``CAST`` functions at the dialect level.
+
+    The character encoding used by the :class:`.Unicode` type that is used to
+    transmit and receive data to the database is usually determined by the
+    DBAPI itself. All modern DBAPIs accommodate non-ASCII strings but may have
+    different methods of managing database encodings; if necessary, this
+    encoding should be configured as detailed in the notes for the target DBAPI
+    in the :ref:`dialect_toplevel` section.
+
+    In modern SQLAlchemy, use of the :class:`.Unicode` datatype does not
+    imply any encoding/decoding behavior within SQLAlchemy itself.  In Python
+    3, all string objects are inherently Unicode capable, and SQLAlchemy
+    does not produce bytestring objects nor does it accommodate a DBAPI that
+    does not return Python Unicode objects in result sets for string values.
+
+    .. warning:: Some database backends, particularly SQL Server with pyodbc,
+       are known to have undesirable behaviors regarding data that is noted
+       as being of ``NVARCHAR`` type as opposed to ``VARCHAR``, including
+       datatype mismatch errors and non-use of indexes.  See the section
+       on :meth:`.DialectEvents.do_setinputsizes` for background on working
+       around unicode character issues for backends like SQL Server with
+       pyodbc as well as cx_Oracle.
+
+    .. seealso::
+
+        :class:`.UnicodeText` - unlengthed textual counterpart
+        to :class:`.Unicode`.
+
+        :meth:`.DialectEvents.do_setinputsizes`
+
+    """
+
+    __visit_name__ = "unicode"
+
+
+class UnicodeText(Text):
+    """An unbounded-length Unicode string type.
+
+    See :class:`.Unicode` for details on the unicode
+    behavior of this object.
+
+    Like :class:`.Unicode`, usage of the :class:`.UnicodeText` type implies a
+    unicode-capable type being used on the backend, such as
+    ``NCLOB``, ``NTEXT``.
+
+    """
+
+    __visit_name__ = "unicode_text"
+
+
+class Integer(HasExpressionLookup, TypeEngine[int]):
+    """A type for ``int`` integers."""
+
+    __visit_name__ = "integer"
+
+    if TYPE_CHECKING:
+
+        @util.ro_memoized_property
+        def _type_affinity(self) -> Type[Integer]: ...
+
+    def get_dbapi_type(self, dbapi):
+        return dbapi.NUMBER
+
+    @property
+    def python_type(self):
+        return int
+
+    def _resolve_for_literal(self, value):
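+        # literal values needing 32 or more bits resolve to BigInteger
+        # rather than plain Integer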
+        if value.bit_length() >= 32:
+            return _BIGINTEGER
+        else:
+            return self
+
+    def literal_processor(self, dialect):
+        def process(value):
+            return str(int(value))
+
+        return process
+
+    @util.memoized_property
+    def _expression_adaptations(self):
+        return {
+            operators.add: {
+                Date: Date,
+                Integer: self.__class__,
+                Numeric: Numeric,
+            },
+            operators.mul: {
+                Interval: Interval,
+                Integer: self.__class__,
+                Numeric: Numeric,
+            },
+            operators.truediv: {Integer: Numeric, Numeric: Numeric},
+            operators.floordiv: {Integer: self.__class__, Numeric: Numeric},
+            operators.sub: {Integer: self.__class__, Numeric: Numeric},
+        }
+
+
+class SmallInteger(Integer):
+    """A type for smaller ``int`` integers.
+
+    Typically generates a ``SMALLINT`` in DDL, and otherwise acts like
+    a normal :class:`.Integer` on the Python side.
+
+    """
+
+    __visit_name__ = "small_integer"
+
+
+class BigInteger(Integer):
+    """A type for bigger ``int`` integers.
+
+    Typically generates a ``BIGINT`` in DDL, and otherwise acts like
+    a normal :class:`.Integer` on the Python side.
+
+    """
+
+    __visit_name__ = "big_integer"
+
+
+_N = TypeVar("_N", bound=Union[decimal.Decimal, float])
+
+
+class Numeric(HasExpressionLookup, TypeEngine[_N]):
+    """Base for non-integer numeric types, such as
+    ``NUMERIC``, ``FLOAT``, ``DECIMAL``, and other variants.
+
+    The :class:`.Numeric` datatype when used directly will render DDL
+    corresponding to precision numerics if available, such as
+    ``NUMERIC(precision, scale)``.  The :class:`.Float` subclass will
+    attempt to render a floating-point datatype such as ``FLOAT(precision)``.
+
+    :class:`.Numeric` returns Python ``decimal.Decimal`` objects by default,
+    based on the default value of ``True`` for the
+    :paramref:`.Numeric.asdecimal` parameter.  If this parameter is set to
+    False, returned values are coerced to Python ``float`` objects.
+
+    The :class:`.Float` subtype, being more specific to floating point,
+    defaults the :paramref:`.Float.asdecimal` flag to False so that the
+    default Python datatype is ``float``.
+
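+    E.g., an illustrative DDL-oriented sketch::
+
+        Column("price", Numeric(precision=10, scale=2))
+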
+    .. note::
+
+        When using a :class:`.Numeric` datatype against a database type that
+        returns Python floating point values to the driver, the accuracy of the
+        decimal conversion indicated by :paramref:`.Numeric.asdecimal` may be
+        limited.   The behavior of specific numeric/floating point datatypes
+        is a product of the SQL datatype in use, the Python :term:`DBAPI`
+        in use, as well as strategies that may be present within
+        the SQLAlchemy dialect in use.   Users requiring specific precision/
+        scale are encouraged to experiment with the available datatypes
+        in order to determine the best results.
+
+    """
+
+    __visit_name__ = "numeric"
+
+    if TYPE_CHECKING:
+
+        @util.ro_memoized_property
+        def _type_affinity(self) -> Type[Numeric[_N]]: ...
+
+    _default_decimal_return_scale = 10
+
+    @overload
+    def __init__(
+        self: Numeric[decimal.Decimal],
+        precision: Optional[int] = ...,
+        scale: Optional[int] = ...,
+        decimal_return_scale: Optional[int] = ...,
+        asdecimal: Literal[True] = ...,
+    ): ...
+
+    @overload
+    def __init__(
+        self: Numeric[float],
+        precision: Optional[int] = ...,
+        scale: Optional[int] = ...,
+        decimal_return_scale: Optional[int] = ...,
+        asdecimal: Literal[False] = ...,
+    ): ...
+
+    def __init__(
+        self,
+        precision: Optional[int] = None,
+        scale: Optional[int] = None,
+        decimal_return_scale: Optional[int] = None,
+        asdecimal: bool = True,
+    ):
+        """
+        Construct a Numeric.
+
+        :param precision: the numeric precision for use in DDL ``CREATE
+          TABLE``.
+
+        :param scale: the numeric scale for use in DDL ``CREATE TABLE``.
+
+        :param asdecimal: default True.  Whether values should be
+          returned as Python ``Decimal`` objects, or as floats.  Different
+          DBAPIs return one or the other based on datatypes - the
+          :class:`.Numeric` type will ensure that return values are one
+          or the other across DBAPIs consistently.
+
+        :param decimal_return_scale: Default scale to use when converting
+         from floats to Python decimals.  Floating point values will typically
+         be much longer due to decimal inaccuracy, and most floating point
+         database types don't have a notion of "scale", so by default the
+         float type looks for the first ten decimal places when converting.
+         Specifying this value will override that length.  Types which
+         do include an explicit ".scale" value, such as the base
+         :class:`.Numeric` as well as the MySQL float types, will use the
+         value of ".scale" as the default for decimal_return_scale, if not
+         otherwise specified.
+
+        When using the ``Numeric`` type, care should be taken to ensure
+        that the asdecimal setting is appropriate for the DBAPI in use -
+        when Numeric applies a conversion from Decimal->float or float->
+        Decimal, this conversion incurs an additional performance overhead
+        for all result columns received.
+
+        DBAPIs that return Decimal natively (e.g. psycopg2) will have
+        better accuracy and higher performance with a setting of ``True``,
+        as the native translation to Decimal reduces the amount of floating-
+        point issues at play, and the Numeric type itself doesn't need
+        to apply any further conversions.  However, another DBAPI which
+        returns floats natively *will* incur an additional conversion
+        overhead, and is still subject to floating point data loss - in
+        which case ``asdecimal=False`` will at least remove the extra
+        conversion overhead.
+
+        """
+        self.precision = precision
+        self.scale = scale
+        self.decimal_return_scale = decimal_return_scale
+        self.asdecimal = asdecimal
+
+    @property
+    def _effective_decimal_return_scale(self):
+        if self.decimal_return_scale is not None:
+            return self.decimal_return_scale
+        elif getattr(self, "scale", None) is not None:
+            return self.scale
+        else:
+            return self._default_decimal_return_scale
+
+    def get_dbapi_type(self, dbapi):
+        return dbapi.NUMBER
+
+    def literal_processor(self, dialect):
+        def process(value):
+            return str(value)
+
+        return process
+
+    @property
+    def python_type(self):
+        if self.asdecimal:
+            return decimal.Decimal
+        else:
+            return float
+
+    def bind_processor(self, dialect):
+        if dialect.supports_native_decimal:
+            return None
+        else:
+            return processors.to_float
+
+    def result_processor(self, dialect, coltype):
+        if self.asdecimal:
+            if dialect.supports_native_decimal:
+                # we're a "numeric", DBAPI will give us Decimal directly
+                return None
+            else:
+                # we're a "numeric", DBAPI returns floats, convert.
+                return processors.to_decimal_processor_factory(
+                    decimal.Decimal,
+                    (
+                        self.scale
+                        if self.scale is not None
+                        else self._default_decimal_return_scale
+                    ),
+                )
+        else:
+            if dialect.supports_native_decimal:
+                return processors.to_float
+            else:
+                return None
+
+    @util.memoized_property
+    def _expression_adaptations(self):
+        return {
+            operators.mul: {
+                Interval: Interval,
+                Numeric: self.__class__,
+                Integer: self.__class__,
+            },
+            operators.truediv: {
+                Numeric: self.__class__,
+                Integer: self.__class__,
+            },
+            operators.add: {Numeric: self.__class__, Integer: self.__class__},
+            operators.sub: {Numeric: self.__class__, Integer: self.__class__},
+        }
+
+
+class Float(Numeric[_N]):
+    """Type representing floating point types, such as ``FLOAT`` or ``REAL``.
+
+    This type returns Python ``float`` objects by default, unless the
+    :paramref:`.Float.asdecimal` flag is set to ``True``, in which case they
+    are coerced to ``decimal.Decimal`` objects.
+
+    When a :paramref:`.Float.precision` is not provided in a
+    :class:`_types.Float` type, some backends may compile this type as
+    an 8 byte / 64 bit float datatype. To use a 4 byte / 32 bit float
+    datatype, a precision <= 24 can usually be provided, or the
+    :class:`_types.REAL` type can be used.
+    This is known to be the case in the PostgreSQL and MSSQL dialects,
+    which render the type as ``FLOAT``; in both cases this is an alias
+    of ``DOUBLE PRECISION``. Other third-party dialects may have similar
+    behavior.
+    """
+
+    __visit_name__ = "float"
+
+    scale = None
+
+    @overload
+    def __init__(
+        self: Float[float],
+        precision: Optional[int] = ...,
+        asdecimal: Literal[False] = ...,
+        decimal_return_scale: Optional[int] = ...,
+    ): ...
+
+    @overload
+    def __init__(
+        self: Float[decimal.Decimal],
+        precision: Optional[int] = ...,
+        asdecimal: Literal[True] = ...,
+        decimal_return_scale: Optional[int] = ...,
+    ): ...
+
+    def __init__(
+        self: Float[_N],
+        precision: Optional[int] = None,
+        asdecimal: bool = False,
+        decimal_return_scale: Optional[int] = None,
+    ):
+        r"""
+        Construct a Float.
+
+        :param precision: the numeric precision for use in DDL ``CREATE
+           TABLE``. Backends **should** attempt to ensure this precision
+           indicates a number of digits for the generic
+           :class:`_sqltypes.Float` datatype.
+
+           .. note:: For the Oracle Database backend, the
+              :paramref:`_sqltypes.Float.precision` parameter is not accepted
+              when rendering DDL, as Oracle Database does not support float precision
+              specified as a number of decimal places. Instead, use the
+              Oracle Database-specific :class:`_oracle.FLOAT` datatype and specify the
+              :paramref:`_oracle.FLOAT.binary_precision` parameter. This is new
+              in version 2.0 of SQLAlchemy.
+
+              To create a database agnostic :class:`_types.Float` that
+              separately specifies binary precision for Oracle Database, use
+              :meth:`_types.TypeEngine.with_variant` as follows::
+
+                    from sqlalchemy import Column
+                    from sqlalchemy import Float
+                    from sqlalchemy.dialects import oracle
+
+                    Column(
+                        "float_data",
+                        Float(5).with_variant(oracle.FLOAT(binary_precision=16), "oracle"),
+                    )
+
+        :param asdecimal: the same flag as that of :class:`.Numeric`, but
+          defaults to ``False``.   Note that setting this flag to ``True``
+          results in floating point conversion.
+
+        :param decimal_return_scale: Default scale to use when converting
+         from floats to Python decimals.  Floating point values will typically
+         be much longer due to decimal inaccuracy, and most floating point
+         database types don't have a notion of "scale", so by default the
+         float type looks for the first ten decimal places when converting.
+         Specifying this value will override that length.  Note that the
+         MySQL float types, which do include "scale", will use "scale"
+         as the default for decimal_return_scale, if not otherwise specified.
+
+        """  # noqa: E501
+        self.precision = precision
+        self.asdecimal = asdecimal
+        self.decimal_return_scale = decimal_return_scale
+
+    def result_processor(self, dialect, coltype):
+        if self.asdecimal:
+            return processors.to_decimal_processor_factory(
+                decimal.Decimal, self._effective_decimal_return_scale
+            )
+        elif dialect.supports_native_decimal:
+            return processors.to_float
+        else:
+            return None
+
+
+class Double(Float[_N]):
+    """A type for double ``FLOAT`` floating point types.
+
+    Typically generates a ``DOUBLE`` or ``DOUBLE_PRECISION`` in DDL,
+    and otherwise acts like a normal :class:`.Float` on the Python
+    side.
+
+    .. versionadded:: 2.0
+
+    """
+
+    __visit_name__ = "double"
+
+
+class _RenderISO8601NoT:
+    def _literal_processor_datetime(self, dialect):
+        return self._literal_processor_portion(dialect, None)
+
+    def _literal_processor_date(self, dialect):
+        return self._literal_processor_portion(dialect, 0)
+
+    def _literal_processor_time(self, dialect):
+        return self._literal_processor_portion(dialect, -1)
+
+    def _literal_processor_portion(self, dialect, _portion=None):
+        assert _portion in (None, 0, -1)
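+        # _portion selects a slice of the ISO-8601 string: 0 keeps the
+        # date part before "T", -1 keeps the time part after "T", and
+        # None keeps the full value with "T" replaced by a space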
+        if _portion is not None:
+
+            def process(value):
+                return f"""'{value.isoformat().split("T")[_portion]}'"""
+
+        else:
+
+            def process(value):
+                return f"""'{value.isoformat().replace("T", " ")}'"""
+
+        return process
+
+
+class DateTime(
+    _RenderISO8601NoT, HasExpressionLookup, TypeEngine[dt.datetime]
+):
+    """A type for ``datetime.datetime()`` objects.
+
+    Date and time types return objects from the Python ``datetime``
+    module.  Most DBAPIs have built in support for the datetime
+    module, with the noted exception of SQLite.  In the case of
+    SQLite, date and time types are stored as strings which are then
+    converted back to datetime objects when rows are returned.
+
+    For the time representation within the datetime type, some
+    backends include additional options, such as timezone support and
+    fractional seconds support.  For fractional seconds, use the
+    dialect-specific datatype, such as :class:`.mysql.TIME`.  For
+    timezone support, use at least the :class:`_types.TIMESTAMP` datatype,
+    if not the dialect-specific datatype object.
+
+    """
+
+    __visit_name__ = "datetime"
+
+    def __init__(self, timezone: bool = False):
+        """Construct a new :class:`.DateTime`.
+
+        :param timezone: boolean.  Indicates that the datetime type should
+         enable timezone support, if available on the
+         **base date/time-holding type only**.   It is recommended
+         to make use of the :class:`_types.TIMESTAMP` datatype directly when
+         using this flag, as some databases include separate generic
+         date/time-holding types distinct from the timezone-capable
+         TIMESTAMP datatype, such as Oracle Database.
+
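+        E.g., an illustrative sketch::
+
+            Column("created_at", DateTime(timezone=True))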
+
+        """
+        self.timezone = timezone
+
+    def get_dbapi_type(self, dbapi):
+        return dbapi.DATETIME
+
+    def _resolve_for_literal(self, value):
+        with_timezone = value.tzinfo is not None
+        if with_timezone and not self.timezone:
+            return DATETIME_TIMEZONE
+        else:
+            return self
+
+    def literal_processor(self, dialect):
+        return self._literal_processor_datetime(dialect)
+
+    @property
+    def python_type(self):
+        return dt.datetime
+
+    @util.memoized_property
+    def _expression_adaptations(self):
+        # Based on
+        # https://www.postgresql.org/docs/current/static/functions-datetime.html.
+
+        return {
+            operators.add: {Interval: self.__class__},
+            operators.sub: {Interval: self.__class__, DateTime: Interval},
+        }
+
+
+class Date(_RenderISO8601NoT, HasExpressionLookup, TypeEngine[dt.date]):
+    """A type for ``datetime.date()`` objects."""
+
+    __visit_name__ = "date"
+
+    def get_dbapi_type(self, dbapi):
+        return dbapi.DATETIME
+
+    @property
+    def python_type(self):
+        return dt.date
+
+    def literal_processor(self, dialect):
+        return self._literal_processor_date(dialect)
+
+    @util.memoized_property
+    def _expression_adaptations(self):
+        # Based on
+        # https://www.postgresql.org/docs/current/static/functions-datetime.html.
+
+        return {
+            operators.add: {
+                Integer: self.__class__,
+                Interval: DateTime,
+                Time: DateTime,
+            },
+            operators.sub: {
+                # date - integer = date
+                Integer: self.__class__,
+                # date - date = integer.
+                Date: Integer,
+                Interval: DateTime,
+                # date - datetime = interval,
+                # this one is not in the PG docs
+                # but works
+                DateTime: Interval,
+            },
+        }
+
+
+class Time(_RenderISO8601NoT, HasExpressionLookup, TypeEngine[dt.time]):
+    """A type for ``datetime.time()`` objects."""
+
+    __visit_name__ = "time"
+
+    def __init__(self, timezone: bool = False):
+        self.timezone = timezone
+
+    def get_dbapi_type(self, dbapi):
+        return dbapi.DATETIME
+
+    @property
+    def python_type(self):
+        return dt.time
+
+    def _resolve_for_literal(self, value):
+        with_timezone = value.tzinfo is not None
+        if with_timezone and not self.timezone:
+            return TIME_TIMEZONE
+        else:
+            return self
+
+    @util.memoized_property
+    def _expression_adaptations(self):
+        # Based on
+        # https://www.postgresql.org/docs/current/static/functions-datetime.html.
+
+        return {
+            operators.add: {Date: DateTime, Interval: self.__class__},
+            operators.sub: {Time: Interval, Interval: self.__class__},
+        }
+
+    def literal_processor(self, dialect):
+        return self._literal_processor_time(dialect)
+
+
+class _Binary(TypeEngine[bytes]):
+    """Define base behavior for binary types."""
+
+    def __init__(self, length: Optional[int] = None):
+        self.length = length
+
+    @util.ro_memoized_property
+    def _generic_type_affinity(
+        self,
+    ) -> Type[TypeEngine[bytes]]:
+        return LargeBinary
+
+    def literal_processor(self, dialect):
+        def process(value):
+            # TODO: this is useless for real world scenarios; implement
+            # real binary literals
+            value = value.decode(
+                dialect._legacy_binary_type_literal_encoding
+            ).replace("'", "''")
+            return "'%s'" % value
+
+        return process
+
+    @property
+    def python_type(self):
+        return bytes
+
+    # Python 3 - sqlite3 doesn't need the `Binary` conversion
+    # here, though pg8000 does to indicate "bytea"
+    def bind_processor(self, dialect):
+        if dialect.dbapi is None:
+            return None
+
+        DBAPIBinary = dialect.dbapi.Binary
+
+        def process(value):
+            if value is not None:
+                return DBAPIBinary(value)
+            else:
+                return None
+
+        return process
+
+    # Python 3 has native bytes() type
+    # both sqlite3 and pg8000 seem to return it,
+    # psycopg2 as of 2.5 returns 'memoryview'
+    def result_processor(self, dialect, coltype):
+        if dialect.returns_native_bytes:
+            return None
+
+        def process(value):
+            if value is not None:
+                value = bytes(value)
+            return value
+
+        return process
+
+    def coerce_compared_value(self, op, value):
+        """See :meth:`.TypeEngine.coerce_compared_value` for a description."""
+
+        if isinstance(value, str):
+            return self
+        else:
+            return super().coerce_compared_value(op, value)
+
+    def get_dbapi_type(self, dbapi):
+        return dbapi.BINARY
+
+
+class LargeBinary(_Binary):
+    """A type for large binary byte data.
+
+    The :class:`.LargeBinary` type corresponds to a large and/or unlengthed
+    binary type for the target platform, such as BLOB on MySQL and BYTEA for
+    PostgreSQL.  It also handles the necessary conversions for the DBAPI.
+
+    """
+
+    __visit_name__ = "large_binary"
+
+    def __init__(self, length: Optional[int] = None):
+        """
+        Construct a LargeBinary type.
+
+        :param length: optional, a length for the column for use in
+          DDL statements, for those binary types that accept a length,
+          such as the MySQL BLOB type.
+
+        """
+        _Binary.__init__(self, length=length)
+
+
+class SchemaType(SchemaEventTarget, TypeEngineMixin):
+    """Add capabilities to a type which allow for schema-level DDL to be
+    associated with a type.
+
+    Supports types that must be explicitly created/dropped (e.g. the
+    PostgreSQL ENUM type) as well as types that are complemented by
+    table or schema level constraints, triggers, and other rules.
+
+    :class:`.SchemaType` classes can also be targets for the
+    :meth:`.DDLEvents.before_parent_attach` and
+    :meth:`.DDLEvents.after_parent_attach` events, which fire surrounding
+    the association of the type object with a parent
+    :class:`_schema.Column`.
+
+    .. seealso::
+
+        :class:`.Enum`
+
+        :class:`.Boolean`
+
+    """
+
+    _use_schema_map = True
+
+    name: Optional[str]
+
+    def __init__(
+        self,
+        name: Optional[str] = None,
+        schema: Optional[str] = None,
+        metadata: Optional[MetaData] = None,
+        inherit_schema: bool = False,
+        quote: Optional[bool] = None,
+        _create_events: bool = True,
+        _adapted_from: Optional[SchemaType] = None,
+    ):
+        if name is not None:
+            self.name = quoted_name(name, quote)
+        else:
+            self.name = None
+        self.schema = schema
+        self.metadata = metadata
+        self.inherit_schema = inherit_schema
+        self._create_events = _create_events
+
+        if _create_events and self.metadata:
+            event.listen(
+                self.metadata,
+                "before_create",
+                util.portable_instancemethod(self._on_metadata_create),
+            )
+            event.listen(
+                self.metadata,
+                "after_drop",
+                util.portable_instancemethod(self._on_metadata_drop),
+            )
+
+        if _adapted_from:
+            self.dispatch = self.dispatch._join(_adapted_from.dispatch)
+
+    def _set_parent(self, parent, **kw):
+        # set parent hook is when this type is associated with a column.
+        # Column calls it for all SchemaEventTarget instances, either the
+        # base type and/or variants in _variant_mapping.
+
+        # we want to register a second hook to trigger when that column is
+        # associated with a table.  in that event, we and all of our variants
+        # may want to set up some state on the table such as a CheckConstraint
+        # that will conditionally render at DDL render time.
+
+        # the base SchemaType also sets up events for
+        # on_table/metadata_create/drop in this method, which is used by
+        # "native" types with a separate CREATE/DROP e.g. Postgresql.ENUM
+
+        parent._on_table_attach(util.portable_instancemethod(self._set_table))
+
+    def _variant_mapping_for_set_table(self, column):
+        if column.type._variant_mapping:
+            variant_mapping = dict(column.type._variant_mapping)
+            variant_mapping["_default"] = column.type
+        else:
+            variant_mapping = None
+        return variant_mapping
+
+    def _set_table(self, column, table):
+        if self.inherit_schema:
+            self.schema = table.schema
+        elif self.metadata and self.schema is None and self.metadata.schema:
+            self.schema = self.metadata.schema
+
+        if not self._create_events:
+            return
+
+        variant_mapping = self._variant_mapping_for_set_table(column)
+
+        event.listen(
+            table,
+            "before_create",
+            util.portable_instancemethod(
+                self._on_table_create, {"variant_mapping": variant_mapping}
+            ),
+        )
+        event.listen(
+            table,
+            "after_drop",
+            util.portable_instancemethod(
+                self._on_table_drop, {"variant_mapping": variant_mapping}
+            ),
+        )
+        if self.metadata is None:
+            # if SchemaType were created w/ a metadata argument, these
+            # events would already have been associated with that metadata
+            # and would preclude an association with table.metadata
+            event.listen(
+                table.metadata,
+                "before_create",
+                util.portable_instancemethod(
+                    self._on_metadata_create,
+                    {"variant_mapping": variant_mapping},
+                ),
+            )
+            event.listen(
+                table.metadata,
+                "after_drop",
+                util.portable_instancemethod(
+                    self._on_metadata_drop,
+                    {"variant_mapping": variant_mapping},
+                ),
+            )
+
+    def copy(self, **kw):
+        return self.adapt(
+            cast("Type[TypeEngine[Any]]", self.__class__),
+            _create_events=True,
+            metadata=(
+                kw.get("_to_metadata", self.metadata)
+                if self.metadata is not None
+                else None
+            ),
+        )
+
+    @overload
+    def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: ...
+
+    @overload
+    def adapt(
+        self, cls: Type[TypeEngineMixin], **kw: Any
+    ) -> TypeEngine[Any]: ...
+
+    def adapt(
+        self, cls: Type[Union[TypeEngine[Any], TypeEngineMixin]], **kw: Any
+    ) -> TypeEngine[Any]:
+        kw.setdefault("_create_events", False)
+        kw.setdefault("_adapted_from", self)
+        return super().adapt(cls, **kw)
+
+    def create(self, bind, checkfirst=False):
+        """Issue CREATE DDL for this type, if applicable."""
+
+        t = self.dialect_impl(bind.dialect)
+        if isinstance(t, SchemaType) and t.__class__ is not self.__class__:
+            t.create(bind, checkfirst=checkfirst)
+
+    def drop(self, bind, checkfirst=False):
+        """Issue DROP DDL for this type, if applicable."""
+
+        t = self.dialect_impl(bind.dialect)
+        if isinstance(t, SchemaType) and t.__class__ is not self.__class__:
+            t.drop(bind, checkfirst=checkfirst)
+
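+    # usage sketch (illustrative only; ``engine`` is an assumed Engine): for
+    # a type with a distinct CREATE/DROP, such as the PostgreSQL ENUM:
+    #
+    #     status_type = Enum("draft", "published", name="status_enum")
+    #     with engine.begin() as conn:
+    #         status_type.create(conn, checkfirst=True)
+    #         status_type.drop(conn, checkfirst=True)
+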
+    def _on_table_create(self, target, bind, **kw):
+        if not self._is_impl_for_variant(bind.dialect, kw):
+            return
+
+        t = self.dialect_impl(bind.dialect)
+        if isinstance(t, SchemaType) and t.__class__ is not self.__class__:
+            t._on_table_create(target, bind, **kw)
+
+    def _on_table_drop(self, target, bind, **kw):
+        if not self._is_impl_for_variant(bind.dialect, kw):
+            return
+
+        t = self.dialect_impl(bind.dialect)
+        if isinstance(t, SchemaType) and t.__class__ is not self.__class__:
+            t._on_table_drop(target, bind, **kw)
+
+    def _on_metadata_create(self, target, bind, **kw):
+        if not self._is_impl_for_variant(bind.dialect, kw):
+            return
+
+        t = self.dialect_impl(bind.dialect)
+        if isinstance(t, SchemaType) and t.__class__ is not self.__class__:
+            t._on_metadata_create(target, bind, **kw)
+
+    def _on_metadata_drop(self, target, bind, **kw):
+        if not self._is_impl_for_variant(bind.dialect, kw):
+            return
+
+        t = self.dialect_impl(bind.dialect)
+        if isinstance(t, SchemaType) and t.__class__ is not self.__class__:
+            t._on_metadata_drop(target, bind, **kw)
+
+    def _is_impl_for_variant(self, dialect, kw):
+        variant_mapping = kw.pop("variant_mapping", None)
+
+        if not variant_mapping:
+            return True
+
+        # for types that have _variant_mapping, all the impls in the map
+        # that are SchemaEventTarget subclasses get set up as event holders.
+        # this is so that constructs that need
+        # to be associated with the Table at dialect-agnostic time etc. like
+        # CheckConstraints can be set up with that table.  they then add
+        # to these constraints a DDL check_rule that among other things
+        # will check this _is_impl_for_variant() method to determine when
+        # the dialect is known that we are part of the table's DDL sequence.
+
+        # since PostgreSQL is the only DB that has ARRAY this can only
+        # be integration tested by PG-specific tests
+        def _we_are_the_impl(typ):
+            return (
+                typ is self
+                or isinstance(typ, ARRAY)
+                and typ.item_type is self  # type: ignore[comparison-overlap]
+            )
+
+        if dialect.name in variant_mapping and _we_are_the_impl(
+            variant_mapping[dialect.name]
+        ):
+            return True
+        elif dialect.name not in variant_mapping:
+            return _we_are_the_impl(variant_mapping["_default"])
+
+
+class Enum(String, SchemaType, Emulated, TypeEngine[Union[str, enum.Enum]]):
+    """Generic Enum Type.
+
+    The :class:`.Enum` type provides a set of possible string values
+    which the column is constrained towards.
+
+    The :class:`.Enum` type will make use of the backend's native "ENUM"
+    type if one is available; otherwise, it uses a VARCHAR datatype.
+    An option also exists to automatically produce a CHECK constraint
+    when the VARCHAR (so called "non-native") variant is produced;
+    see the  :paramref:`.Enum.create_constraint` flag.
+
+    The :class:`.Enum` type also provides in-Python validation of string
+    values during both read and write operations.  When reading a value
+    from the database in a result set, the string value is always checked
+    against the list of possible values and a ``LookupError`` is raised
+    if no match is found.  When passing a value to the database as a
+    plain string within a SQL statement, if the
+    :paramref:`.Enum.validate_strings` parameter is
+    set to True, a ``LookupError`` is raised for any string value that's
+    not located in the given list of possible values; note that this
+    impacts usage of LIKE expressions with enumerated values (an unusual
+    use case).
+
+    The source of enumerated values may be a list of string values, or
+    alternatively a PEP-435-compliant enumerated class.  For the purposes
+    of the :class:`.Enum` datatype, this class need only provide a
+    ``__members__`` attribute.
+
+    When using an enumerated class, the enumerated objects are used
+    both for input and output, rather than strings as is the case with
+    a plain-string enumerated type::
+
+        import enum
+        from sqlalchemy import Enum
+
+
+        class MyEnum(enum.Enum):
+            one = 1
+            two = 2
+            three = 3
+
+
+        t = Table("data", MetaData(), Column("value", Enum(MyEnum)))
+
+        connection.execute(t.insert(), {"value": MyEnum.two})
+        assert connection.scalar(t.select()) is MyEnum.two
+
+    Above, the string names of each element, e.g. "one", "two", "three",
+    are persisted to the database; the values of the Python Enum, here
+    indicated as integers, are **not** used; the value of each enum can
+    therefore be any kind of Python object whether or not it is persistable.
+
+    In order to persist the values and not the names, the
+    :paramref:`.Enum.values_callable` parameter may be used.  The value of
+    this parameter is a user-supplied callable, which is intended to be used
+    with a PEP-435-compliant enumerated class and returns a list of string
+    values to be persisted.  For a simple enumeration that uses string values,
+    a callable such as ``lambda x: [e.value for e in x]`` is sufficient.
+
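+    For example, a minimal sketch that persists the values rather than the
+    names of a string-valued enumeration (``Color`` is illustrative)::
+
+        import enum
+
+        from sqlalchemy import Enum
+
+
+        class Color(enum.Enum):
+            red = "RED"
+            green = "GREEN"
+
+
+        color_type = Enum(
+            Color, values_callable=lambda x: [e.value for e in x]
+        )
+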
+    .. seealso::
+
+        :ref:`orm_declarative_mapped_column_enums` - background on using
+        the :class:`_sqltypes.Enum` datatype with the ORM's
+        :ref:`ORM Annotated Declarative <orm_declarative_mapped_column>`
+        feature.
+
+        :class:`_postgresql.ENUM` - PostgreSQL-specific type,
+        which has additional functionality.
+
+        :class:`.mysql.ENUM` - MySQL-specific type
+
+    """
+
+    __visit_name__ = "enum"
+
+    def __init__(self, *enums: object, **kw: Any):
+        r"""Construct an enum.
+
+        Keyword arguments which don't apply to a specific backend are ignored
+        by that backend.
+
+        :param \*enums: either exactly one PEP-435 compliant enumerated type
+           or one or more string labels.
+
+        :param create_constraint: defaults to False.  When creating a
+           non-native enumerated type, also build a CHECK constraint on the
+           database against the valid values.
+
+           .. note:: it is strongly recommended that the CHECK constraint
+              have an explicit name in order to support schema-management
+              concerns.  This can be established either by setting the
+              :paramref:`.Enum.name` parameter or by setting up an
+              appropriate naming convention; see
+              :ref:`constraint_naming_conventions` for background.
+
+           .. versionchanged:: 1.4 - this flag now defaults to False, meaning
+              no CHECK constraint is generated for a non-native enumerated
+              type.
+
+        :param metadata: Associate this type directly with a ``MetaData``
+           object. For types that exist on the target database as an
+           independent schema construct (PostgreSQL), this type will be
+           created and dropped within ``create_all()`` and ``drop_all()``
+           operations. If the type is not associated with any ``MetaData``
+           object, it will associate itself with each ``Table`` in which it is
+           used, and will be created when any of those individual tables are
+           created, after a check is performed for its existence. The type is
+           only dropped when ``drop_all()`` is called for that ``Table``
+           object's metadata, however.
+
+           The value of the :paramref:`_schema.MetaData.schema` parameter of
+           the :class:`_schema.MetaData` object, if set, will be used as the
+           default value of the :paramref:`_types.Enum.schema` on this object
+           if an explicit value is not otherwise supplied.
+
+           .. versionchanged:: 1.4.12 :class:`_types.Enum` inherits the
+              :paramref:`_schema.MetaData.schema` parameter of the
+              :class:`_schema.MetaData` object if present, when passed using
+              the :paramref:`_types.Enum.metadata` parameter.
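+
+           E.g., a minimal sketch (``engine`` is illustrative)::
+
+               metadata_obj = MetaData()
+               status_type = Enum("a", "b", name="status", metadata=metadata_obj)
+
+               # the type is created / dropped as part of these calls
+               metadata_obj.create_all(engine)
+               metadata_obj.drop_all(engine)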
+
+        :param name: The name of this type. This is required for PostgreSQL
+           and any future supported database which requires an explicitly
+           named type, or an explicitly named constraint in order to generate
+           the type and/or a table that uses it. If a PEP-435 enumerated
+           class was used, its name (converted to lower case) is used by
+           default.
+
+        :param native_enum: Use the database's native ENUM type when
+           available. Defaults to True. When False, uses VARCHAR + check
+           constraint for all backends; in that case the VARCHAR length can
+           be controlled with :paramref:`.Enum.length`.  Currently "length"
+           is ignored if native_enum=True.
+
+        :param length: Allows specifying a custom length for the VARCHAR
+           when a non-native enumeration datatype is used.  By default it uses
+           the length of the longest value.
+
+           .. versionchanged:: 2.0.0 The :paramref:`.Enum.length` parameter
+              is used unconditionally for ``VARCHAR`` rendering regardless of
+              the :paramref:`.Enum.native_enum` parameter, for those backends
+              where ``VARCHAR`` is used for enumerated datatypes.
+
+
+        :param schema: Schema name of this type. For types that exist on the
+           target database as an independent schema construct (PostgreSQL),
+           this parameter specifies the named schema in which the type is
+           present.
+
+           If not present, the schema name will be taken from the
+           :class:`_schema.MetaData` collection if passed as
+           :paramref:`_types.Enum.metadata`, for a :class:`_schema.MetaData`
+           that includes the :paramref:`_schema.MetaData.schema` parameter.
+
+           .. versionchanged:: 1.4.12 :class:`_types.Enum` inherits the
+              :paramref:`_schema.MetaData.schema` parameter of the
+              :class:`_schema.MetaData` object if present, when passed using
+              the :paramref:`_types.Enum.metadata` parameter.
+
+           Otherwise, if the :paramref:`_types.Enum.inherit_schema` flag is set
+           to ``True``, the schema will be inherited from the associated
+           :class:`_schema.Table` object if any; when
+           :paramref:`_types.Enum.inherit_schema` is at its default of
+           ``False``, the owning table's schema is **not** used.
+
+
+        :param quote: Set explicit quoting preferences for the type's name.
+
+        :param inherit_schema: When ``True``, the "schema" from the owning
+           :class:`_schema.Table`
+           will be copied to the "schema" attribute of this
+           :class:`.Enum`, replacing whatever value was passed for the
+           ``schema`` attribute.   This also takes effect when using the
+           :meth:`_schema.Table.to_metadata` operation.
+
+        :param validate_strings: when True, string values that are being
+           passed to the database in a SQL statement will be checked
+           for validity against the list of enumerated values.  Unrecognized
+           values will result in a ``LookupError`` being raised.
+
+        :param values_callable: A callable which will be passed the PEP-435
+           compliant enumerated type, which should then return a list of string
+           values to be persisted. This allows for alternate usages such as
+           using the string value of an enum to be persisted to the database
+           instead of its name. The callable must return the values to be
+           persisted in the same order as iterating through the Enum's
+           ``__members__`` attribute. For example
+           ``lambda x: [i.value for i in x]``.
+
+           .. versionadded:: 1.2.3
+
+        :param sort_key_function: a Python callable which may be used as the
+           "key" argument in the Python ``sorted()`` built-in.   The SQLAlchemy
+           ORM requires that primary key columns which are mapped must
+           be sortable in some way.  When using an unsortable enumeration
+           object such as a Python 3 ``Enum`` object, this parameter may be
+           used to set a default sort key function for the objects.  By
+           default, the database value of the enumeration is used as the
+           sorting function.
+
+           .. versionadded:: 1.3.8
+
+        :param omit_aliases: A boolean that when true will remove aliases from
+           PEP-435 enums. Defaults to ``True``.
+
+           .. versionchanged:: 2.0 This parameter now defaults to True.
+
+        """
+        self._enum_init(enums, kw)
+
+    @property
+    def _enums_argument(self):
+        if self.enum_class is not None:
+            return [self.enum_class]
+        else:
+            return self.enums
+
+    def _enum_init(self, enums, kw):
+        """internal init for :class:`.Enum` and subclasses.
+
+        friendly init helper used by subclasses to remove
+        all the Enum-specific keyword arguments from kw.  Allows all
+        other arguments in kw to pass through.
+
+        """
+        self.native_enum = kw.pop("native_enum", True)
+        self.create_constraint = kw.pop("create_constraint", False)
+        self.values_callable = kw.pop("values_callable", None)
+        self._sort_key_function = kw.pop("sort_key_function", NO_ARG)
+        length_arg = kw.pop("length", NO_ARG)
+        self._omit_aliases = kw.pop("omit_aliases", True)
+        _disable_warnings = kw.pop("_disable_warnings", False)
+        values, objects = self._parse_into_values(enums, kw)
+        self._setup_for_values(values, objects, kw)
+
+        self.validate_strings = kw.pop("validate_strings", False)
+
+        if self.enums:
+            self._default_length = length = max(len(x) for x in self.enums)
+        else:
+            self._default_length = length = 0
+
+        if length_arg is not NO_ARG:
+            if (
+                not _disable_warnings
+                and length_arg is not None
+                and length_arg < length
+            ):
+                raise ValueError(
+                    "When provided, length must be larger or equal"
+                    " than the length of the longest enum value. %s < %s"
+                    % (length_arg, length)
+                )
+            length = length_arg
+
+        self._valid_lookup[None] = self._object_lookup[None] = None
+
+        super().__init__(length=length)
+
+        # assign name to the given enum class if no other name, and this
+        # enum is not an "empty" enum.  if the enum is "empty" we assume
+        # this is a template enum that will be used to generate
+        # new Enum classes.
+        if self.enum_class and values:
+            kw.setdefault("name", self.enum_class.__name__.lower())
+        SchemaType.__init__(
+            self,
+            name=kw.pop("name", None),
+            schema=kw.pop("schema", None),
+            metadata=kw.pop("metadata", None),
+            inherit_schema=kw.pop("inherit_schema", False),
+            quote=kw.pop("quote", None),
+            _create_events=kw.pop("_create_events", True),
+            _adapted_from=kw.pop("_adapted_from", None),
+        )
+
+    def _parse_into_values(self, enums, kw):
+        if not enums and "_enums" in kw:
+            enums = kw.pop("_enums")
+
+        if len(enums) == 1 and hasattr(enums[0], "__members__"):
+            self.enum_class = enums[0]
+
+            _members = self.enum_class.__members__
+
+            if self._omit_aliases is True:
+                # remove aliases
+                members = OrderedDict(
+                    (n, v) for n, v in _members.items() if v.name == n
+                )
+            else:
+                members = _members
+            if self.values_callable:
+                values = self.values_callable(self.enum_class)
+            else:
+                values = list(members)
+            objects = [members[k] for k in members]
+            return values, objects
+        else:
+            self.enum_class = None
+            return enums, enums
+
+    def _resolve_for_literal(self, value: Any) -> Enum:
+        tv = type(value)
+        typ = self._resolve_for_python_type(tv, tv, tv)
+        assert typ is not None
+        return typ
+
+    def _resolve_for_python_type(
+        self,
+        python_type: Type[Any],
+        matched_on: _MatchedOnType,
+        matched_on_flattened: Type[Any],
+    ) -> Optional[Enum]:
+        # "generic form" indicates we were placed in a type map
+        # as ``sqlalchemy.Enum(enum.Enum)`` which indicates we need to
+        # get enumerated values from the datatype
+        we_are_generic_form = self._enums_argument == [enum.Enum]
+
+        native_enum = None
+
+        def process_literal(pt):
+            # for a literal, where we need to get its contents, parse it out.
+            enum_args = get_args(pt)
+            bad_args = [arg for arg in enum_args if not isinstance(arg, str)]
+            if bad_args:
+                raise exc.ArgumentError(
+                    f"Can't create string-based Enum datatype from non-string "
+                    f"values: {', '.join(repr(x) for x in bad_args)}.  Please "
+                    f"provide an explicit Enum datatype for this Python type"
+                )
+            native_enum = False
+            return enum_args, native_enum
+
+        if not we_are_generic_form and python_type is matched_on:
+            # if we have enumerated values, and the incoming python
+            # type is exactly the one that matched in the type map,
+            # then we use these enumerated values and don't try to parse
+            # what's incoming
+            enum_args = self._enums_argument
+
+        elif is_literal(python_type):
+            enum_args, native_enum = process_literal(python_type)
+        elif is_pep695(python_type):
+            value = python_type.__value__
+            if is_pep695(value):
+                new_value = value
+                while is_pep695(new_value):
+                    new_value = new_value.__value__
+                if is_literal(new_value):
+                    value = new_value
+                    warn_deprecated(
+                        f"Mapping recursive TypeAliasType '{python_type}' "
+                        "that resolve to literal to generate an Enum is "
+                        "deprecated. SQLAlchemy 2.1 will not support this "
+                        "use case. Please avoid using recursing "
+                        "TypeAliasType.",
+                        "2.0",
+                    )
+            if not is_literal(value):
+                raise exc.ArgumentError(
+                    f"Can't associate TypeAliasType '{python_type}' to an "
+                    "Enum since it's not a direct alias of a Literal. Only "
+                    "aliases in this form `type my_alias = Literal['a', "
+                    "'b']` are supported when generating Enums."
+                )
+            enum_args, native_enum = process_literal(value)
+
+        elif isinstance(python_type, type) and issubclass(
+            python_type, enum.Enum
+        ):
+            # same for an enum.Enum
+            enum_args = [python_type]
+
+        else:
+            enum_args = self._enums_argument
+
+        # make a new Enum that looks like this one, using the resolved
+        # enumerated arguments along with this type's existing configuration
+        kw = self._make_enum_kw({})
+
+        if native_enum is False:
+            kw["native_enum"] = False
+
+        kw["length"] = NO_ARG if self.length == 0 else self.length
+        return cast(
+            Enum,
+            self._generic_type_affinity(_enums=enum_args, **kw),  # type: ignore  # noqa: E501
+        )
+
+    def _setup_for_values(self, values, objects, kw):
+        self.enums = list(values)
+
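+        # build the lookup in reverse so that, when two entries produce the
+        # same key (e.g. aliased members), the first-listed pairing wins;
+        # later dict assignments would otherwise overwrite it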
+        self._valid_lookup = dict(zip(reversed(objects), reversed(values)))
+
+        self._object_lookup = dict(zip(values, objects))
+
+        self._valid_lookup.update(
+            [
+                (value, self._valid_lookup[self._object_lookup[value]])
+                for value in values
+            ]
+        )
+
+    @property
+    def sort_key_function(self):
+        if self._sort_key_function is NO_ARG:
+            return self._db_value_for_elem
+        else:
+            return self._sort_key_function
+
+    @property
+    def native(self):
+        return self.native_enum
+
+    def _db_value_for_elem(self, elem):
+        try:
+            return self._valid_lookup[elem]
+        except KeyError as err:
+            # for unknown string values, we return as is.  While we can
+            # validate these if we wanted, that does not allow for lesser-used
+            # end-user use cases, such as using a LIKE comparison with an enum,
+            # or for an application that wishes to apply string tests to an
+            # ENUM (see [ticket:3725]).  While we can decide to differentiate
+            # here between an INSERT statement and a criteria used in a SELECT,
+            # for now we're staying conservative w/ behavioral changes (perhaps
+            # someone has a trigger that handles strings on INSERT)
+            if not self.validate_strings and isinstance(elem, str):
+                return elem
+            else:
+                raise LookupError(
+                    "'%s' is not among the defined enum values. "
+                    "Enum name: %s. Possible values: %s"
+                    % (
+                        elem,
+                        self.name,
+                        langhelpers.repr_tuple_names(self.enums),
+                    )
+                ) from err
+
+    class Comparator(String.Comparator[str]):
+        __slots__ = ()
+
+        type: String
+
+        def _adapt_expression(
+            self,
+            op: OperatorType,
+            other_comparator: TypeEngine.Comparator[Any],
+        ) -> Tuple[OperatorType, TypeEngine[Any]]:
+            op, typ = super()._adapt_expression(op, other_comparator)
+            if op is operators.concat_op:
+                typ = String(self.type.length)
+            return op, typ
+
+    comparator_factory = Comparator
+
+    def _object_value_for_elem(self, elem):
+        try:
+            return self._object_lookup[elem]
+        except KeyError as err:
+            raise LookupError(
+                "'%s' is not among the defined enum values. "
+                "Enum name: %s. Possible values: %s"
+                % (
+                    elem,
+                    self.name,
+                    langhelpers.repr_tuple_names(self.enums),
+                )
+            ) from err
+
+    def __repr__(self):
+        return util.generic_repr(
+            self,
+            additional_kw=[
+                ("native_enum", True),
+                ("create_constraint", False),
+                ("length", self._default_length),
+            ],
+            to_inspect=[Enum, SchemaType],
+        )
+
+    def as_generic(self, allow_nulltype=False):
+        try:
+            args = self.enums
+        except AttributeError:
+            raise NotImplementedError(
+                "TypeEngine.as_generic() heuristic "
+                "is undefined for types that inherit Enum but do not have "
+                "an `enums` attribute."
+            ) from None
+
+        return util.constructor_copy(
+            self, self._generic_type_affinity, *args, _disable_warnings=True
+        )
+
+    def _make_enum_kw(self, kw):
+        kw.setdefault("validate_strings", self.validate_strings)
+        if self.name:
+            kw.setdefault("name", self.name)
+        kw.setdefault("schema", self.schema)
+        kw.setdefault("inherit_schema", self.inherit_schema)
+        kw.setdefault("metadata", self.metadata)
+        kw.setdefault("native_enum", self.native_enum)
+        kw.setdefault("values_callable", self.values_callable)
+        kw.setdefault("create_constraint", self.create_constraint)
+        kw.setdefault("length", self.length)
+        kw.setdefault("omit_aliases", self._omit_aliases)
+        return kw
+
+    def adapt_to_emulated(self, impltype, **kw):
+        self._make_enum_kw(kw)
+        kw["_disable_warnings"] = True
+        kw.setdefault("_create_events", False)
+        assert "_enums" in kw
+        return impltype(**kw)
+
+    def adapt(self, cls, **kw):
+        kw["_enums"] = self._enums_argument
+        kw["_disable_warnings"] = True
+        return super().adapt(cls, **kw)
+
+    def _should_create_constraint(self, compiler, **kw):
+        if not self._is_impl_for_variant(compiler.dialect, kw):
+            return False
+        return (
+            not self.native_enum or not compiler.dialect.supports_native_enum
+        )
+
+    @util.preload_module("sqlalchemy.sql.schema")
+    def _set_table(self, column, table):
+        schema = util.preloaded.sql_schema
+        SchemaType._set_table(self, column, table)
+
+        if not self.create_constraint:
+            return
+
+        variant_mapping = self._variant_mapping_for_set_table(column)
+
+        e = schema.CheckConstraint(
+            type_coerce(column, String()).in_(self.enums),
+            name=_NONE_NAME if self.name is None else self.name,
+            _create_rule=util.portable_instancemethod(
+                self._should_create_constraint,
+                {"variant_mapping": variant_mapping},
+            ),
+            _type_bound=True,
+        )
+        assert e.table is table
+
+    def literal_processor(self, dialect):
+        parent_processor = super().literal_processor(dialect)
+
+        def process(value):
+            value = self._db_value_for_elem(value)
+            if parent_processor:
+                value = parent_processor(value)
+            return value
+
+        return process
+
+    def bind_processor(self, dialect):
+        parent_processor = super().bind_processor(dialect)
+
+        def process(value):
+            value = self._db_value_for_elem(value)
+            if parent_processor:
+                value = parent_processor(value)
+            return value
+
+        return process
+
+    def result_processor(self, dialect, coltype):
+        parent_processor = super().result_processor(dialect, coltype)
+
+        def process(value):
+            if parent_processor:
+                value = parent_processor(value)
+
+            value = self._object_value_for_elem(value)
+            return value
+
+        return process
+
+    def copy(self, **kw):
+        return SchemaType.copy(self, **kw)
+
+    @property
+    def python_type(self):
+        if self.enum_class:
+            return self.enum_class
+        else:
+            return super().python_type
+
+
+class PickleType(TypeDecorator[object]):
+    """Holds Python objects, which are serialized using pickle.
+
+    PickleType builds upon the Binary type to apply Python's
+    ``pickle.dumps()`` to incoming objects, and ``pickle.loads()`` on
+    the way out, allowing any pickleable Python object to be stored as
+    a serialized binary field.
+
+    To allow ORM change events to propagate for elements associated
+    with :class:`.PickleType`, see :ref:`mutable_toplevel`.
+
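+    E.g., a minimal sketch (``metadata`` and ``conn`` are illustrative)::
+
+        from sqlalchemy import Column, Integer, PickleType, Table
+
+        t = Table(
+            "data",
+            metadata,
+            Column("id", Integer, primary_key=True),
+            Column("obj", PickleType),
+        )
+
+        conn.execute(t.insert(), {"obj": {"any": ["picklable", "object"]}})
+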
+    """
+
+    impl = LargeBinary
+    cache_ok = True
+
+    def __init__(
+        self,
+        protocol: int = pickle.HIGHEST_PROTOCOL,
+        pickler: Any = None,
+        comparator: Optional[Callable[[Any, Any], bool]] = None,
+        impl: Optional[_TypeEngineArgument[Any]] = None,
+    ):
+        """
+        Construct a PickleType.
+
+        :param protocol: defaults to ``pickle.HIGHEST_PROTOCOL``.
+
+        :param pickler: defaults to pickle.  May be any object with
+          pickle-compatible ``dumps`` and ``loads`` methods.
+
+        :param comparator: a 2-arg callable predicate used
+          to compare values of this type.  If left as ``None``,
+          the Python "equals" operator is used to compare values.
+
+        :param impl: A binary-storing :class:`_types.TypeEngine` class or
+          instance to use in place of the default :class:`_types.LargeBinary`.
+          For example the :class:`_mysql.LONGBLOB` class may be more effective
+          when using MySQL.
+
+          .. versionadded:: 1.4.20
+
+        """
+        self.protocol = protocol
+        self.pickler = pickler or pickle
+        self.comparator = comparator
+        super().__init__()
+
+        if impl:
+            # custom impl is not necessarily a LargeBinary subclass.
+            # make an exception to typing for this
+            self.impl = to_instance(impl)  # type: ignore
+
+    def __reduce__(self):
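+        # note: custom ``pickler`` and ``impl`` arguments are not represented
+        # in the reduce tuple; an unpickled instance falls back to defaults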
+        return PickleType, (self.protocol, None, self.comparator)
+
+    def bind_processor(self, dialect):
+        impl_processor = self.impl_instance.bind_processor(dialect)
+        dumps = self.pickler.dumps
+        protocol = self.protocol
+        if impl_processor:
+            fixed_impl_processor = impl_processor
+
+            def process(value):
+                if value is not None:
+                    value = dumps(value, protocol)
+                return fixed_impl_processor(value)
+
+        else:
+
+            def process(value):
+                if value is not None:
+                    value = dumps(value, protocol)
+                return value
+
+        return process
+
+    def result_processor(self, dialect, coltype):
+        impl_processor = self.impl_instance.result_processor(dialect, coltype)
+        loads = self.pickler.loads
+        if impl_processor:
+            fixed_impl_processor = impl_processor
+
+            def process(value):
+                value = fixed_impl_processor(value)
+                if value is None:
+                    return None
+                return loads(value)
+
+        else:
+
+            def process(value):
+                if value is None:
+                    return None
+                return loads(value)
+
+        return process
+
+    def compare_values(self, x, y):
+        if self.comparator:
+            return self.comparator(x, y)
+        else:
+            return x == y
+
+
+class Boolean(SchemaType, Emulated, TypeEngine[bool]):
+    """A bool datatype.
+
+    :class:`.Boolean` typically uses BOOLEAN or SMALLINT on the DDL side,
+    and on the Python side deals in ``True`` or ``False``.
+
+    The :class:`.Boolean` datatype currently has two levels of assertion
+    that the values persisted are simple true/false values.  For all
+    backends, only the Python values ``None``, ``True``, ``False``, ``1``
+    or ``0`` are accepted as parameter values.   For those backends that
+    don't support a "native boolean" datatype, an option exists to
+    also create a CHECK constraint on the target column.
+
+    .. versionchanged:: 1.2 the :class:`.Boolean` datatype now asserts that
+       incoming Python values are already in pure boolean form.
+
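+    E.g., a minimal sketch (``metadata`` is illustrative)::
+
+        from sqlalchemy import Boolean, Column, Table
+
+        t = Table("user_account", metadata, Column("is_active", Boolean))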
+
+    """
+
+    __visit_name__ = "boolean"
+    native = True
+
+    def __init__(
+        self,
+        create_constraint: bool = False,
+        name: Optional[str] = None,
+        _create_events: bool = True,
+        _adapted_from: Optional[SchemaType] = None,
+    ):
+        """Construct a Boolean.
+
+        :param create_constraint: defaults to False.  If the boolean
+          is generated as an int/smallint, also create a CHECK constraint
+          on the table that ensures 1 or 0 as a value.
+
+          .. note:: it is strongly recommended that the CHECK constraint
+             have an explicit name in order to support schema-management
+             concerns.  This can be established either by setting the
+             :paramref:`.Boolean.name` parameter or by setting up an
+             appropriate naming convention; see
+             :ref:`constraint_naming_conventions` for background.
+
+          .. versionchanged:: 1.4 - this flag now defaults to False, meaning
+             no CHECK constraint is generated for a non-native boolean
+             type.
+
+        :param name: if a CHECK constraint is generated, specify
+          the name of the constraint.
+
+        """
+        self.create_constraint = create_constraint
+        self.name = name
+        self._create_events = _create_events
+        if _adapted_from:
+            self.dispatch = self.dispatch._join(_adapted_from.dispatch)
+
+    def copy(self, **kw):
+        # override SchemaType.copy() to not include to_metadata logic
+        return self.adapt(
+            cast("Type[TypeEngine[Any]]", self.__class__),
+            _create_events=True,
+        )
+
+    def _should_create_constraint(self, compiler, **kw):
+        if not self._is_impl_for_variant(compiler.dialect, kw):
+            return False
+        return (
+            not compiler.dialect.supports_native_boolean
+            and compiler.dialect.non_native_boolean_check_constraint
+        )
+
+    @util.preload_module("sqlalchemy.sql.schema")
+    def _set_table(self, column, table):
+        schema = util.preloaded.sql_schema
+        if not self.create_constraint:
+            return
+
+        variant_mapping = self._variant_mapping_for_set_table(column)
+
+        e = schema.CheckConstraint(
+            type_coerce(column, self).in_([0, 1]),
+            name=_NONE_NAME if self.name is None else self.name,
+            _create_rule=util.portable_instancemethod(
+                self._should_create_constraint,
+                {"variant_mapping": variant_mapping},
+            ),
+            _type_bound=True,
+        )
+        assert e.table is table
+
+    @property
+    def python_type(self):
+        return bool
+
+    _strict_bools = frozenset([None, True, False])
+
+    def _strict_as_bool(self, value):
+        if value not in self._strict_bools:
+            if not isinstance(value, int):
+                raise TypeError("Not a boolean value: %r" % (value,))
+            else:
+                raise ValueError(
+                    "Value %r is not None, True, or False" % (value,)
+                )
+        return value
+
+    def literal_processor(self, dialect):
+        compiler = dialect.statement_compiler(dialect, None)
+        true = compiler.visit_true(None)
+        false = compiler.visit_false(None)
+
+        def process(value):
+            return true if self._strict_as_bool(value) else false
+
+        return process
+
+    def bind_processor(self, dialect):
+        _strict_as_bool = self._strict_as_bool
+
+        _coerce: Union[Type[bool], Type[int]]
+
+        if dialect.supports_native_boolean:
+            _coerce = bool
+        else:
+            _coerce = int
+
+        def process(value):
+            value = _strict_as_bool(value)
+            if value is not None:
+                value = _coerce(value)
+            return value
+
+        return process
+
+    def result_processor(self, dialect, coltype):
+        if dialect.supports_native_boolean:
+            return None
+        else:
+            return processors.int_to_boolean
+
+
+class _AbstractInterval(HasExpressionLookup, TypeEngine[dt.timedelta]):
+    @util.memoized_property
+    def _expression_adaptations(self):
+        # Based on
+        # https://www.postgresql.org/docs/current/static/functions-datetime.html.
+
+        return {
+            operators.add: {
+                Date: DateTime,
+                Interval: self.__class__,
+                DateTime: DateTime,
+                Time: Time,
+            },
+            operators.sub: {Interval: self.__class__},
+            operators.mul: {Numeric: self.__class__},
+            operators.truediv: {Numeric: self.__class__},
+        }
+
+    @util.ro_non_memoized_property
+    def _type_affinity(self) -> Type[Interval]:
+        return Interval
+
+
+class Interval(Emulated, _AbstractInterval, TypeDecorator[dt.timedelta]):
+    """A type for ``datetime.timedelta()`` objects.
+
+    The Interval type deals with ``datetime.timedelta`` objects.  In PostgreSQL
+    and Oracle Database, the native ``INTERVAL`` type is used; for others, the
+    value is stored as a datetime which is relative to the "epoch"
+    (Jan. 1, 1970).
+
+    Note that the ``Interval`` type does not currently provide date arithmetic
+    operations on platforms which do not support interval types natively. Such
+    operations usually require transformation of both sides of the expression
+    (such as, conversion of both sides into integer epoch values first) which
+    currently is a manual procedure (such as via
+    :attr:`~sqlalchemy.sql.expression.func`).
+
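+    E.g., a minimal sketch (``metadata`` and ``conn`` are illustrative)::
+
+        import datetime
+
+        from sqlalchemy import Column, Interval, Table
+
+        t = Table("event", metadata, Column("duration", Interval))
+
+        conn.execute(t.insert(), {"duration": datetime.timedelta(hours=2)})
+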
+    """
+
+    impl = DateTime
+    epoch = dt.datetime.fromtimestamp(0, dt.timezone.utc).replace(tzinfo=None)
+    cache_ok = True
+
+    def __init__(
+        self,
+        native: bool = True,
+        second_precision: Optional[int] = None,
+        day_precision: Optional[int] = None,
+    ):
+        """Construct an Interval object.
+
+        :param native: when True, use the actual
+          INTERVAL type provided by the database, if
+          supported (currently PostgreSQL, Oracle Database).
+          Otherwise, represent the interval data as
+          an epoch value regardless.
+
+        :param second_precision: For native interval types
+          which support a "fractional seconds precision" parameter,
+          i.e. Oracle Database and PostgreSQL
+
+        :param day_precision: for native interval types which
+          support a "day precision" parameter, i.e. Oracle Database.
+
+        """
+        super().__init__()
+        self.native = native
+        self.second_precision = second_precision
+        self.day_precision = day_precision
+
+    class Comparator(
+        TypeDecorator.Comparator[_CT],
+        _AbstractInterval.Comparator[_CT],
+    ):
+        __slots__ = ()
+
+    comparator_factory = Comparator
+
+    @property
+    def python_type(self):
+        return dt.timedelta
+
+    def adapt_to_emulated(self, impltype, **kw):
+        return _AbstractInterval.adapt(self, impltype, **kw)
+
+    def coerce_compared_value(self, op, value):
+        return self.impl_instance.coerce_compared_value(op, value)
+
+    def bind_processor(
+        self, dialect: Dialect
+    ) -> _BindProcessorType[dt.timedelta]:
+        if TYPE_CHECKING:
+            assert isinstance(self.impl_instance, DateTime)
+        impl_processor = self.impl_instance.bind_processor(dialect)
+        epoch = self.epoch
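+        # the timedelta is persisted through the DateTime impl as an
+        # absolute datetime relative to ``epoch``; e.g. timedelta(days=1)
+        # is stored as 1970-01-02 00:00:00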
+        if impl_processor:
+            fixed_impl_processor = impl_processor
+
+            def process(
+                value: Optional[dt.timedelta],
+            ) -> Any:
+                if value is not None:
+                    dt_value = epoch + value
+                else:
+                    dt_value = None
+                return fixed_impl_processor(dt_value)
+
+        else:
+
+            def process(
+                value: Optional[dt.timedelta],
+            ) -> Any:
+                if value is not None:
+                    dt_value = epoch + value
+                else:
+                    dt_value = None
+                return dt_value
+
+        return process
+
+    def result_processor(
+        self, dialect: Dialect, coltype: Any
+    ) -> _ResultProcessorType[dt.timedelta]:
+        if TYPE_CHECKING:
+            assert isinstance(self.impl_instance, DateTime)
+        impl_processor = self.impl_instance.result_processor(dialect, coltype)
+        epoch = self.epoch
+        if impl_processor:
+            fixed_impl_processor = impl_processor
+
+            def process(value: Any) -> Optional[dt.timedelta]:
+                dt_value = fixed_impl_processor(value)
+                if dt_value is None:
+                    return None
+                return dt_value - epoch
+
+        else:
+
+            def process(value: Any) -> Optional[dt.timedelta]:
+                if value is None:
+                    return None
+                return value - epoch  # type: ignore
+
+        return process
+
+
+class JSON(Indexable, TypeEngine[Any]):
+    """Represent a SQL JSON type.
+
+    .. note::  :class:`_types.JSON`
+       is provided as a facade for vendor-specific
+       JSON types.  Since it supports JSON SQL operations, it only
+       works on backends that have an actual JSON type, currently:
+
+       * PostgreSQL - see :class:`sqlalchemy.dialects.postgresql.JSON` and
+         :class:`sqlalchemy.dialects.postgresql.JSONB` for backend-specific
+         notes
+
+       * MySQL - see
+         :class:`sqlalchemy.dialects.mysql.JSON` for backend-specific notes
+
+       * SQLite as of version 3.9 - see
+         :class:`sqlalchemy.dialects.sqlite.JSON` for backend-specific notes
+
+       * Microsoft SQL Server 2016 and later - see
+         :class:`sqlalchemy.dialects.mssql.JSON` for backend-specific notes
+
+    :class:`_types.JSON` is part of the Core in support of the growing
+    popularity of native JSON datatypes.
+
+    The :class:`_types.JSON` type stores arbitrary JSON format data, e.g.::
+
+        data_table = Table(
+            "data_table",
+            metadata,
+            Column("id", Integer, primary_key=True),
+            Column("data", JSON),
+        )
+
+        with engine.connect() as conn:
+            conn.execute(
+                data_table.insert(), {"data": {"key1": "value1", "key2": "value2"}}
+            )
+
+    **JSON-Specific Expression Operators**
+
+    The :class:`_types.JSON`
+    datatype provides these additional SQL operations:
+
+    * Keyed index operations::
+
+        data_table.c.data["some key"]
+
+    * Integer index operations::
+
+        data_table.c.data[3]
+
+    * Path index operations::
+
+        data_table.c.data[("key_1", "key_2", 5, ..., "key_n")]
+
+    * Data casters for specific JSON element types, subsequent to an index
+      or path operation being invoked::
+
+        data_table.c.data["some key"].as_integer()
+
+      .. versionadded:: 1.3.11
+
+    Additional operations may be available from the dialect-specific versions
+    of :class:`_types.JSON`, such as
+    :class:`sqlalchemy.dialects.postgresql.JSON` and
+    :class:`sqlalchemy.dialects.postgresql.JSONB` which both offer additional
+    PostgreSQL-specific operations.
+
+    **Casting JSON Elements to Other Types**
+
+    Index operations, i.e. those invoked by calling upon the expression using
+    the Python bracket operator as in ``some_column['some key']``, return an
+    expression object whose type defaults to :class:`_types.JSON`, so that
+    further JSON-oriented instructions may be called upon the result type.
+    However, it is likely more common that an index operation is expected
+    to return a specific scalar element, such as a string or integer.  In
+    order to provide access to these elements in a backend-agnostic way,
+    a series of data casters are provided:
+
+    * :meth:`.JSON.Comparator.as_string` - return the element as a string
+
+    * :meth:`.JSON.Comparator.as_boolean` - return the element as a boolean
+
+    * :meth:`.JSON.Comparator.as_float` - return the element as a float
+
+    * :meth:`.JSON.Comparator.as_integer` - return the element as an integer
+
+    These data casters are implemented by supporting dialects in order to
+    assure that comparisons to the above types will work as expected, such as::
+
+        # integer comparison
+        data_table.c.data["some_integer_key"].as_integer() == 5
+
+        # boolean comparison
+        data_table.c.data["some_boolean"].as_boolean() == True
+
+    .. versionadded:: 1.3.11 Added type-specific casters for the basic JSON
+       data element types.
+
+    .. note::
+
+        The data caster functions are new in version 1.3.11, and supersede
+        the previous documented approaches of using CAST; for reference,
+        this looked like::
+
+           from sqlalchemy import cast, type_coerce
+           from sqlalchemy import String, JSON
+
+           cast(data_table.c.data["some_key"], String) == type_coerce(55, JSON)
+
+        The above case now works directly as::
+
+            data_table.c.data["some_key"].as_integer() == 5
+
+        For details on the previous comparison approach within the 1.3.x
+        series, see the documentation for SQLAlchemy 1.2 or the included HTML
+        files in the doc/ directory of the version's distribution.
+
+    **Detecting Changes in JSON columns when using the ORM**
+
+    The :class:`_types.JSON` type, when used with the SQLAlchemy ORM, does not
+    detect in-place mutations to the structure.  In order to detect these, the
+    :mod:`sqlalchemy.ext.mutable` extension must be used, most typically
+    using the :class:`.MutableDict` class.  This extension will
+    allow "in-place" changes to the datastructure to produce events which
+    will be detected by the unit of work.  See the example at :class:`.HSTORE`
+    for a simple example involving a dictionary.
+
+    Alternatively, assigning a JSON structure to an ORM element that
+    replaces the old one will always trigger a change event.
+
+    **Support for JSON null vs. SQL NULL**
+
+    When working with NULL values, the :class:`_types.JSON` type recommends the
+    use of two specific constants in order to differentiate between a column
+    that evaluates to SQL NULL, e.g. no value, vs. the JSON-encoded string of
+    ``"null"``. To insert or select against a value that is SQL NULL, use the
+    constant :func:`.null`. This symbol may be passed as a parameter value
+    specifically when using the :class:`_types.JSON` datatype, which contains
+    special logic that interprets this symbol to mean that the column value
+    should be SQL NULL as opposed to JSON ``"null"``::
+
+        from sqlalchemy import null
+
+        conn.execute(table.insert(), {"json_value": null()})
+
+    To insert or select against a value that is JSON ``"null"``, use the
+    constant :attr:`_types.JSON.NULL`::
+
+        conn.execute(table.insert(), {"json_value": JSON.NULL})
+
+    The :class:`_types.JSON` type supports a flag
+    :paramref:`_types.JSON.none_as_null` which when set to True will result
+    in the Python constant ``None`` evaluating to the value of SQL
+    NULL, and when set to False results in the Python constant
+    ``None`` evaluating to the value of JSON ``"null"``.  The Python
+    value ``None`` may be used in conjunction with either
+    :attr:`_types.JSON.NULL` or :func:`.null` in order to indicate NULL
+    values, but care must be taken as to the value of the
+    :paramref:`_types.JSON.none_as_null` in these cases.
+
+    **Customizing the JSON Serializer**
+
+    The JSON serializer and deserializer used by :class:`_types.JSON`
+    defaults to
+    Python's ``json.dumps`` and ``json.loads`` functions; in the case of the
+    psycopg2 dialect, psycopg2 may be using its own custom loader function.
+
+    In order to affect the serializer / deserializer, they are currently
+    configurable at the :func:`_sa.create_engine` level via the
+    :paramref:`_sa.create_engine.json_serializer` and
+    :paramref:`_sa.create_engine.json_deserializer` parameters.  For example,
+    to turn off ``ensure_ascii``::
+
+        engine = create_engine(
+            "sqlite://",
+            json_serializer=lambda obj: json.dumps(obj, ensure_ascii=False),
+        )
+
+    .. versionchanged:: 1.3.7
+
+        SQLite dialect's ``json_serializer`` and ``json_deserializer``
+        parameters renamed from ``_json_serializer`` and
+        ``_json_deserializer``.
+
+    .. seealso::
+
+        :class:`sqlalchemy.dialects.postgresql.JSON`
+
+        :class:`sqlalchemy.dialects.postgresql.JSONB`
+
+        :class:`sqlalchemy.dialects.mysql.JSON`
+
+        :class:`sqlalchemy.dialects.sqlite.JSON`
+
+    """  # noqa: E501
+
+    __visit_name__ = "JSON"
+
+    hashable = False
+    NULL = util.symbol("JSON_NULL")
+    """Describe the json value of NULL.
+
+    This value is used to force the JSON value of ``"null"`` to be
+    used as the value.   A value of Python ``None`` will be recognized
+    either as SQL NULL or JSON ``"null"``, based on the setting
+    of the :paramref:`_types.JSON.none_as_null` flag; the
+    :attr:`_types.JSON.NULL`
+    constant can be used to always resolve to JSON ``"null"`` regardless
+    of this setting.  This is in contrast to the :func:`_expression.null`
+    construct,
+    which always resolves to SQL NULL.  E.g.::
+
+        from sqlalchemy import null
+        from sqlalchemy.dialects.postgresql import JSON
+
+        # will *always* insert SQL NULL
+        obj1 = MyObject(json_value=null())
+
+        # will *always* insert JSON string "null"
+        obj2 = MyObject(json_value=JSON.NULL)
+
+        session.add_all([obj1, obj2])
+        session.commit()
+
+    In order to set JSON NULL as a default value for a column, the most
+    transparent method is to use :func:`_expression.text`::
+
+        Table(
+            "my_table", metadata, Column("json_data", JSON, default=text("'null'"))
+        )
+
+    While it is possible to use :attr:`_types.JSON.NULL` in this context, the
+    :attr:`_types.JSON.NULL` value will be returned as the value of the
+    column,
+    which in the context of the ORM or other repurposing of the default
+    value, may not be desirable.  Using a SQL expression means the value
+    will be re-fetched from the database within the context of retrieving
+    generated defaults.
+
+
+    """  # noqa: E501
+
+    def __init__(self, none_as_null: bool = False):
+        """Construct a :class:`_types.JSON` type.
+
+        :param none_as_null=False: if True, persist the value ``None`` as a
+         SQL NULL value, not the JSON encoding of ``null``. Note that when this
+         flag is False, the :func:`.null` construct can still be used to
+         persist a NULL value, which may be passed directly as a parameter
+         value that is specially interpreted by the :class:`_types.JSON` type
+         as SQL NULL::
+
+             from sqlalchemy import null
+
+             conn.execute(table.insert(), {"data": null()})
+
+         .. note::
+
+              :paramref:`_types.JSON.none_as_null` does **not** apply to the
+              values passed to :paramref:`_schema.Column.default` and
+              :paramref:`_schema.Column.server_default`; a value of ``None``
+              passed for these parameters means "no default present".
+
+              Additionally, when used in SQL comparison expressions, the
+              Python value ``None`` continues to refer to SQL null, and not
+              JSON NULL.  The :paramref:`_types.JSON.none_as_null` flag refers
+              explicitly to the **persistence** of the value within an
+              INSERT or UPDATE statement.   The :attr:`_types.JSON.NULL`
+              value should be used for SQL expressions that wish to compare to
+              JSON null.
+
+         .. seealso::
+
+              :attr:`.types.JSON.NULL`
+
+        """
+        self.none_as_null = none_as_null
+
+    class JSONElementType(TypeEngine[Any]):
+        """Common base type for index / path elements in a JSON expression."""
+
+        _integer = Integer()
+        _string = String()
+
+        def string_bind_processor(
+            self, dialect: Dialect
+        ) -> Optional[_BindProcessorType[str]]:
+            return self._string._cached_bind_processor(dialect)
+
+        def string_literal_processor(
+            self, dialect: Dialect
+        ) -> Optional[_LiteralProcessorType[str]]:
+            return self._string._cached_literal_processor(dialect)
+
+        def bind_processor(self, dialect: Dialect) -> _BindProcessorType[Any]:
+            int_processor = self._integer._cached_bind_processor(dialect)
+            string_processor = self.string_bind_processor(dialect)
+
+            def process(value: Optional[Any]) -> Any:
+                if int_processor and isinstance(value, int):
+                    value = int_processor(value)
+                elif string_processor and isinstance(value, str):
+                    value = string_processor(value)
+                return value
+
+            return process
+
+        def literal_processor(
+            self, dialect: Dialect
+        ) -> _LiteralProcessorType[Any]:
+            int_processor = self._integer._cached_literal_processor(dialect)
+            string_processor = self.string_literal_processor(dialect)
+
+            def process(value: Optional[Any]) -> Any:
+                if int_processor and isinstance(value, int):
+                    value = int_processor(value)
+                elif string_processor and isinstance(value, str):
+                    value = string_processor(value)
+                else:
+                    raise NotImplementedError()
+
+                return value
+
+            return process
+
+    class JSONIndexType(JSONElementType):
+        """Placeholder for the datatype of a JSON index value.
+
+        This allows execution-time processing of JSON index values
+        for special syntaxes.
+
+        """
+
+    class JSONIntIndexType(JSONIndexType):
+        """Placeholder for the datatype of a JSON index value.
+
+        This allows execution-time processing of JSON index values
+        for special syntaxes.
+
+        """
+
+    class JSONStrIndexType(JSONIndexType):
+        """Placeholder for the datatype of a JSON index value.
+
+        This allows execution-time processing of JSON index values
+        for special syntaxes.
+
+        """
+
+    class JSONPathType(JSONElementType):
+        """Placeholder type for JSON path operations.
+
+        This allows execution-time processing of a path-based
+        index value into a specific SQL syntax.
+
+        """
+
+        __visit_name__ = "json_path"
+
+    class Comparator(Indexable.Comparator[_T], Concatenable.Comparator[_T]):
+        """Define comparison operations for :class:`_types.JSON`."""
+
+        __slots__ = ()
+
+        type: JSON
+
+        def _setup_getitem(self, index):
+            if not isinstance(index, str) and isinstance(
+                index, collections_abc.Sequence
+            ):
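+                # a non-string sequence, e.g. ("key", 1), is treated as a
+                # JSON path; it is coerced with JSONPathType and rendered
+                # with the path-getitem operator (e.g. "#>" on PostgreSQL)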
+                index = coercions.expect(
+                    roles.BinaryElementRole,
+                    index,
+                    expr=self.expr,
+                    operator=operators.json_path_getitem_op,
+                    bindparam_type=JSON.JSONPathType,
+                )
+
+                operator = operators.json_path_getitem_op
+            else:
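+                # a scalar index uses plain getitem; the bind type records
+                # whether the index was an int or a string so dialects can
+                # render the appropriate element-access syntax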
+                index = coercions.expect(
+                    roles.BinaryElementRole,
+                    index,
+                    expr=self.expr,
+                    operator=operators.json_getitem_op,
+                    bindparam_type=(
+                        JSON.JSONIntIndexType
+                        if isinstance(index, int)
+                        else JSON.JSONStrIndexType
+                    ),
+                )
+                operator = operators.json_getitem_op
+
+            return operator, index, self.type
+
+        def as_boolean(self):
+            """Consider an indexed value as boolean.
+
+            This is similar to using :class:`_sql.type_coerce`, and will
+            usually not apply a ``CAST()``.
+
+            e.g.::
+
+                stmt = select(mytable.c.json_column["some_data"].as_boolean()).where(
+                    mytable.c.json_column["some_data"].as_boolean() == True
+                )
+
+            .. versionadded:: 1.3.11
+
+            """  # noqa: E501
+            return self._binary_w_type(Boolean(), "as_boolean")
+
+        def as_string(self):
+            """Consider an indexed value as string.
+
+            This is similar to using :class:`_sql.type_coerce`, and will
+            usually not apply a ``CAST()``.
+
+            e.g.::
+
+                stmt = select(mytable.c.json_column["some_data"].as_string()).where(
+                    mytable.c.json_column["some_data"].as_string() == "some string"
+                )
+
+            .. versionadded:: 1.3.11
+
+            """  # noqa: E501
+            return self._binary_w_type(Unicode(), "as_string")
+
+        def as_integer(self):
+            """Consider an indexed value as integer.
+
+            This is similar to using :class:`_sql.type_coerce`, and will
+            usually not apply a ``CAST()``.
+
+            e.g.::
+
+                stmt = select(mytable.c.json_column["some_data"].as_integer()).where(
+                    mytable.c.json_column["some_data"].as_integer() == 5
+                )
+
+            .. versionadded:: 1.3.11
+
+            """  # noqa: E501
+            return self._binary_w_type(Integer(), "as_integer")
+
+        def as_float(self):
+            """Consider an indexed value as float.
+
+            This is similar to using :class:`_sql.type_coerce`, and will
+            usually not apply a ``CAST()``.
+
+            e.g.::
+
+                stmt = select(mytable.c.json_column["some_data"].as_float()).where(
+                    mytable.c.json_column["some_data"].as_float() == 29.75
+                )
+
+            .. versionadded:: 1.3.11
+
+            """  # noqa: E501
+            return self._binary_w_type(Float(), "as_float")
+
+        def as_numeric(self, precision, scale, asdecimal=True):
+            """Consider an indexed value as numeric/decimal.
+
+            This is similar to using :class:`_sql.type_coerce`, and will
+            usually not apply a ``CAST()``.
+
+            e.g.::
+
+                stmt = select(mytable.c.json_column["some_data"].as_numeric(10, 6)).where(
+                    mytable.c.json_column["some_data"].as_numeric(10, 6) == 29.75
+                )
+
+            .. versionadded:: 1.4.0b2
+
+            """  # noqa: E501
+            return self._binary_w_type(
+                Numeric(precision, scale, asdecimal=asdecimal), "as_numeric"
+            )
+
+        def as_json(self):
+            """Consider an indexed value as JSON.
+
+            This is similar to using :class:`_sql.type_coerce`, and will
+            usually not apply a ``CAST()``.
+
+            e.g.::
+
+                stmt = select(mytable.c.json_column["some_data"].as_json())
+
+            This is typically the default behavior of indexed elements in any
+            case.
+
+            Note that comparison of full JSON structures may not be
+            supported by all backends.
+
+            .. versionadded:: 1.3.11
+
+            """
+            return self.expr
+
+        def _binary_w_type(self, typ, method_name):
+            if not isinstance(
+                self.expr, elements.BinaryExpression
+            ) or self.expr.operator not in (
+                operators.json_getitem_op,
+                operators.json_path_getitem_op,
+            ):
+                raise exc.InvalidRequestError(
+                    "The JSON cast operator JSON.%s() only works with a JSON "
+                    "index expression e.g. col['q'].%s()"
+                    % (method_name, method_name)
+                )
+            expr = self.expr._clone()
+            expr.type = typ
+            return expr
+
+    comparator_factory = Comparator
+
+    @property
+    def python_type(self):
+        return dict
+
+    @property  # type: ignore  # mypy property bug
+    def should_evaluate_none(self):
+        """Alias of :attr:`_types.JSON.none_as_null`"""
+        return not self.none_as_null
+
+    @should_evaluate_none.setter
+    def should_evaluate_none(self, value):
+        self.none_as_null = not value
+
+    @util.memoized_property
+    def _str_impl(self):
+        return String()
+
+    def _make_bind_processor(self, string_process, json_serializer):
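+        # both closures below implement the same NULL semantics: JSON.NULL
+        # becomes Python None so that it serializes to the JSON string
+        # "null", while null() or None (with none_as_null=True)
+        # short-circuits to SQL NULL by returning None unserialized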
+        if string_process:
+
+            def process(value):
+                if value is self.NULL:
+                    value = None
+                elif isinstance(value, elements.Null) or (
+                    value is None and self.none_as_null
+                ):
+                    return None
+
+                serialized = json_serializer(value)
+                return string_process(serialized)
+
+        else:
+
+            def process(value):
+                if value is self.NULL:
+                    value = None
+                elif isinstance(value, elements.Null) or (
+                    value is None and self.none_as_null
+                ):
+                    return None
+
+                return json_serializer(value)
+
+        return process
+
+    def bind_processor(self, dialect):
+        string_process = self._str_impl.bind_processor(dialect)
+        json_serializer = dialect._json_serializer or json.dumps
+
+        return self._make_bind_processor(string_process, json_serializer)
+
+    def result_processor(self, dialect, coltype):
+        string_process = self._str_impl.result_processor(dialect, coltype)
+        json_deserializer = dialect._json_deserializer or json.loads
+
+        def process(value):
+            if value is None:
+                return None
+            if string_process:
+                value = string_process(value)
+            return json_deserializer(value)
+
+        return process
+
+
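+# Illustrative sketch (assumes a hypothetical table "t" with a JSON column
+# "data"; the names here are examples only):
+#
+#     t = Table("t", metadata, Column("data", JSON))
+#     conn.execute(t.insert(), {"data": {"key": [1, 2, 3]}})
+#     conn.scalar(select(t.c.data["key"][0].as_integer()))  # -> 1
+#
+# bind_processor() above serializes via json.dumps (or the dialect's
+# configured json_serializer); result_processor() deserializes on the way
+# back out.
+
+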
+class ARRAY(
+    SchemaEventTarget, Indexable, Concatenable, TypeEngine[Sequence[Any]]
+):
+    """Represent a SQL Array type.
+
+    .. note::  This type serves as the basis for all ARRAY operations.
+       However, currently **only the PostgreSQL backend has support for SQL
+       arrays in SQLAlchemy**. It is recommended to use the PostgreSQL-specific
+       :class:`sqlalchemy.dialects.postgresql.ARRAY` type directly when using
+       ARRAY types with PostgreSQL, as it provides additional operators
+       specific to that backend.
+
+    :class:`_types.ARRAY` is part of the Core in support of various SQL
+    standard functions such as :class:`_functions.array_agg`
+    which explicitly involve
+    arrays; however, with the exception of the PostgreSQL backend and possibly
+    some third-party dialects, no other SQLAlchemy built-in dialect has support
+    for this type.
+
+    An :class:`_types.ARRAY` type is constructed given the "type"
+    of element::
+
+        mytable = Table("mytable", metadata, Column("data", ARRAY(Integer)))
+
+    The above type represents an N-dimensional array,
+    meaning a supporting backend such as PostgreSQL will interpret values
+    with any number of dimensions automatically.   To produce an INSERT
+    construct that passes in a 1-dimensional array of integers::
+
+        connection.execute(mytable.insert(), {"data": [1, 2, 3]})
+
+    The :class:`_types.ARRAY` type can be constructed given a fixed number
+    of dimensions::
+
+        mytable = Table(
+            "mytable", metadata, Column("data", ARRAY(Integer, dimensions=2))
+        )
+
+    Sending a number of dimensions is optional, but recommended if the
+    datatype is to represent arrays of more than one dimension.  This number
+    is used:
+
+    * When emitting the type declaration itself to the database, e.g.
+      ``INTEGER[][]``
+
+    * When translating Python values to database values, and vice versa, e.g.
+      an ARRAY of :class:`.Unicode` objects uses this number to efficiently
+      access the string values inside of array structures without resorting
+      to per-row type inspection
+
+    * When used with the Python ``getitem`` accessor, the number of dimensions
+      serves to define the kind of type that the ``[]`` operator should
+      return, e.g. for an ARRAY of INTEGER with two dimensions::
+
+          >>> expr = table.c.column[5]  # returns ARRAY(Integer, dimensions=1)
+          >>> expr = expr[6]  # returns Integer
+
+    For 1-dimensional arrays, an :class:`_types.ARRAY` instance with no
+    dimension parameter will generally assume single-dimensional behaviors.
+
+    SQL expressions of type :class:`_types.ARRAY` have support for "index" and
+    "slice" behavior.  The ``[]`` operator produces expression
+    constructs which will produce the appropriate SQL, both for
+    SELECT statements::
+
+        select(mytable.c.data[5], mytable.c.data[2:7])
+
+    as well as UPDATE statements when the :meth:`_expression.Update.values`
+    method is used::
+
+        mytable.update().values(
+            {mytable.c.data[5]: 7, mytable.c.data[2:7]: [1, 2, 3]}
+        )
+
+    Indexed access is one-based by default;
+    for zero-based index conversion, set :paramref:`_types.ARRAY.zero_indexes`.
+
+    The :class:`_types.ARRAY` type also provides for the operators
+    :meth:`.types.ARRAY.Comparator.any` and
+    :meth:`.types.ARRAY.Comparator.all`. The PostgreSQL-specific version of
+    :class:`_types.ARRAY` also provides additional operators.
+
+    .. container:: topic
+
+        **Detecting Changes in ARRAY columns when using the ORM**
+
+        The :class:`_sqltypes.ARRAY` type, when used with the SQLAlchemy ORM,
+        does not detect in-place mutations to the array. In order to detect
+        these, the :mod:`sqlalchemy.ext.mutable` extension must be used, using
+        the :class:`.MutableList` class::
+
+            from sqlalchemy import ARRAY
+            from sqlalchemy.ext.mutable import MutableList
+
+
+            class SomeOrmClass(Base):
+                # ...
+
+                data = Column(MutableList.as_mutable(ARRAY(Integer)))
+
+        This extension will allow "in-place" changes to the array, such
+        as ``.append()``, to produce events which will be detected by the
+        unit of work.  Note that changes to elements **inside** the array,
+        including subarrays that are mutated in place, are **not** detected.
+
+        Alternatively, assigning a new array value to an ORM element that
+        replaces the old one will always trigger a change event.
+
+    .. seealso::
+
+        :class:`sqlalchemy.dialects.postgresql.ARRAY`
+
+    """
+
+    __visit_name__ = "ARRAY"
+
+    _is_array = True
+
+    zero_indexes = False
+    """If True, Python zero-based indexes should be interpreted as one-based
+    on the SQL expression side."""
+
+    def __init__(
+        self,
+        item_type: _TypeEngineArgument[Any],
+        as_tuple: bool = False,
+        dimensions: Optional[int] = None,
+        zero_indexes: bool = False,
+    ):
+        """Construct an :class:`_types.ARRAY`.
+
+        E.g.::
+
+          Column("myarray", ARRAY(Integer))
+
+        Arguments are:
+
+        :param item_type: The data type of items of this array. Note that
+          dimensionality is irrelevant here, so multi-dimensional arrays like
+          ``INTEGER[][]`` are constructed as ``ARRAY(Integer)``, not as
+          ``ARRAY(ARRAY(Integer))`` or such.
+
+        :param as_tuple=False: Specify whether return results
+          should be converted to tuples from lists.  This parameter is
+          not generally needed as a Python list corresponds well
+          to a SQL array.
+
+        :param dimensions: if non-None, the ARRAY will assume a fixed
+         number of dimensions.   This impacts how the array is declared
+         on the database, how it goes about interpreting Python and
+         result values, as well as how expression behavior in conjunction
+         with the "getitem" operator works.  See the description at
+         :class:`_types.ARRAY` for additional detail.
+
+        :param zero_indexes=False: when True, index values will be converted
+         between Python zero-based and SQL one-based indexes, e.g.
+         a value of one will be added to all index values before passing
+         to the database.
+
+        """
+        if isinstance(item_type, ARRAY):
+            raise ValueError(
+                "Do not nest ARRAY types; ARRAY(basetype) "
+                "handles multi-dimensional arrays of basetype"
+            )
+        if isinstance(item_type, type):
+            item_type = item_type()
+        self.item_type = item_type
+        self.as_tuple = as_tuple
+        self.dimensions = dimensions
+        self.zero_indexes = zero_indexes
+
+    class Comparator(
+        Indexable.Comparator[Sequence[Any]],
+        Concatenable.Comparator[Sequence[Any]],
+    ):
+        """Define comparison operations for :class:`_types.ARRAY`.
+
+        More operators are available on the dialect-specific form
+        of this type.  See :class:`.postgresql.ARRAY.Comparator`.
+
+        """
+
+        __slots__ = ()
+
+        type: ARRAY
+
+        def _setup_getitem(self, index):
+            arr_type = self.type
+
+            return_type: TypeEngine[Any]
+
+            if isinstance(index, slice):
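+                # a slice, e.g. col[2:7], returns an expression of the same
+                # ARRAY type; with zero_indexes the bounds are shifted to
+                # the one-based SQL convention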
+                return_type = arr_type
+                if arr_type.zero_indexes:
+                    index = slice(index.start + 1, index.stop + 1, index.step)
+                slice_ = Slice(
+                    index.start, index.stop, index.step, _name=self.expr.key
+                )
+                return operators.getitem, slice_, return_type
+            else:
+                if arr_type.zero_indexes:
+                    index += 1
+                if arr_type.dimensions is None or arr_type.dimensions == 1:
+                    return_type = arr_type.item_type
+                else:
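+                    # fixed-dimension arrays "step down" one dimension per
+                    # getitem: ARRAY(Integer, dimensions=2)[3] is typed as
+                    # ARRAY(Integer, dimensions=1)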
+                    adapt_kw = {"dimensions": arr_type.dimensions - 1}
+                    return_type = arr_type.adapt(
+                        arr_type.__class__, **adapt_kw
+                    )
+
+                return operators.getitem, index, return_type
+
+        def contains(self, *arg, **kw):
+            """``ARRAY.contains()`` not implemented for the base ARRAY type.
+            Use the dialect-specific ARRAY type.
+
+            .. seealso::
+
+                :class:`_postgresql.ARRAY` - PostgreSQL specific version.
+            """
+            raise NotImplementedError(
+                "ARRAY.contains() not implemented for the base "
+                "ARRAY type; please use the dialect-specific ARRAY type"
+            )
+
+        @util.preload_module("sqlalchemy.sql.elements")
+        def any(self, other, operator=None):
+            """Return ``other operator ANY (array)`` clause.
+
+            .. legacy:: This method is an :class:`_types.ARRAY` - specific
+                construct that is now superseded by the :func:`_sql.any_`
+                function, which features a different calling style. The
+                :func:`_sql.any_` function is also mirrored at the method level
+                via the :meth:`_sql.ColumnOperators.any_` method.
+
+            Usage of array-specific :meth:`_types.ARRAY.Comparator.any`
+            is as follows::
+
+                from sqlalchemy.sql import operators
+
+                conn.execute(
+                    select(table.c.data).where(table.c.data.any(7, operator=operators.lt))
+                )
+
+            :param other: expression to be compared
+            :param operator: an operator object from the
+             :mod:`sqlalchemy.sql.operators`
+             package, defaults to :func:`.operators.eq`.
+
+            .. seealso::
+
+                :func:`_expression.any_`
+
+                :meth:`.types.ARRAY.Comparator.all`
+
+            """  # noqa: E501
+            elements = util.preloaded.sql_elements
+            operator = operator if operator else operators.eq
+
+            arr_type = self.type
+
+            return elements.CollectionAggregate._create_any(self.expr).operate(
+                operators.mirror(operator),
+                coercions.expect(
+                    roles.BinaryElementRole,
+                    element=other,
+                    operator=operator,
+                    expr=self.expr,
+                    bindparam_type=arr_type.item_type,
+                ),
+            )
+
+        @util.preload_module("sqlalchemy.sql.elements")
+        def all(self, other, operator=None):
+            """Return ``other operator ALL (array)`` clause.
+
+            .. legacy:: This method is an :class:`_types.ARRAY` - specific
+                construct that is now superseded by the :func:`_sql.all_`
+                function, which features a different calling style. The
+                :func:`_sql.all_` function is also mirrored at the method level
+                via the :meth:`_sql.ColumnOperators.all_` method.
+
+            Usage of array-specific :meth:`_types.ARRAY.Comparator.all`
+            is as follows::
+
+                from sqlalchemy.sql import operators
+
+                conn.execute(
+                    select(table.c.data).where(table.c.data.all(7, operator=operators.lt))
+                )
+
+            :param other: expression to be compared
+            :param operator: an operator object from the
+             :mod:`sqlalchemy.sql.operators`
+             package, defaults to :func:`.operators.eq`.
+
+            .. seealso::
+
+                :func:`_expression.all_`
+
+                :meth:`.types.ARRAY.Comparator.any`
+
+            """  # noqa: E501
+            elements = util.preloaded.sql_elements
+            operator = operator if operator else operators.eq
+
+            arr_type = self.type
+
+            return elements.CollectionAggregate._create_all(self.expr).operate(
+                operators.mirror(operator),
+                coercions.expect(
+                    roles.BinaryElementRole,
+                    element=other,
+                    operator=operator,
+                    expr=self.expr,
+                    bindparam_type=arr_type.item_type,
+                ),
+            )
+
+    comparator_factory = Comparator
+
+    @property
+    def hashable(self):
+        return self.as_tuple
+
+    @property
+    def python_type(self):
+        return list
+
+    def compare_values(self, x, y):
+        return x == y
+
+    def _set_parent(self, parent, outer=False, **kw):
+        """Support SchemaEventTarget"""
+
+        if not outer and isinstance(self.item_type, SchemaEventTarget):
+            self.item_type._set_parent(parent, **kw)
+
+    def _set_parent_with_dispatch(self, parent, **kw):
+        """Support SchemaEventTarget"""
+
+        super()._set_parent_with_dispatch(parent, outer=True)
+
+        if isinstance(self.item_type, SchemaEventTarget):
+            self.item_type._set_parent_with_dispatch(parent)
+
+    def literal_processor(self, dialect):
+        item_proc = self.item_type.dialect_impl(dialect).literal_processor(
+            dialect
+        )
+        if item_proc is None:
+            return None
+
+        def to_str(elements):
+            return f"[{', '.join(elements)}]"
+
+        def process(value):
+            inner = self._apply_item_processor(
+                value, item_proc, self.dimensions, to_str
+            )
+            return inner
+
+        return process
+
+    def _apply_item_processor(self, arr, itemproc, dim, collection_callable):
+        """Helper method that can be used by bind_processor(),
+        literal_processor(), etc. to apply an item processor to elements of
+        an array value, taking into account the 'dimensions' for this
+        array type.
+
+        See the PostgreSQL ARRAY datatype for usage examples.
+
+        .. versionadded:: 2.0
+
+        """
+
+        if dim is None:
+            arr = list(arr)
+        if (
+            dim == 1
+            or dim is None
+            and (
+                # this has to be (list, tuple), or at least
+                # not hasattr('__iter__'), since Py3K strings
+                # etc. have __iter__
+                not arr
+                or not isinstance(arr[0], (list, tuple))
+            )
+        ):
+            if itemproc:
+                return collection_callable(itemproc(x) for x in arr)
+            else:
+                return collection_callable(arr)
+        else:
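+            # multi-dimensional: recurse into each sub-array with one fewer
+            # dimension, passing None elements through unchanged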
+            return collection_callable(
+                (
+                    self._apply_item_processor(
+                        x,
+                        itemproc,
+                        dim - 1 if dim is not None else None,
+                        collection_callable,
+                    )
+                    if x is not None
+                    else None
+                )
+                for x in arr
+            )
+
+
+class TupleType(TypeEngine[Tuple[Any, ...]]):
+    """Represent the composite type of a tuple."""
+
+    _is_tuple_type = True
+
+    types: List[TypeEngine[Any]]
+
+    def __init__(self, *types: _TypeEngineArgument[Any]):
+        self._fully_typed = NULLTYPE not in types
+        self.types = [
+            item_type() if isinstance(item_type, type) else item_type
+            for item_type in types
+        ]
+
+    def coerce_compared_value(
+        self, op: Optional[OperatorType], value: Any
+    ) -> TypeEngine[Any]:
+        if value is type_api._NO_VALUE_IN_LIST:
+            return super().coerce_compared_value(op, value)
+        else:
+            return TupleType(
+                *[
+                    typ.coerce_compared_value(op, elem)
+                    for typ, elem in zip(self.types, value)
+                ]
+            )
+
+    def _resolve_values_to_types(self, value: Any) -> TupleType:
+        if self._fully_typed:
+            return self
+        else:
+            return TupleType(
+                *[
+                    _resolve_value_to_type(elem) if typ is NULLTYPE else typ
+                    for typ, elem in zip(self.types, value)
+                ]
+            )
+
+    def result_processor(self, dialect, coltype):
+        raise NotImplementedError(
+            "The tuple type does not support being fetched "
+            "as a column in a result row."
+        )
+
+
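+# Illustrative sketch (hypothetical table "t"; names are examples only):
+# TupleType is the type behind tuple_() comparisons, e.g.
+#
+#     from sqlalchemy import select, tuple_
+#     stmt = select(t).where(tuple_(t.c.a, t.c.b).in_([(1, "x"), (2, "y")]))
+#
+# where coerce_compared_value() above types each position of the IN list
+# from the corresponding element of the tuple.
+
+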
+class REAL(Float[_N]):
+    """The SQL REAL type.
+
+    .. seealso::
+
+        :class:`_types.Float` - documentation for the base type.
+
+    """
+
+    __visit_name__ = "REAL"
+
+
+class FLOAT(Float[_N]):
+    """The SQL FLOAT type.
+
+    .. seealso::
+
+        :class:`_types.Float` - documentation for the base type.
+
+    """
+
+    __visit_name__ = "FLOAT"
+
+
+class DOUBLE(Double[_N]):
+    """The SQL DOUBLE type.
+
+    .. versionadded:: 2.0
+
+    .. seealso::
+
+        :class:`_types.Double` - documentation for the base type.
+
+    """
+
+    __visit_name__ = "DOUBLE"
+
+
+class DOUBLE_PRECISION(Double[_N]):
+    """The SQL DOUBLE PRECISION type.
+
+    .. versionadded:: 2.0
+
+    .. seealso::
+
+        :class:`_types.Double` - documentation for the base type.
+
+    """
+
+    __visit_name__ = "DOUBLE_PRECISION"
+
+
+class NUMERIC(Numeric[_N]):
+    """The SQL NUMERIC type.
+
+    .. seealso::
+
+        :class:`_types.Numeric` - documentation for the base type.
+
+    """
+
+    __visit_name__ = "NUMERIC"
+
+
+class DECIMAL(Numeric[_N]):
+    """The SQL DECIMAL type.
+
+    .. seealso::
+
+        :class:`_types.Numeric` - documentation for the base type.
+
+    """
+
+    __visit_name__ = "DECIMAL"
+
+
+class INTEGER(Integer):
+    """The SQL INT or INTEGER type.
+
+    .. seealso::
+
+        :class:`_types.Integer` - documentation for the base type.
+
+    """
+
+    __visit_name__ = "INTEGER"
+
+
+INT = INTEGER
+
+
+class SMALLINT(SmallInteger):
+    """The SQL SMALLINT type.
+
+    .. seealso::
+
+        :class:`_types.SmallInteger` - documentation for the base type.
+
+    """
+
+    __visit_name__ = "SMALLINT"
+
+
+class BIGINT(BigInteger):
+    """The SQL BIGINT type.
+
+    .. seealso::
+
+        :class:`_types.BigInteger` - documentation for the base type.
+
+    """
+
+    __visit_name__ = "BIGINT"
+
+
+class TIMESTAMP(DateTime):
+    """The SQL TIMESTAMP type.
+
+    :class:`_types.TIMESTAMP` datatypes have support for timezone storage on
+    some backends, such as PostgreSQL and Oracle Database.  Use the
+    :paramref:`~types.TIMESTAMP.timezone` argument in order to enable
+    "TIMESTAMP WITH TIMEZONE" for these backends.
+
+    """
+
+    __visit_name__ = "TIMESTAMP"
+
+    def __init__(self, timezone: bool = False):
+        """Construct a new :class:`_types.TIMESTAMP`.
+
+        :param timezone: boolean.  Indicates that the TIMESTAMP type should
+         enable timezone support, if available on the target database.
+         On a per-dialect basis, this is similar to "TIMESTAMP WITH TIMEZONE".
+         If the target database does not support timezones, this flag is
+         ignored.
+
+
+        """
+        super().__init__(timezone=timezone)
+
+    def get_dbapi_type(self, dbapi):
+        return dbapi.TIMESTAMP
+
+
+class DATETIME(DateTime):
+    """The SQL DATETIME type."""
+
+    __visit_name__ = "DATETIME"
+
+
+class DATE(Date):
+    """The SQL DATE type."""
+
+    __visit_name__ = "DATE"
+
+
+class TIME(Time):
+    """The SQL TIME type."""
+
+    __visit_name__ = "TIME"
+
+
+class TEXT(Text):
+    """The SQL TEXT type."""
+
+    __visit_name__ = "TEXT"
+
+
+class CLOB(Text):
+    """The CLOB type.
+
+    This type is found in Oracle Database and Informix.
+    """
+
+    __visit_name__ = "CLOB"
+
+
+class VARCHAR(String):
+    """The SQL VARCHAR type."""
+
+    __visit_name__ = "VARCHAR"
+
+
+class NVARCHAR(Unicode):
+    """The SQL NVARCHAR type."""
+
+    __visit_name__ = "NVARCHAR"
+
+
+class CHAR(String):
+    """The SQL CHAR type."""
+
+    __visit_name__ = "CHAR"
+
+
+class NCHAR(Unicode):
+    """The SQL NCHAR type."""
+
+    __visit_name__ = "NCHAR"
+
+
+class BLOB(LargeBinary):
+    """The SQL BLOB type."""
+
+    __visit_name__ = "BLOB"
+
+
+class BINARY(_Binary):
+    """The SQL BINARY type."""
+
+    __visit_name__ = "BINARY"
+
+
+class VARBINARY(_Binary):
+    """The SQL VARBINARY type."""
+
+    __visit_name__ = "VARBINARY"
+
+
+class BOOLEAN(Boolean):
+    """The SQL BOOLEAN type."""
+
+    __visit_name__ = "BOOLEAN"
+
+
+class NullType(TypeEngine[None]):
+    """An unknown type.
+
+    :class:`.NullType` is used as a default type for those cases where
+    a type cannot be determined, including:
+
+    * During table reflection, when the type of a column is not recognized
+      by the :class:`.Dialect`
+    * When constructing SQL expressions using plain Python objects of
+      unknown types (e.g. ``somecolumn == my_special_object``)
+    * When a new :class:`_schema.Column` is created,
+      and the given type is passed
+      as ``None`` or is not passed at all.
+
+    The :class:`.NullType` can be used within SQL expression invocation
+    without issue; it simply has no behavior either at the expression
+    construction level or at the bind-parameter/result processing level.
+    :class:`.NullType` will result in a :exc:`.CompileError` if the compiler
+    is asked to render the type itself, such as if it is used in a
+    :func:`.cast` operation or within a schema creation operation such as that
+    invoked by :meth:`_schema.MetaData.create_all` or the
+    :class:`.CreateTable`
+    construct.
+
+    """
+
+    __visit_name__ = "null"
+
+    _isnull = True
+
+    def literal_processor(self, dialect):
+        return None
+
+    class Comparator(TypeEngine.Comparator[_T]):
+        __slots__ = ()
+
+        def _adapt_expression(
+            self,
+            op: OperatorType,
+            other_comparator: TypeEngine.Comparator[Any],
+        ) -> Tuple[OperatorType, TypeEngine[Any]]:
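+            # two NULL-typed operands (or a non-commutative operator) keep
+            # NullType; otherwise flip to the other comparator so the
+            # expression picks up that side's type instead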
+            if isinstance(
+                other_comparator, NullType.Comparator
+            ) or not operators.is_commutative(op):
+                return op, self.expr.type
+            else:
+                return other_comparator._adapt_expression(op, self)
+
+    comparator_factory = Comparator
+
+
+class TableValueType(HasCacheKey, TypeEngine[Any]):
+    """Refers to a table value type."""
+
+    _is_table_value = True
+
+    _traverse_internals = [
+        ("_elements", InternalTraversal.dp_clauseelement_list),
+    ]
+
+    def __init__(self, *elements: Union[str, _ColumnExpressionArgument[Any]]):
+        self._elements = [
+            coercions.expect(roles.StrAsPlainColumnRole, elem)
+            for elem in elements
+        ]
+
+
+class MatchType(Boolean):
+    """Refers to the return type of the MATCH operator.
+
+    As the :meth:`.ColumnOperators.match` is probably the most open-ended
+    operator in generic SQLAlchemy Core, we can't assume the return type
+    at SQL evaluation time, as MySQL returns a floating point, not a boolean,
+    and other backends might do something different.    So this type
+    acts as a placeholder, currently subclassing :class:`.Boolean`.
+    The type allows dialects to inject result-processing functionality
+    if needed, and on MySQL will return floating-point values.
+
+    """
+
+
+_UUID_RETURN = TypeVar("_UUID_RETURN", str, _python_UUID)
+
+
+class Uuid(Emulated, TypeEngine[_UUID_RETURN]):
+    """Represent a database agnostic UUID datatype.
+
+    For backends that have no "native" UUID datatype, the value will
+    make use of ``CHAR(32)`` and store the UUID as a 32-character alphanumeric
+    hex string.
+
+    For backends which are known to support ``UUID`` directly or a similar
+    uuid-storing datatype such as SQL Server's ``UNIQUEIDENTIFIER``, a
+    "native" mode, enabled by default, allows these types to be used on
+    those backends.
+
+    In its default mode of use, the :class:`_sqltypes.Uuid` datatype expects
+    **Python uuid objects**, from the Python
+    `uuid <https://docs.python.org/3/library/uuid.html>`_
+    module::
+
+        import uuid
+
+        from sqlalchemy import Uuid
+        from sqlalchemy import Table, Column, MetaData, String
+
+
+        metadata_obj = MetaData()
+
+        t = Table(
+            "t",
+            metadata_obj,
+            Column("uuid_data", Uuid, primary_key=True),
+            Column("other_data", String),
+        )
+
+        with engine.begin() as conn:
+            conn.execute(
+                t.insert(), {"uuid_data": uuid.uuid4(), "other_data": "some data"}
+            )
+
+    To have the :class:`_sqltypes.Uuid` datatype work with string-based
+    UUID values (e.g. 32 character hexadecimal strings), pass the
+    :paramref:`_sqltypes.Uuid.as_uuid` parameter with the value ``False``.
+
+    .. versionadded:: 2.0
+
+    .. seealso::
+
+        :class:`_sqltypes.UUID` - represents exactly the ``UUID`` datatype
+        without any backend-agnostic behaviors.
+
+    """  # noqa: E501
+
+    __visit_name__ = "uuid"
+
+    collation: Optional[str] = None
+
+    @overload
+    def __init__(
+        self: Uuid[_python_UUID],
+        as_uuid: Literal[True] = ...,
+        native_uuid: bool = ...,
+    ): ...
+
+    @overload
+    def __init__(
+        self: Uuid[str],
+        as_uuid: Literal[False] = ...,
+        native_uuid: bool = ...,
+    ): ...
+
+    def __init__(self, as_uuid: bool = True, native_uuid: bool = True):
+        """Construct a :class:`_sqltypes.Uuid` type.
+
+        :param as_uuid=True: if True, values will be interpreted
+         as Python uuid objects, converting to/from string via the
+         DBAPI.
+
+         .. versionchanged:: 2.0 ``as_uuid`` now defaults to ``True``.
+
+        :param native_uuid=True: if True, backends that support either the
+         ``UUID`` datatype directly, or a UUID-storing datatype (such as
+         SQL Server's ``UNIQUEIDENTIFIER``) will make use of that type on
+         those backends.   If False, a ``CHAR(32)`` datatype will be used
+         for all backends regardless of native support.
+
+        """
+        self.as_uuid = as_uuid
+        self.native_uuid = native_uuid
+
+    @property
+    def python_type(self):
+        return _python_UUID if self.as_uuid else str
+
+    @property
+    def native(self):
+        return self.native_uuid
+
+    def coerce_compared_value(self, op, value):
+        """See :meth:`.TypeEngine.coerce_compared_value` for a description."""
+
+        if isinstance(value, str):
+            return self
+        else:
+            return super().coerce_compared_value(op, value)
+
+    def bind_processor(self, dialect):
+        character_based_uuid = (
+            not dialect.supports_native_uuid or not self.native_uuid
+        )
+
+        if character_based_uuid:
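+            # no usable native UUID type: values travel to the DBAPI as
+            # 32-character hex strings (UUID.hex for uuid objects, hyphens
+            # stripped for strings)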
+            if self.as_uuid:
+
+                def process(value):
+                    if value is not None:
+                        value = value.hex
+                    return value
+
+                return process
+            else:
+
+                def process(value):
+                    if value is not None:
+                        value = value.replace("-", "")
+                    return value
+
+                return process
+        else:
+            return None
+
+    def result_processor(self, dialect, coltype):
+        character_based_uuid = (
+            not dialect.supports_native_uuid or not self.native_uuid
+        )
+
+        if character_based_uuid:
+            if self.as_uuid:
+
+                def process(value):
+                    if value is not None:
+                        value = _python_UUID(value)
+                    return value
+
+                return process
+            else:
+
+                def process(value):
+                    if value is not None:
+                        value = str(_python_UUID(value))
+                    return value
+
+                return process
+        else:
+            if not self.as_uuid:
+
+                def process(value):
+                    if value is not None:
+                        value = str(value)
+                    return value
+
+                return process
+            else:
+                return None
+
+    def literal_processor(self, dialect):
+        character_based_uuid = (
+            not dialect.supports_native_uuid or not self.native_uuid
+        )
+
+        if not self.as_uuid:
+
+            def process(value):
+                return f"""'{value.replace("-", "").replace("'", "''")}'"""
+
+            return process
+        else:
+            if character_based_uuid:
+
+                def process(value):
+                    return f"""'{value.hex}'"""
+
+                return process
+            else:
+
+                def process(value):
+                    return f"""'{str(value).replace("'", "''")}'"""
+
+                return process
+
+
+class UUID(Uuid[_UUID_RETURN], type_api.NativeForEmulated):
+    """Represent the SQL UUID type.
+
+    This is the SQL-native form of the :class:`_types.Uuid` database agnostic
+    datatype, and is backwards compatible with the previous PostgreSQL-only
+    version of ``UUID``.
+
+    The :class:`_sqltypes.UUID` datatype only works on databases that have a
+    SQL datatype named ``UUID``. It will not function for backends which don't
+    have this exact-named type, including SQL Server. For backend-agnostic UUID
+    values with native support, including for SQL Server's ``UNIQUEIDENTIFIER``
+    datatype, use the :class:`_sqltypes.Uuid` datatype.
+
+    .. versionadded:: 2.0
+
+    .. seealso::
+
+        :class:`_sqltypes.Uuid`
+
+    """
+
+    __visit_name__ = "UUID"
+
+    @overload
+    def __init__(self: UUID[_python_UUID], as_uuid: Literal[True] = ...): ...
+
+    @overload
+    def __init__(self: UUID[str], as_uuid: Literal[False] = ...): ...
+
+    def __init__(self, as_uuid: bool = True):
+        """Construct a :class:`_sqltypes.UUID` type.
+
+
+        :param as_uuid=True: if True, values will be interpreted
+         as Python uuid objects, converting to/from string via the
+         DBAPI.
+
+         .. versionchanged:: 2.0 ``as_uuid`` now defaults to ``True``.
+
+        """
+        self.as_uuid = as_uuid
+        self.native_uuid = True
+
+    @classmethod
+    def adapt_emulated_to_native(cls, impl, **kw):
+        kw.setdefault("as_uuid", impl.as_uuid)
+        return cls(**kw)
+
+
+NULLTYPE = NullType()
+BOOLEANTYPE = Boolean()
+STRINGTYPE = String()
+INTEGERTYPE = Integer()
+NUMERICTYPE: Numeric[decimal.Decimal] = Numeric()
+MATCHTYPE = MatchType()
+TABLEVALUE = TableValueType()
+DATETIME_TIMEZONE = DateTime(timezone=True)
+TIME_TIMEZONE = Time(timezone=True)
+_BIGINTEGER = BigInteger()
+_DATETIME = DateTime()
+_TIME = Time()
+_STRING = String()
+_UNICODE = Unicode()
+
+_type_map: Dict[Type[Any], TypeEngine[Any]] = {
+    int: Integer(),
+    float: Float(),
+    bool: BOOLEANTYPE,
+    _python_UUID: Uuid(),
+    decimal.Decimal: Numeric(),
+    dt.date: Date(),
+    dt.datetime: _DATETIME,
+    dt.time: _TIME,
+    dt.timedelta: Interval(),
+    type(None): NULLTYPE,
+    bytes: LargeBinary(),
+    str: _STRING,
+    enum.Enum: Enum(enum.Enum),
+    Literal: Enum(enum.Enum),  # type: ignore[dict-item]
+}
+
+
+_type_map_get = _type_map.get
+
+
+def _resolve_value_to_type(value: Any) -> TypeEngine[Any]:
+    _result_type = _type_map_get(type(value), False)
+
+    if _result_type is False:
+        _result_type = getattr(value, "__sa_type_engine__", False)
+
+    if _result_type is False:
+        # use inspect() to detect SQLAlchemy built-in
+        # objects.
+        insp = inspection.inspect(value, False)
+        if (
+            insp is not None
+            and
+            # foil mock.Mock() and other impostors by ensuring
+            # the inspection target itself self-inspects
+            insp.__class__ in inspection._registrars
+        ):
+            raise exc.ArgumentError(
+                "Object %r is not legal as a SQL literal value" % (value,)
+            )
+        return NULLTYPE
+    else:
+        return _result_type._resolve_for_literal(  # type: ignore [union-attr]
+            value
+        )
+
+
+# back-assign to type_api
+type_api.BOOLEANTYPE = BOOLEANTYPE
+type_api.STRINGTYPE = STRINGTYPE
+type_api.INTEGERTYPE = INTEGERTYPE
+type_api.NULLTYPE = NULLTYPE
+type_api.NUMERICTYPE = NUMERICTYPE
+type_api.MATCHTYPE = MATCHTYPE
+type_api.INDEXABLE = INDEXABLE = Indexable
+type_api.TABLEVALUE = TABLEVALUE
+type_api._resolve_value_to_type = _resolve_value_to_type
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/traversals.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/traversals.py
new file mode 100644
index 00000000..13ad2899
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/traversals.py
@@ -0,0 +1,1024 @@
+# sql/traversals.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+from __future__ import annotations
+
+from collections import deque
+import collections.abc as collections_abc
+import itertools
+from itertools import zip_longest
+import operator
+import typing
+from typing import Any
+from typing import Callable
+from typing import Deque
+from typing import Dict
+from typing import Iterable
+from typing import Optional
+from typing import Set
+from typing import Tuple
+from typing import Type
+
+from . import operators
+from .cache_key import HasCacheKey
+from .visitors import _TraverseInternalsType
+from .visitors import anon_map
+from .visitors import ExternallyTraversible
+from .visitors import HasTraversalDispatch
+from .visitors import HasTraverseInternals
+from .. import util
+from ..util import langhelpers
+from ..util.typing import Self
+
+
+SKIP_TRAVERSE = util.symbol("skip_traverse")
+COMPARE_FAILED = False
+COMPARE_SUCCEEDED = True
+
+
+def compare(obj1: Any, obj2: Any, **kw: Any) -> bool:
+    strategy: TraversalComparatorStrategy
+    if kw.get("use_proxies", False):
+        strategy = ColIdentityComparatorStrategy()
+    else:
+        strategy = TraversalComparatorStrategy()
+
+    return strategy.compare(obj1, obj2, **kw)
+
+
+def _preconfigure_traversals(target_hierarchy: Type[Any]) -> None:
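+    # walk the subclass hierarchy up front and build the generated
+    # cache-attribute, copy-internals and get-children dispatchers, so the
+    # codegen cost is paid once rather than on first use of each class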
+    for cls in util.walk_subclasses(target_hierarchy):
+        if hasattr(cls, "_generate_cache_attrs") and hasattr(
+            cls, "_traverse_internals"
+        ):
+            cls._generate_cache_attrs()
+            _copy_internals.generate_dispatch(
+                cls,
+                cls._traverse_internals,
+                "_generated_copy_internals_traversal",
+            )
+            _get_children.generate_dispatch(
+                cls,
+                cls._traverse_internals,
+                "_generated_get_children_traversal",
+            )
+
+
+class HasShallowCopy(HasTraverseInternals):
+    """Attribute-wide operations that are useful for classes that use
+    ``__slots__`` and therefore can't operate on their attributes in a
+    dictionary.
+
+
+    """
+
+    __slots__ = ()
+
+    if typing.TYPE_CHECKING:
+
+        def _generated_shallow_copy_traversal(self, other: Self) -> None: ...
+
+        def _generated_shallow_from_dict_traversal(
+            self, d: Dict[str, Any]
+        ) -> None: ...
+
+        def _generated_shallow_to_dict_traversal(self) -> Dict[str, Any]: ...
+
+    @classmethod
+    def _generate_shallow_copy(
+        cls,
+        internal_dispatch: _TraverseInternalsType,
+        method_name: str,
+    ) -> Callable[[Self, Self], None]:
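+        # codegen: produce a per-class method of the form
+        #
+        #   def _generated_shallow_copy_traversal(self, other):
+        #       other.attr_a = self.attr_a
+        #       other.attr_b = self.attr_b
+        #
+        # one line per traversed attribute, exec'd into a function object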
+        code = "\n".join(
+            f"    other.{attrname} = self.{attrname}"
+            for attrname, _ in internal_dispatch
+        )
+        meth_text = f"def {method_name}(self, other):\n{code}\n"
+        return langhelpers._exec_code_in_env(meth_text, {}, method_name)
+
+    @classmethod
+    def _generate_shallow_to_dict(
+        cls,
+        internal_dispatch: _TraverseInternalsType,
+        method_name: str,
+    ) -> Callable[[Self], Dict[str, Any]]:
+        code = ",\n".join(
+            f"    '{attrname}': self.{attrname}"
+            for attrname, _ in internal_dispatch
+        )
+        meth_text = f"def {method_name}(self):\n    return {{{code}}}\n"
+        return langhelpers._exec_code_in_env(meth_text, {}, method_name)
+
+    @classmethod
+    def _generate_shallow_from_dict(
+        cls,
+        internal_dispatch: _TraverseInternalsType,
+        method_name: str,
+    ) -> Callable[[Self, Dict[str, Any]], None]:
+        code = "\n".join(
+            f"    self.{attrname} = d['{attrname}']"
+            for attrname, _ in internal_dispatch
+        )
+        meth_text = f"def {method_name}(self, d):\n{code}\n"
+        return langhelpers._exec_code_in_env(meth_text, {}, method_name)
+
+    def _shallow_from_dict(self, d: Dict[str, Any]) -> None:
+        cls = self.__class__
+
+        shallow_from_dict: Callable[[HasShallowCopy, Dict[str, Any]], None]
+        try:
+            shallow_from_dict = cls.__dict__[
+                "_generated_shallow_from_dict_traversal"
+            ]
+        except KeyError:
+            shallow_from_dict = self._generate_shallow_from_dict(
+                cls._traverse_internals,
+                "_generated_shallow_from_dict_traversal",
+            )
+
+            cls._generated_shallow_from_dict_traversal = shallow_from_dict  # type: ignore  # noqa: E501
+
+        shallow_from_dict(self, d)
+
+    def _shallow_to_dict(self) -> Dict[str, Any]:
+        cls = self.__class__
+
+        shallow_to_dict: Callable[[HasShallowCopy], Dict[str, Any]]
+
+        try:
+            shallow_to_dict = cls.__dict__[
+                "_generated_shallow_to_dict_traversal"
+            ]
+        except KeyError:
+            shallow_to_dict = self._generate_shallow_to_dict(
+                cls._traverse_internals, "_generated_shallow_to_dict_traversal"
+            )
+
+            cls._generated_shallow_to_dict_traversal = shallow_to_dict  # type: ignore  # noqa: E501
+        return shallow_to_dict(self)
+
+    def _shallow_copy_to(self, other: Self) -> None:
+        cls = self.__class__
+
+        shallow_copy: Callable[[Self, Self], None]
+        try:
+            shallow_copy = cls.__dict__["_generated_shallow_copy_traversal"]
+        except KeyError:
+            shallow_copy = self._generate_shallow_copy(
+                cls._traverse_internals, "_generated_shallow_copy_traversal"
+            )
+
+            cls._generated_shallow_copy_traversal = shallow_copy  # type: ignore  # noqa: E501
+        shallow_copy(self, other)
+
+    def _clone(self, **kw: Any) -> Self:
+        """Create a shallow copy"""
+        c = self.__class__.__new__(self.__class__)
+        self._shallow_copy_to(c)
+        return c
+
+
+class GenerativeOnTraversal(HasShallowCopy):
+    """Supplies Generative behavior, making use of traversals to perform
+    the shallow copy.
+
+    .. seealso::
+
+        :class:`sqlalchemy.sql.base.Generative`
+
+
+    """
+
+    __slots__ = ()
+
+    def _generate(self) -> Self:
+        cls = self.__class__
+        s = cls.__new__(cls)
+        self._shallow_copy_to(s)
+        return s
+
+
+def _clone(element, **kw):
+    return element._clone()
+
+
+class HasCopyInternals(HasTraverseInternals):
+    __slots__ = ()
+
+    def _clone(self, **kw):
+        raise NotImplementedError()
+
+    def _copy_internals(
+        self, *, omit_attrs: Iterable[str] = (), **kw: Any
+    ) -> None:
+        """Reassign internal elements to be clones of themselves.
+
+        Called during a copy-and-traverse operation on newly
+        shallow-copied elements to create a deep copy.
+
+        The given clone function should be used, which may apply
+        additional transformations to the element (i.e. replacement
+        traversal, cloned traversal, annotations).
+
+        """
+
+        try:
+            traverse_internals = self._traverse_internals
+        except AttributeError:
+            # user-defined classes may not have a _traverse_internals
+            return
+
+        for attrname, obj, meth in _copy_internals.run_generated_dispatch(
+            self, traverse_internals, "_generated_copy_internals_traversal"
+        ):
+            if attrname in omit_attrs:
+                continue
+
+            if obj is not None:
+                result = meth(attrname, self, obj, **kw)
+                if result is not None:
+                    setattr(self, attrname, result)
+
+
+class _CopyInternalsTraversal(HasTraversalDispatch):
+    """Generate a _copy_internals internal traversal dispatch for classes
+    with a _traverse_internals collection."""
+
+    def visit_clauseelement(
+        self, attrname, parent, element, clone=_clone, **kw
+    ):
+        return clone(element, **kw)
+
+    def visit_clauseelement_list(
+        self, attrname, parent, element, clone=_clone, **kw
+    ):
+        return [clone(clause, **kw) for clause in element]
+
+    def visit_clauseelement_tuple(
+        self, attrname, parent, element, clone=_clone, **kw
+    ):
+        return tuple([clone(clause, **kw) for clause in element])
+
+    def visit_executable_options(
+        self, attrname, parent, element, clone=_clone, **kw
+    ):
+        return tuple([clone(clause, **kw) for clause in element])
+
+    def visit_clauseelement_unordered_set(
+        self, attrname, parent, element, clone=_clone, **kw
+    ):
+        return {clone(clause, **kw) for clause in element}
+
+    def visit_clauseelement_tuples(
+        self, attrname, parent, element, clone=_clone, **kw
+    ):
+        return [
+            tuple(clone(tup_elem, **kw) for tup_elem in elem)
+            for elem in element
+        ]
+
+    def visit_string_clauseelement_dict(
+        self, attrname, parent, element, clone=_clone, **kw
+    ):
+        return {key: clone(value, **kw) for key, value in element.items()}
+
+    def visit_setup_join_tuple(
+        self, attrname, parent, element, clone=_clone, **kw
+    ):
+        return tuple(
+            (
+                clone(target, **kw) if target is not None else None,
+                clone(onclause, **kw) if onclause is not None else None,
+                clone(from_, **kw) if from_ is not None else None,
+                flags,
+            )
+            for (target, onclause, from_, flags) in element
+        )
+
+    def visit_memoized_select_entities(self, attrname, parent, element, **kw):
+        return self.visit_clauseelement_tuple(attrname, parent, element, **kw)
+
+    def visit_dml_ordered_values(
+        self, attrname, parent, element, clone=_clone, **kw
+    ):
+        # sequence of 2-tuples
+        return [
+            (
+                (
+                    clone(key, **kw)
+                    if hasattr(key, "__clause_element__")
+                    else key
+                ),
+                clone(value, **kw),
+            )
+            for key, value in element
+        ]
+
+    def visit_dml_values(self, attrname, parent, element, clone=_clone, **kw):
+        return {
+            (
+                clone(key, **kw) if hasattr(key, "__clause_element__") else key
+            ): clone(value, **kw)
+            for key, value in element.items()
+        }
+
+    def visit_dml_multi_values(
+        self, attrname, parent, element, clone=_clone, **kw
+    ):
+        # sequence of sequences, each sequence contains a list/dict/tuple
+
+        def copy(elem):
+            if isinstance(elem, (list, tuple)):
+                return [
+                    (
+                        clone(value, **kw)
+                        if hasattr(value, "__clause_element__")
+                        else value
+                    )
+                    for value in elem
+                ]
+            elif isinstance(elem, dict):
+                return {
+                    (
+                        clone(key, **kw)
+                        if hasattr(key, "__clause_element__")
+                        else key
+                    ): (
+                        clone(value, **kw)
+                        if hasattr(value, "__clause_element__")
+                        else value
+                    )
+                    for key, value in elem.items()
+                }
+            else:
+                # TODO: use abc classes
+                assert False
+
+        return [
+            [copy(sub_element) for sub_element in sequence]
+            for sequence in element
+        ]
+
+    def visit_propagate_attrs(
+        self, attrname, parent, element, clone=_clone, **kw
+    ):
+        return element
+
+
+_copy_internals = _CopyInternalsTraversal()
+
+
+def _flatten_clauseelement(element):
+    while hasattr(element, "__clause_element__") and not getattr(
+        element, "is_clause_element", False
+    ):
+        element = element.__clause_element__()
+
+    return element
+
+
+class _GetChildrenTraversal(HasTraversalDispatch):
+    """Generate a _children_traversal internal traversal dispatch for classes
+    with a _traverse_internals collection."""
+
+    def visit_has_cache_key(self, element, **kw):
+        # the GetChildren traversal refers explicitly to ClauseElement
+        # structures.  Within these, a plain HasCacheKey is not a
+        # ClauseElement, so don't include these.
+        return ()
+
+    def visit_clauseelement(self, element, **kw):
+        return (element,)
+
+    def visit_clauseelement_list(self, element, **kw):
+        return element
+
+    def visit_clauseelement_tuple(self, element, **kw):
+        return element
+
+    def visit_clauseelement_tuples(self, element, **kw):
+        return itertools.chain.from_iterable(element)
+
+    def visit_fromclause_canonical_column_collection(self, element, **kw):
+        return ()
+
+    def visit_string_clauseelement_dict(self, element, **kw):
+        return element.values()
+
+    def visit_fromclause_ordered_set(self, element, **kw):
+        return element
+
+    def visit_clauseelement_unordered_set(self, element, **kw):
+        return element
+
+    def visit_setup_join_tuple(self, element, **kw):
+        for target, onclause, from_, flags in element:
+            if from_ is not None:
+                yield from_
+
+            if not isinstance(target, str):
+                yield _flatten_clauseelement(target)
+
+            if onclause is not None and not isinstance(onclause, str):
+                yield _flatten_clauseelement(onclause)
+
+    def visit_memoized_select_entities(self, element, **kw):
+        return self.visit_clauseelement_tuple(element, **kw)
+
+    def visit_dml_ordered_values(self, element, **kw):
+        for k, v in element:
+            if hasattr(k, "__clause_element__"):
+                yield k
+            yield v
+
+    def visit_dml_values(self, element, **kw):
+        expr_values = {k for k in element if hasattr(k, "__clause_element__")}
+        str_values = expr_values.symmetric_difference(element)
+
+        for k in sorted(str_values):
+            yield element[k]
+        for k in expr_values:
+            yield k
+            yield element[k]
+
+    def visit_dml_multi_values(self, element, **kw):
+        return ()
+
+    def visit_propagate_attrs(self, element, **kw):
+        return ()
+
+
+_get_children = _GetChildrenTraversal()
+
+
+@util.preload_module("sqlalchemy.sql.elements")
+def _resolve_name_for_compare(element, name, anon_map, **kw):
+    if isinstance(name, util.preloaded.sql_elements._anonymous_label):
+        name = name.apply_map(anon_map)
+
+    return name
+
+
+class TraversalComparatorStrategy(HasTraversalDispatch, util.MemoizedSlots):
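+    """Comparison strategy which walks two ClauseElement structures in
+    parallel, comparing each attribute named in ``_traverse_internals``
+    via the dispatched ``visit_<name>`` handlers; per-element
+    ``compare_<visit_name>`` methods may pre-handle attributes or
+    short-circuit the comparison."""
+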
+    __slots__ = "stack", "cache", "anon_map"
+
+    def __init__(self):
+        self.stack: Deque[
+            Tuple[
+                Optional[ExternallyTraversible],
+                Optional[ExternallyTraversible],
+            ]
+        ] = deque()
+        self.cache = set()
+
+    def _memoized_attr_anon_map(self):
+        return (anon_map(), anon_map())
+
+    def compare(
+        self,
+        obj1: ExternallyTraversible,
+        obj2: ExternallyTraversible,
+        **kw: Any,
+    ) -> bool:
+        stack = self.stack
+        cache = self.cache
+
+        compare_annotations = kw.get("compare_annotations", False)
+
+        stack.append((obj1, obj2))
+
+        while stack:
+            left, right = stack.popleft()
+
+            if left is right:
+                continue
+            elif left is None or right is None:
+                # we know they are different so no match
+                return False
+            elif (left, right) in cache:
+                continue
+            cache.add((left, right))
+
+            visit_name = left.__visit_name__
+            if visit_name != right.__visit_name__:
+                return False
+
+            meth = getattr(self, "compare_%s" % visit_name, None)
+
+            if meth:
+                attributes_compared = meth(left, right, **kw)
+                if attributes_compared is COMPARE_FAILED:
+                    return False
+                elif attributes_compared is SKIP_TRAVERSE:
+                    continue
+
+                # attributes_compared is returned as a list of attribute
+                # names that were "handled" by the comparison method above.
+                # remaining attribute names in the _traverse_internals
+                # will be compared.
+            else:
+                attributes_compared = ()
+
+            for (
+                (left_attrname, left_visit_sym),
+                (right_attrname, right_visit_sym),
+            ) in zip_longest(
+                left._traverse_internals,
+                right._traverse_internals,
+                fillvalue=(None, None),
+            ):
+                if not compare_annotations and (
+                    (left_attrname == "_annotations")
+                    or (right_attrname == "_annotations")
+                ):
+                    continue
+
+                if (
+                    left_attrname != right_attrname
+                    or left_visit_sym is not right_visit_sym
+                ):
+                    return False
+                elif left_attrname in attributes_compared:
+                    continue
+
+                assert left_visit_sym is not None
+                assert left_attrname is not None
+                assert right_attrname is not None
+
+                dispatch = self.dispatch(left_visit_sym)
+                assert dispatch is not None, (
+                    f"{self.__class__} has no dispatch for "
+                    f"'{self._dispatch_lookup[left_visit_sym]}'"
+                )
+                left_child = operator.attrgetter(left_attrname)(left)
+                right_child = operator.attrgetter(right_attrname)(right)
+                if left_child is None:
+                    if right_child is not None:
+                        return False
+                    else:
+                        continue
+                elif right_child is None:
+                    return False
+
+                comparison = dispatch(
+                    left_attrname, left, left_child, right, right_child, **kw
+                )
+                if comparison is COMPARE_FAILED:
+                    return False
+
+        return True
+
+    def compare_inner(self, obj1, obj2, **kw):
+        comparator = self.__class__()
+        return comparator.compare(obj1, obj2, **kw)
+
+    def visit_has_cache_key(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        if left._gen_cache_key(self.anon_map[0], []) != right._gen_cache_key(
+            self.anon_map[1], []
+        ):
+            return COMPARE_FAILED
+
+    def visit_propagate_attrs(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        return self.compare_inner(
+            left.get("plugin_subject", None), right.get("plugin_subject", None)
+        )
+
+    def visit_has_cache_key_list(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        for l, r in zip_longest(left, right, fillvalue=None):
+            if l is None:
+                if r is not None:
+                    return COMPARE_FAILED
+                else:
+                    continue
+            elif r is None:
+                return COMPARE_FAILED
+
+            if l._gen_cache_key(self.anon_map[0], []) != r._gen_cache_key(
+                self.anon_map[1], []
+            ):
+                return COMPARE_FAILED
+
+    def visit_executable_options(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        for l, r in zip_longest(left, right, fillvalue=None):
+            if l is None:
+                if r is not None:
+                    return COMPARE_FAILED
+                else:
+                    continue
+            elif r is None:
+                return COMPARE_FAILED
+
+            if (
+                l._gen_cache_key(self.anon_map[0], [])
+                if l._is_has_cache_key
+                else l
+            ) != (
+                r._gen_cache_key(self.anon_map[1], [])
+                if r._is_has_cache_key
+                else r
+            ):
+                return COMPARE_FAILED
+
+    def visit_clauseelement(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        self.stack.append((left, right))
+
+    def visit_fromclause_canonical_column_collection(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        for lcol, rcol in zip_longest(left, right, fillvalue=None):
+            self.stack.append((lcol, rcol))
+
+    def visit_fromclause_derived_column_collection(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        pass
+
+    def visit_string_clauseelement_dict(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        for lstr, rstr in zip_longest(
+            sorted(left), sorted(right), fillvalue=None
+        ):
+            if lstr != rstr:
+                return COMPARE_FAILED
+            self.stack.append((left[lstr], right[rstr]))
+
+    def visit_clauseelement_tuples(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        for ltup, rtup in zip_longest(left, right, fillvalue=None):
+            if ltup is None or rtup is None:
+                return COMPARE_FAILED
+
+            for l, r in zip_longest(ltup, rtup, fillvalue=None):
+                self.stack.append((l, r))
+
+    def visit_clauseelement_list(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        for l, r in zip_longest(left, right, fillvalue=None):
+            self.stack.append((l, r))
+
+    def visit_clauseelement_tuple(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        for l, r in zip_longest(left, right, fillvalue=None):
+            self.stack.append((l, r))
+
+    def _compare_unordered_sequences(self, seq1, seq2, **kw):
+        if seq1 is None:
+            return seq2 is None
+
+        completed: Set[object] = set()
+        for clause in seq1:
+            for other_clause in set(seq2).difference(completed):
+                if self.compare_inner(clause, other_clause, **kw):
+                    completed.add(other_clause)
+                    break
+        return len(completed) == len(seq1) == len(seq2)
+
+    def visit_clauseelement_unordered_set(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        return self._compare_unordered_sequences(left, right, **kw)
+
+    def visit_fromclause_ordered_set(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        for l, r in zip_longest(left, right, fillvalue=None):
+            self.stack.append((l, r))
+
+    def visit_string(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        return left == right
+
+    def visit_string_list(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        return left == right
+
+    def visit_string_multi_dict(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        for lk, rk in zip_longest(
+            sorted(left.keys()), sorted(right.keys()), fillvalue=(None, None)
+        ):
+            if lk != rk:
+                return COMPARE_FAILED
+
+            lv, rv = left[lk], right[rk]
+
+            lhc = isinstance(lv, HasCacheKey)
+            rhc = isinstance(rv, HasCacheKey)
+            if lhc and rhc:
+                if lv._gen_cache_key(
+                    self.anon_map[0], []
+                ) != rv._gen_cache_key(self.anon_map[1], []):
+                    return COMPARE_FAILED
+            elif lhc != rhc:
+                return COMPARE_FAILED
+            elif lv != rv:
+                return COMPARE_FAILED
+
+    def visit_multi(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        lhc = isinstance(left, HasCacheKey)
+        rhc = isinstance(right, HasCacheKey)
+        if lhc and rhc:
+            if left._gen_cache_key(
+                self.anon_map[0], []
+            ) != right._gen_cache_key(self.anon_map[1], []):
+                return COMPARE_FAILED
+        elif lhc != rhc:
+            return COMPARE_FAILED
+        else:
+            return left == right
+
+    def visit_anon_name(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        return _resolve_name_for_compare(
+            left_parent, left, self.anon_map[0], **kw
+        ) == _resolve_name_for_compare(
+            right_parent, right, self.anon_map[1], **kw
+        )
+
+    def visit_boolean(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        return left == right
+
+    def visit_operator(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        return left == right
+
+    def visit_type(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        return left._compare_type_affinity(right)
+
+    def visit_plain_dict(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        return left == right
+
+    def visit_dialect_options(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        return left == right
+
+    def visit_annotations_key(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        if left and right:
+            return (
+                left_parent._annotations_cache_key
+                == right_parent._annotations_cache_key
+            )
+        else:
+            return left == right
+
+    def visit_with_context_options(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        return tuple((fn.__code__, c_key) for fn, c_key in left) == tuple(
+            (fn.__code__, c_key) for fn, c_key in right
+        )
+
+    def visit_plain_obj(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        return left == right
+
+    def visit_named_ddl_element(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        if left is None or right is None:
+            # a one-sided None fails to compare; two Nones compare as equal
+            return COMPARE_FAILED if left is not right else None
+
+        return left.name == right.name
+
+    def visit_prefix_sequence(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        for (l_clause, l_str), (r_clause, r_str) in zip_longest(
+            left, right, fillvalue=(None, None)
+        ):
+            if l_str != r_str:
+                return COMPARE_FAILED
+            else:
+                self.stack.append((l_clause, r_clause))
+
+    def visit_setup_join_tuple(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        # TODO: look at attrname for "legacy_join" and use different structure
+        for (
+            (l_target, l_onclause, l_from, l_flags),
+            (r_target, r_onclause, r_from, r_flags),
+        ) in zip_longest(left, right, fillvalue=(None, None, None, None)):
+            if l_flags != r_flags:
+                return COMPARE_FAILED
+            self.stack.append((l_target, r_target))
+            self.stack.append((l_onclause, r_onclause))
+            self.stack.append((l_from, r_from))
+
+    def visit_memoized_select_entities(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        return self.visit_clauseelement_tuple(
+            attrname, left_parent, left, right_parent, right, **kw
+        )
+
+    def visit_table_hint_list(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        left_keys = sorted(left, key=lambda elem: (elem[0].fullname, elem[1]))
+        right_keys = sorted(
+            right, key=lambda elem: (elem[0].fullname, elem[1])
+        )
+        for (ltable, ldialect), (rtable, rdialect) in zip_longest(
+            left_keys, right_keys, fillvalue=(None, None)
+        ):
+            if ldialect != rdialect:
+                return COMPARE_FAILED
+            elif left[(ltable, ldialect)] != right[(rtable, rdialect)]:
+                return COMPARE_FAILED
+            else:
+                self.stack.append((ltable, rtable))
+
+    def visit_statement_hint_list(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        return left == right
+
+    def visit_unknown_structure(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        raise NotImplementedError()
+
+    def visit_dml_ordered_values(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        # sequence of tuple pairs
+
+        for (lk, lv), (rk, rv) in zip_longest(
+            left, right, fillvalue=(None, None)
+        ):
+            if not self._compare_dml_values_or_ce(lk, rk, **kw):
+                return COMPARE_FAILED
+            if not self._compare_dml_values_or_ce(lv, rv, **kw):
+                return COMPARE_FAILED
+
+    def _compare_dml_values_or_ce(self, lv, rv, **kw):
+        lvce = hasattr(lv, "__clause_element__")
+        rvce = hasattr(rv, "__clause_element__")
+        if lvce != rvce:
+            return False
+        elif lvce:
+            # both sides resolve to clause elements; compare structurally
+            return self.compare_inner(lv, rv, **kw)
+        else:
+            # plain values; compare by equality
+            return lv == rv
+
+    def visit_dml_values(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        if left is None or right is None or len(left) != len(right):
+            return COMPARE_FAILED
+
+        if isinstance(left, collections_abc.Sequence):
+            for lv, rv in zip(left, right):
+                if not self._compare_dml_values_or_ce(lv, rv, **kw):
+                    return COMPARE_FAILED
+        elif isinstance(right, collections_abc.Sequence):
+            return COMPARE_FAILED
+        else:
+            # dictionaries are guaranteed to preserve insertion order as
+            # of Python 3.7, so the keys can be compared in order; without
+            # this, SQL expression keys couldn't be compared, since there
+            # would be no way to know which key is which
+            for (lk, lv), (rk, rv) in zip(left.items(), right.items()):
+                if not self._compare_dml_values_or_ce(lk, rk, **kw):
+                    return COMPARE_FAILED
+                if not self._compare_dml_values_or_ce(lv, rv, **kw):
+                    return COMPARE_FAILED
+
+    def visit_dml_multi_values(
+        self, attrname, left_parent, left, right_parent, right, **kw
+    ):
+        for lseq, rseq in zip_longest(left, right, fillvalue=None):
+            if lseq is None or rseq is None:
+                return COMPARE_FAILED
+
+            for ld, rd in zip_longest(lseq, rseq, fillvalue=None):
+                if (
+                    self.visit_dml_values(
+                        attrname, left_parent, ld, right_parent, rd, **kw
+                    )
+                    is COMPARE_FAILED
+                ):
+                    return COMPARE_FAILED
+
+    def compare_expression_clauselist(self, left, right, **kw):
+        if left.operator is right.operator:
+            if operators.is_associative(left.operator):
+                if self._compare_unordered_sequences(
+                    left.clauses, right.clauses, **kw
+                ):
+                    return ["operator", "clauses"]
+                else:
+                    return COMPARE_FAILED
+            else:
+                return ["operator"]
+        else:
+            return COMPARE_FAILED
+
+    def compare_clauselist(self, left, right, **kw):
+        return self.compare_expression_clauselist(left, right, **kw)
+
+    def compare_binary(self, left, right, **kw):
+        if left.operator == right.operator:
+            if operators.is_commutative(left.operator):
+                if (
+                    self.compare_inner(left.left, right.left, **kw)
+                    and self.compare_inner(left.right, right.right, **kw)
+                ) or (
+                    self.compare_inner(left.left, right.right, **kw)
+                    and self.compare_inner(left.right, right.left, **kw)
+                ):
+                    return ["operator", "negate", "left", "right"]
+                else:
+                    return COMPARE_FAILED
+            else:
+                return ["operator", "negate"]
+        else:
+            return COMPARE_FAILED
+
+    def compare_bindparam(self, left, right, **kw):
+        compare_keys = kw.pop("compare_keys", True)
+        compare_values = kw.pop("compare_values", True)
+
+        if compare_values:
+            omit = []
+        else:
+            # this means, "skip these, we already compared"
+            omit = ["callable", "value"]
+
+        if not compare_keys:
+            omit.append("key")
+
+        return omit
+
+
+class ColIdentityComparatorStrategy(TraversalComparatorStrategy):
+    def compare_column_element(
+        self, left, right, use_proxies=True, equivalents=(), **kw
+    ):
+        """Compare ColumnElements using proxies and equivalent collections.
+
+        This is a comparison strategy specific to the ORM.
+        """
+
+        to_compare = (right,)
+        if equivalents and right in equivalents:
+            to_compare = equivalents[right].union(to_compare)
+
+        for oth in to_compare:
+            if use_proxies and left.shares_lineage(oth):
+                return SKIP_TRAVERSE
+            elif hash(left) == hash(oth):
+                return SKIP_TRAVERSE
+
+        return COMPARE_FAILED
+
+    def compare_column(self, left, right, **kw):
+        return self.compare_column_element(left, right, **kw)
+
+    def compare_label(self, left, right, **kw):
+        return self.compare_column_element(left, right, **kw)
+
+    def compare_table(self, left, right, **kw):
+        # tables compare on identity, since it's not really feasible to
+        # compare them column by column with the above rules
+        return SKIP_TRAVERSE if left is right else COMPARE_FAILED
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/type_api.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/type_api.py
new file mode 100644
index 00000000..8cdb323b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/type_api.py
@@ -0,0 +1,2358 @@
+# sql/type_api.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Base types API.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+from types import ModuleType
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import Generic
+from typing import Mapping
+from typing import NewType
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from .base import SchemaEventTarget
+from .cache_key import CacheConst
+from .cache_key import NO_CACHE
+from .operators import ColumnOperators
+from .visitors import Visitable
+from .. import exc
+from .. import util
+from ..util.typing import Protocol
+from ..util.typing import Self
+from ..util.typing import TypeAliasType
+from ..util.typing import TypedDict
+from ..util.typing import TypeGuard
+
+# these are back-assigned by sqltypes.
+if typing.TYPE_CHECKING:
+    from ._typing import _TypeEngineArgument
+    from .elements import BindParameter
+    from .elements import ColumnElement
+    from .operators import OperatorType
+    from .sqltypes import _resolve_value_to_type as _resolve_value_to_type
+    from .sqltypes import BOOLEANTYPE as BOOLEANTYPE  # noqa: F401
+    from .sqltypes import INDEXABLE as INDEXABLE  # noqa: F401
+    from .sqltypes import INTEGERTYPE as INTEGERTYPE  # noqa: F401
+    from .sqltypes import MATCHTYPE as MATCHTYPE  # noqa: F401
+    from .sqltypes import NULLTYPE as NULLTYPE
+    from .sqltypes import NUMERICTYPE as NUMERICTYPE  # noqa: F401
+    from .sqltypes import STRINGTYPE as STRINGTYPE  # noqa: F401
+    from .sqltypes import TABLEVALUE as TABLEVALUE  # noqa: F401
+    from ..engine.interfaces import Dialect
+    from ..util.typing import GenericProtocol
+
+_T = TypeVar("_T", bound=Any)
+_T_co = TypeVar("_T_co", bound=Any, covariant=True)
+_T_con = TypeVar("_T_con", bound=Any, contravariant=True)
+_O = TypeVar("_O", bound=object)
+_TE = TypeVar("_TE", bound="TypeEngine[Any]")
+_CT = TypeVar("_CT", bound=Any)
+_RT = TypeVar("_RT", bound=Any)
+
+_MatchedOnType = Union[
+    "GenericProtocol[Any]", TypeAliasType, NewType, Type[Any]
+]
+
+
+class _NoValueInList(Enum):
+    NO_VALUE_IN_LIST = 0
+    """indicates we are trying to determine the type of an expression
+    against an empty list."""
+
+
+_NO_VALUE_IN_LIST = _NoValueInList.NO_VALUE_IN_LIST
+
+
+class _LiteralProcessorType(Protocol[_T_co]):
+    def __call__(self, value: Any) -> str: ...
+
+
+class _BindProcessorType(Protocol[_T_con]):
+    def __call__(self, value: Optional[_T_con]) -> Any: ...
+
+
+class _ResultProcessorType(Protocol[_T_co]):
+    def __call__(self, value: Any) -> Optional[_T_co]: ...
+
+
+class _SentinelProcessorType(Protocol[_T_co]):
+    def __call__(self, value: Any) -> Optional[_T_co]: ...
+
+
+class _BaseTypeMemoDict(TypedDict):
+    impl: TypeEngine[Any]
+    result: Dict[Any, Optional[_ResultProcessorType[Any]]]
+
+
+class _TypeMemoDict(_BaseTypeMemoDict, total=False):
+    literal: Optional[_LiteralProcessorType[Any]]
+    bind: Optional[_BindProcessorType[Any]]
+    sentinel: Optional[_SentinelProcessorType[Any]]
+    custom: Dict[Any, object]
+
+
+class _ComparatorFactory(Protocol[_T]):
+    def __call__(
+        self, expr: ColumnElement[_T]
+    ) -> TypeEngine.Comparator[_T]: ...
+
+
+class TypeEngine(Visitable, Generic[_T]):
+    """The ultimate base class for all SQL datatypes.
+
+    Common subclasses of :class:`.TypeEngine` include
+    :class:`.String`, :class:`.Integer`, and :class:`.Boolean`.
+
+    For an overview of the SQLAlchemy typing system, see
+    :ref:`types_toplevel`.
+
+    .. seealso::
+
+        :ref:`types_toplevel`
+
+    """
+
+    _sqla_type = True
+    _isnull = False
+    _is_tuple_type = False
+    _is_table_value = False
+    _is_array = False
+    _is_type_decorator = False
+
+    render_bind_cast = False
+    """Render bind casts for :attr:`.BindTyping.RENDER_CASTS` mode.
+
+    If True, this type (usually a dialect level impl type) signals
+    to the compiler that a cast should be rendered around a bound parameter
+    for this type.
+
+    .. versionadded:: 2.0
+
+    .. seealso::
+
+        :class:`.BindTyping`
+
+    """
+
+    render_literal_cast = False
+    """render casts when rendering a value as an inline literal,
+    e.g. with :meth:`.TypeEngine.literal_processor`.
+
+    .. versionadded:: 2.0
+
+    """
+
+    class Comparator(
+        ColumnOperators,
+        Generic[_CT],
+    ):
+        """Base class for custom comparison operations defined at the
+        type level.  See :attr:`.TypeEngine.comparator_factory`.
+
+
+        """
+
+        __slots__ = "expr", "type"
+
+        expr: ColumnElement[_CT]
+        type: TypeEngine[_CT]
+
+        def __clause_element__(self) -> ColumnElement[_CT]:
+            return self.expr
+
+        def __init__(self, expr: ColumnElement[_CT]):
+            self.expr = expr
+            self.type = expr.type
+
+        def __reduce__(self) -> Any:
+            return self.__class__, (self.expr,)
+
+        @overload
+        def operate(
+            self,
+            op: OperatorType,
+            *other: Any,
+            result_type: Type[TypeEngine[_RT]],
+            **kwargs: Any,
+        ) -> ColumnElement[_RT]: ...
+
+        @overload
+        def operate(
+            self, op: OperatorType, *other: Any, **kwargs: Any
+        ) -> ColumnElement[_CT]: ...
+
+        @util.preload_module("sqlalchemy.sql.default_comparator")
+        def operate(
+            self, op: OperatorType, *other: Any, **kwargs: Any
+        ) -> ColumnElement[Any]:
+            default_comparator = util.preloaded.sql_default_comparator
+            op_fn, addtl_kw = default_comparator.operator_lookup[op.__name__]
+            if kwargs:
+                addtl_kw = addtl_kw.union(kwargs)
+            return op_fn(self.expr, op, *other, **addtl_kw)
+
+        @util.preload_module("sqlalchemy.sql.default_comparator")
+        def reverse_operate(
+            self, op: OperatorType, other: Any, **kwargs: Any
+        ) -> ColumnElement[_CT]:
+            default_comparator = util.preloaded.sql_default_comparator
+            op_fn, addtl_kw = default_comparator.operator_lookup[op.__name__]
+            if kwargs:
+                addtl_kw = addtl_kw.union(kwargs)
+            return op_fn(self.expr, op, other, reverse=True, **addtl_kw)
+
+        def _adapt_expression(
+            self,
+            op: OperatorType,
+            other_comparator: TypeEngine.Comparator[Any],
+        ) -> Tuple[OperatorType, TypeEngine[Any]]:
+            """evaluate the return type of <self> <op> <othertype>,
+            and apply any adaptations to the given operator.
+
+            This method determines the type of a resulting binary expression
+            given two source types and an operator.   For example, two
+            :class:`_schema.Column` objects, both of the type
+            :class:`.Integer`, will
+            produce a :class:`.BinaryExpression` that also has the type
+            :class:`.Integer` when compared via the addition (``+``) operator.
+            However, using the addition operator with an :class:`.Integer`
+            and a :class:`.Date` object will produce a :class:`.Date`, assuming
+            "days delta" behavior by the database (in reality, most databases
+            other than PostgreSQL don't accept this particular operation).
+
+            The method returns a tuple of the form <operator>, <type>.
+            The resulting operator and type will be those applied to the
+            resulting :class:`.BinaryExpression` as the final operator and the
+            right-hand side of the expression.
+
+            Note that only a subset of operators make usage of
+            :meth:`._adapt_expression`,
+            including math operators and user-defined operators, but not
+            boolean comparison or special SQL keywords like MATCH or BETWEEN.
+
+            """
+
+            return op, self.type
+
+    hashable = True
+    """Flag, if False, means values from this type aren't hashable.
+
+    Used by the ORM when uniquing result lists.
+
+    """
+
+    comparator_factory: _ComparatorFactory[Any] = Comparator
+    """A :class:`.TypeEngine.Comparator` class which will apply
+    to operations performed by owning :class:`_expression.ColumnElement`
+    objects.
+
+    The :attr:`.comparator_factory` attribute is a hook consulted by
+    the core expression system when column and SQL expression operations
+    are performed.   When a :class:`.TypeEngine.Comparator` class is
+    associated with this attribute, it allows custom re-definition of
+    all existing operators, as well as definition of new operators.
+    Existing operators include those provided by Python operator overloading
+    such as :meth:`.operators.ColumnOperators.__add__` and
+    :meth:`.operators.ColumnOperators.__eq__`,
+    those provided as standard
+    attributes of :class:`.operators.ColumnOperators` such as
+    :meth:`.operators.ColumnOperators.like`
+    and :meth:`.operators.ColumnOperators.in_`.
+
+    Rudimentary usage of this hook is allowed through simple subclassing
+    of existing types, or alternatively by using :class:`.TypeDecorator`.
+    See the documentation section :ref:`types_operators` for examples.
+
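+    For example, a minimal sketch of redefining an operator by subclassing
+    an existing type; ``MyString`` and the "goofy" operator are hypothetical
+    names used for illustration only::
+
+        from sqlalchemy import String
+
+        class MyString(String):
+            class comparator_factory(String.Comparator):
+                def __add__(self, other):
+                    return self.op("goofy")(other)
+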
+    """
+
+    sort_key_function: Optional[Callable[[Any], Any]] = None
+    """A sorting function that can be passed as the key to sorted.
+
+    The default value of ``None`` indicates that the values stored by
+    this type are self-sorting.
+
+    .. versionadded:: 1.3.8
+
+    """
+
+    should_evaluate_none: bool = False
+    """If True, the Python constant ``None`` is considered to be handled
+    explicitly by this type.
+
+    The ORM uses this flag to indicate that a positive value of ``None``
+    is passed to the column in an INSERT statement, rather than omitting
+    the column from the INSERT statement which has the effect of firing
+    off column-level defaults.   It also allows types which have special
+    behavior for Python None, such as a JSON type, to indicate that
+    they'd like to handle the None value explicitly.
+
+    To set this flag on an existing type, use the
+    :meth:`.TypeEngine.evaluates_none` method.
+
+    .. seealso::
+
+        :meth:`.TypeEngine.evaluates_none`
+
+    """
+
+    _variant_mapping: util.immutabledict[str, TypeEngine[Any]] = (
+        util.EMPTY_DICT
+    )
+
+    def evaluates_none(self) -> Self:
+        """Return a copy of this type which has the
+        :attr:`.should_evaluate_none` flag set to True.
+
+        E.g.::
+
+                Table(
+                    "some_table",
+                    metadata,
+                    Column(
+                        "value",
+                        String(50).evaluates_none(),
+                        nullable=True,
+                        server_default="no value",
+                    ),
+                )
+
+        The ORM uses this flag to indicate that a positive value of ``None``
+        is passed to the column in an INSERT statement, rather than omitting
+        the column from the INSERT statement which has the effect of firing
+        off column-level defaults.   It also allows for types which have
+        special behavior associated with the Python None value to indicate
+        that the value doesn't necessarily translate into SQL NULL; a
+        prime example of this is a JSON type which may wish to persist the
+        JSON value ``'null'``.
+
+        In all cases, the actual NULL SQL value can always be
+        persisted in any column by using
+        the :obj:`_expression.null` SQL construct in an INSERT statement
+        or associated with an ORM-mapped attribute.
+
+        .. note::
+
+            The "evaluates none" flag does **not** apply to a value
+            of ``None`` passed to :paramref:`_schema.Column.default` or
+            :paramref:`_schema.Column.server_default`; in these cases,
+            ``None``
+            still means "no default".
+
+        .. seealso::
+
+            :ref:`session_forcing_null` - in the ORM documentation
+
+            :paramref:`.postgresql.JSON.none_as_null` - PostgreSQL JSON
+            interaction with this flag.
+
+            :attr:`.TypeEngine.should_evaluate_none` - class-level flag
+
+        """
+        typ = self.copy()
+        typ.should_evaluate_none = True
+        return typ
+
+    def copy(self, **kw: Any) -> Self:
+        return self.adapt(self.__class__)
+
+    def copy_value(self, value: Any) -> Any:
+        return value
+
+    def literal_processor(
+        self, dialect: Dialect
+    ) -> Optional[_LiteralProcessorType[_T]]:
+        """Return a conversion function for processing literal values that are
+        to be rendered directly without using binds.
+
+        This function is used when the compiler makes use of the
+        "literal_binds" flag, typically used in DDL generation as well
+        as in certain scenarios where backends don't accept bound parameters.
+
+        Returns a callable which will receive a literal Python value
+        as the sole positional argument and will return a string representation
+        to be rendered in a SQL statement.
+
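+        For example, a literal processor for a string type might single-quote
+        the incoming value, doubling any embedded quotes (a minimal sketch,
+        not the implementation used by any particular dialect)::
+
+            def literal_processor(self, dialect):
+                def process(value):
+                    return "'%s'" % value.replace("'", "''")
+
+                return process
+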
+        .. note::
+
+            This method is only called relative to a **dialect specific type
+            object**, which is often **private to a dialect in use** and is not
+            the same type object as the public facing one, which means it's not
+            feasible to subclass a :class:`.types.TypeEngine` class in order to
+            provide an alternate :meth:`_types.TypeEngine.literal_processor`
+            method, unless subclassing the :class:`_types.UserDefinedType`
+            class explicitly.
+
+            To provide alternate behavior for
+            :meth:`_types.TypeEngine.literal_processor`, implement a
+            :class:`_types.TypeDecorator` class and provide an implementation
+            of :meth:`_types.TypeDecorator.process_literal_param`.
+
+            .. seealso::
+
+                :ref:`types_typedecorator`
+
+
+        """
+        return None
+
+    def bind_processor(
+        self, dialect: Dialect
+    ) -> Optional[_BindProcessorType[_T]]:
+        """Return a conversion function for processing bind values.
+
+        Returns a callable which will receive a bind parameter value
+        as the sole positional argument and will return a value to
+        send to the DB-API.
+
+        If processing is not necessary, the method should return ``None``.
+
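+        For example, a bind processor which lowercases string values on
+        their way to the database might look like this (an illustrative
+        sketch only)::
+
+            def bind_processor(self, dialect):
+                def process(value):
+                    if value is not None:
+                        value = value.lower()
+                    return value
+
+                return process
+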
+        .. note::
+
+            This method is only called relative to a **dialect specific type
+            object**, which is often **private to a dialect in use** and is not
+            the same type object as the public facing one, which means it's not
+            feasible to subclass a :class:`.types.TypeEngine` class in order to
+            provide an alternate :meth:`_types.TypeEngine.bind_processor`
+            method, unless subclassing the :class:`_types.UserDefinedType`
+            class explicitly.
+
+            To provide alternate behavior for
+            :meth:`_types.TypeEngine.bind_processor`, implement a
+            :class:`_types.TypeDecorator` class and provide an implementation
+            of :meth:`_types.TypeDecorator.process_bind_param`.
+
+            .. seealso::
+
+                :ref:`types_typedecorator`
+
+
+        :param dialect: Dialect instance in use.
+
+        """
+        return None
+
+    def result_processor(
+        self, dialect: Dialect, coltype: object
+    ) -> Optional[_ResultProcessorType[_T]]:
+        """Return a conversion function for processing result row values.
+
+        Returns a callable which will receive a result row column
+        value as the sole positional argument and will return a value
+        to return to the user.
+
+        If processing is not necessary, the method should return ``None``.
+
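+        For example, a result processor which coerces incoming values to
+        ``decimal.Decimal`` might look like this (an illustrative sketch
+        only)::
+
+            import decimal
+
+            def result_processor(self, dialect, coltype):
+                def process(value):
+                    if value is not None:
+                        value = decimal.Decimal(value)
+                    return value
+
+                return process
+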
+        .. note::
+
+            This method is only called relative to a **dialect specific type
+            object**, which is often **private to a dialect in use** and is not
+            the same type object as the public facing one, which means it's not
+            feasible to subclass a :class:`.types.TypeEngine` class in order to
+            provide an alternate :meth:`_types.TypeEngine.result_processor`
+            method, unless subclassing the :class:`_types.UserDefinedType`
+            class explicitly.
+
+            To provide alternate behavior for
+            :meth:`_types.TypeEngine.result_processor`, implement a
+            :class:`_types.TypeDecorator` class and provide an implementation
+            of :meth:`_types.TypeDecorator.process_result_value`.
+
+            .. seealso::
+
+                :ref:`types_typedecorator`
+
+        :param dialect: Dialect instance in use.
+
+        :param coltype: DBAPI coltype argument received in cursor.description.
+
+        """
+        return None
+
+    def column_expression(
+        self, colexpr: ColumnElement[_T]
+    ) -> Optional[ColumnElement[_T]]:
+        """Given a SELECT column expression, return a wrapping SQL expression.
+
+        This is typically a SQL function that wraps a column expression
+        as rendered in the columns clause of a SELECT statement.
+        It is used for special data types that require
+        columns to be wrapped in some special database function in order
+        to coerce the value before being sent back to the application.
+        It is the SQL analogue of the :meth:`.TypeEngine.result_processor`
+        method.
+
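+        For example, a type which wants every selected value passed through
+        the SQL ``lower()`` function could return (a minimal sketch)::
+
+            from sqlalchemy import func
+
+            def column_expression(self, colexpr):
+                return func.lower(colexpr)
+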
+        This method is called during the **SQL compilation** phase of a
+        statement, when rendering a SQL string. It is **not** called
+        against specific values.
+
+        .. note::
+
+            This method is only called relative to a **dialect specific type
+            object**, which is often **private to a dialect in use** and is not
+            the same type object as the public facing one, which means it's not
+            feasible to subclass a :class:`.types.TypeEngine` class in order to
+            provide an alternate :meth:`_types.TypeEngine.column_expression`
+            method, unless subclassing the :class:`_types.UserDefinedType`
+            class explicitly.
+
+            To provide alternate behavior for
+            :meth:`_types.TypeEngine.column_expression`, implement a
+            :class:`_types.TypeDecorator` class and provide an implementation
+            of :meth:`_types.TypeDecorator.column_expression`.
+
+            .. seealso::
+
+                :ref:`types_typedecorator`
+
+
+        .. seealso::
+
+            :ref:`types_sql_value_processing`
+
+        """
+
+        return None
+
+    @util.memoized_property
+    def _has_column_expression(self) -> bool:
+        """memoized boolean, check if column_expression is implemented.
+
+        Allows the method to be skipped for the vast majority of expression
+        types that don't use this feature.
+
+        """
+
+        return (
+            self.__class__.column_expression.__code__
+            is not TypeEngine.column_expression.__code__
+        )
+
+    def bind_expression(
+        self, bindvalue: BindParameter[_T]
+    ) -> Optional[ColumnElement[_T]]:
+        """Given a bind value (i.e. a :class:`.BindParameter` instance),
+        return a SQL expression in its place.
+
+        This is typically a SQL function that wraps the existing bound
+        parameter within the statement.  It is used for special data types
+        that require literals being wrapped in some special database function
+        in order to coerce an application-level value into a database-specific
+        format.  It is the SQL analogue of the
+        :meth:`.TypeEngine.bind_processor` method.
+
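+        For example, a geometry type might wrap each bound parameter in a
+        conversion function (a sketch assuming a hypothetical
+        ``ST_GeomFromText`` SQL function)::
+
+            from sqlalchemy import func
+
+            def bind_expression(self, bindvalue):
+                return func.ST_GeomFromText(bindvalue, type_=self)
+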
+        This method is called during the **SQL compilation** phase of a
+        statement, when rendering a SQL string. It is **not** called
+        against specific values.
+
+        Note that this method, when implemented, should always return
+        the exact same structure, without any conditional logic, as it
+        may be used in an executemany() call against an arbitrary number
+        of bound parameter sets.
+
+        .. note::
+
+            This method is only called relative to a **dialect specific type
+            object**, which is often **private to a dialect in use** and is not
+            the same type object as the public facing one, which means it's not
+            feasible to subclass a :class:`.types.TypeEngine` class in order to
+            provide an alternate :meth:`_types.TypeEngine.bind_expression`
+            method, unless subclassing the :class:`_types.UserDefinedType`
+            class explicitly.
+
+            To provide alternate behavior for
+            :meth:`_types.TypeEngine.bind_expression`, implement a
+            :class:`_types.TypeDecorator` class and provide an implementation
+            of :meth:`_types.TypeDecorator.bind_expression`.
+
+            .. seealso::
+
+                :ref:`types_typedecorator`
+
+        .. seealso::
+
+            :ref:`types_sql_value_processing`
+
+        """
+        return None
+
+    @util.memoized_property
+    def _has_bind_expression(self) -> bool:
+        """memoized boolean, check if bind_expression is implemented.
+
+        Allows the method to be skipped for the vast majority of expression
+        types that don't use this feature.
+
+        """
+
+        return util.method_is_overridden(self, TypeEngine.bind_expression)
+
+    @staticmethod
+    def _to_instance(cls_or_self: Union[Type[_TE], _TE]) -> _TE:
+        return to_instance(cls_or_self)
+
+    def compare_values(self, x: Any, y: Any) -> bool:
+        """Compare two values for equality."""
+
+        return x == y  # type: ignore[no-any-return]
+
+    def get_dbapi_type(self, dbapi: ModuleType) -> Optional[Any]:
+        """Return the corresponding type object from the underlying DB-API, if
+        any.
+
+        This can be useful for calling ``setinputsizes()``, for example.
+
+        """
+        return None
+
+    @property
+    def python_type(self) -> Type[Any]:
+        """Return the Python type object expected to be returned
+        by instances of this type, if known.
+
+        Basically, for those types which enforce a return type,
+        or are known across the board to do so for all common
+        DBAPIs (like ``int`` for example), this will return that type.
+
+        If a return type is not defined, raises
+        ``NotImplementedError``.
+
+        Note that any type also accommodates NULL in SQL which
+        means you can also get back ``None`` from any type
+        in practice.
+
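+        E.g.::
+
+            >>> from sqlalchemy import Integer
+            >>> Integer().python_type
+            <class 'int'>
+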
+        """
+        raise NotImplementedError()
+
+    def with_variant(
+        self,
+        type_: _TypeEngineArgument[Any],
+        *dialect_names: str,
+    ) -> Self:
+        r"""Produce a copy of this type object that will utilize the given
+        type when applied to the dialect of the given name.
+
+        e.g.::
+
+            from sqlalchemy.types import String
+            from sqlalchemy.dialects import mysql
+
+            string_type = String()
+
+            string_type = string_type.with_variant(
+                mysql.VARCHAR(collation="foo"), "mysql", "mariadb"
+            )
+
+        The variant mapping indicates that when this type is
+        interpreted by a specific dialect, it will instead be
+        transmuted into the given type, rather than using the
+        primary type.
+
+        .. versionchanged:: 2.0 the :meth:`_types.TypeEngine.with_variant`
+           method now works with a :class:`_types.TypeEngine` object "in
+           place", returning a copy of the original type rather than returning
+           a wrapping object; the ``Variant`` class is no longer used.
+
+        :param type\_: a :class:`.TypeEngine` that will be selected
+         as a variant from the originating type, when a dialect
+         of the given name is in use.
+        :param \*dialect_names: one or more base names of the dialect which
+         uses this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.)
+
+         .. versionchanged:: 2.0 multiple dialect names can be specified
+            for one variant.
+
+        .. seealso::
+
+            :ref:`types_with_variant` - illustrates the use of
+            :meth:`_types.TypeEngine.with_variant`.
+
+        """
+
+        if not dialect_names:
+            raise exc.ArgumentError("At least one dialect name is required")
+        for dialect_name in dialect_names:
+            if dialect_name in self._variant_mapping:
+                raise exc.ArgumentError(
+                    f"Dialect {dialect_name!r} is already present in "
+                    f"the mapping for this {self!r}"
+                )
+        new_type = self.copy()
+        type_ = to_instance(type_)
+        if type_._variant_mapping:
+            raise exc.ArgumentError(
+                "can't pass a type that already has variants as a "
+                "dialect-level type to with_variant()"
+            )
+
+        new_type._variant_mapping = self._variant_mapping.union(
+            {dialect_name: type_ for dialect_name in dialect_names}
+        )
+        return new_type
+
+    def _resolve_for_literal(self, value: Any) -> Self:
+        """adjust this type given a literal Python value that will be
+        stored in a bound parameter.
+
+        Used exclusively by _resolve_value_to_type().
+
+        .. versionadded:: 1.4.30 or 2.0
+
+        TODO: this should be part of public API
+
+        .. seealso::
+
+            :meth:`.TypeEngine._resolve_for_python_type`
+
+        """
+        return self
+
+    def _resolve_for_python_type(
+        self,
+        python_type: Type[Any],
+        matched_on: _MatchedOnType,
+        matched_on_flattened: Type[Any],
+    ) -> Optional[Self]:
+        """given a Python type (e.g. ``int``, ``str``, etc. ) return an
+        instance of this :class:`.TypeEngine` that's appropriate for this type.
+
+        An additional argument ``matched_on`` is passed, which indicates an
+        entry from the ``__mro__`` of the given ``python_type`` that more
+        specifically matches how the caller located this :class:`.TypeEngine`
+        object.   For example, if a lookup of some kind links the ``int`` Python
+        type to the :class:`.Integer` SQL type, and the original object
+        was some custom subclass of ``int`` such as ``MyInt(int)``, the
+        arguments passed would be ``(MyInt, int)``.
+
+        If the given Python type does not correspond to this
+        :class:`.TypeEngine`, or the Python type is otherwise ambiguous, the
+        method should return None.
+
+        For simple cases, the method checks that the ``python_type``
+        and ``matched_on`` types are the same (i.e. not a subclass), and
+        returns self; for all other cases, it returns ``None``.
+
+        The initial use case here is for the ORM to link user-defined
+        Python standard library ``enum.Enum`` classes to the SQLAlchemy
+        :class:`.Enum` SQL type when constructing ORM Declarative mappings.
+
+        :param python_type: the Python type we want to use
+        :param matched_on: the Python type that led us to choose this
+         particular :class:`.TypeEngine` class, which would be a supertype
+         of ``python_type``.   By default, the request is rejected if
+         ``python_type`` doesn't match ``matched_on`` (None is returned).
+
+        .. versionadded:: 2.0.0b4
+
+        TODO: this should be part of public API
+
+        .. seealso::
+
+            :meth:`.TypeEngine._resolve_for_literal`
+
+        """
+
+        if python_type is not matched_on_flattened:
+            return None
+
+        return self
+
+    def _with_collation(self, collation: str) -> Self:
+        """set up error handling for the collate expression"""
+        raise NotImplementedError("this datatype does not support collation")
+
+    @util.ro_memoized_property
+    def _type_affinity(self) -> Optional[Type[TypeEngine[_T]]]:
+        """Return a rudimental 'affinity' value expressing the general class
+        of type."""
+
+        typ = None
+        for t in self.__class__.__mro__:
+            if t is TypeEngine or TypeEngineMixin in t.__bases__:
+                return typ
+            elif issubclass(t, TypeEngine):
+                typ = t
+        else:
+            return self.__class__
+
+    @util.ro_memoized_property
+    def _generic_type_affinity(
+        self,
+    ) -> Type[TypeEngine[_T]]:
+        best_camelcase = None
+        best_uppercase = None
+
+        if not isinstance(self, TypeEngine):
+            return self.__class__
+
+        for t in self.__class__.__mro__:
+            if (
+                t.__module__
+                in (
+                    "sqlalchemy.sql.sqltypes",
+                    "sqlalchemy.sql.type_api",
+                )
+                and issubclass(t, TypeEngine)
+                and TypeEngineMixin not in t.__bases__
+                and t not in (TypeEngine, TypeEngineMixin)
+                and t.__name__[0] != "_"
+            ):
+                if t.__name__.isupper() and not best_uppercase:
+                    best_uppercase = t
+                elif not t.__name__.isupper() and not best_camelcase:
+                    best_camelcase = t
+
+        return (
+            best_camelcase
+            or best_uppercase
+            or cast("Type[TypeEngine[_T]]", NULLTYPE.__class__)
+        )
+
+    def as_generic(self, allow_nulltype: bool = False) -> TypeEngine[_T]:
+        """
+        Return an instance of the generic type corresponding to this type,
+        using a heuristic rule. The method may be overridden if this
+        heuristic is not sufficient.
+
+        >>> from sqlalchemy.dialects.mysql import INTEGER
+        >>> INTEGER(display_width=4).as_generic()
+        Integer()
+
+        >>> from sqlalchemy.dialects.mysql import NVARCHAR
+        >>> NVARCHAR(length=100).as_generic()
+        Unicode(length=100)
+
+        .. versionadded:: 1.4.0b2
+
+
+        .. seealso::
+
+            :ref:`metadata_reflection_dbagnostic_types` - describes the
+            use of :meth:`_types.TypeEngine.as_generic` in conjunction with
+            the :meth:`_sql.DDLEvents.column_reflect` event, which is its
+            intended use.
+
+        """
+        if (
+            not allow_nulltype
+            and self._generic_type_affinity == NULLTYPE.__class__
+        ):
+            raise NotImplementedError(
+                "Default TypeEngine.as_generic() "
+                "heuristic method was unsuccessful for {}. A custom "
+                "as_generic() method must be implemented for this "
+                "type class.".format(
+                    self.__class__.__module__ + "." + self.__class__.__name__
+                )
+            )
+
+        return util.constructor_copy(self, self._generic_type_affinity)
+
+    def dialect_impl(self, dialect: Dialect) -> TypeEngine[_T]:
+        """Return a dialect-specific implementation for this
+        :class:`.TypeEngine`.
+
+        """
+        try:
+            tm = dialect._type_memos[self]
+        except KeyError:
+            pass
+        else:
+            return tm["impl"]
+        return self._dialect_info(dialect)["impl"]
+
+    def _unwrapped_dialect_impl(self, dialect: Dialect) -> TypeEngine[_T]:
+        """Return the 'unwrapped' dialect impl for this type.
+
+        For a type that applies wrapping logic (e.g. TypeDecorator), give
+        us the real, actual dialect-level type that is used.
+
+        This is used by TypeDecorator itself, as well as in at least one
+        case where dialects need to check that a particular dialect-level
+        type is in use, within the :meth:`.DefaultDialect.set_input_sizes`
+        method.
+
+        """
+        return self.dialect_impl(dialect)
+
+    def _cached_literal_processor(
+        self, dialect: Dialect
+    ) -> Optional[_LiteralProcessorType[_T]]:
+        """Return a dialect-specific literal processor for this type."""
+
+        try:
+            return dialect._type_memos[self]["literal"]
+        except KeyError:
+            pass
+
+        # invoke literal_processor() outside the except block, so that the
+        # lookup KeyError doesn't appear as context for anything it raises
+        d = self._dialect_info(dialect)
+        d["literal"] = lp = d["impl"].literal_processor(dialect)
+        return lp
+
+    def _cached_bind_processor(
+        self, dialect: Dialect
+    ) -> Optional[_BindProcessorType[_T]]:
+        """Return a dialect-specific bind processor for this type."""
+
+        try:
+            return dialect._type_memos[self]["bind"]
+        except KeyError:
+            pass
+
+        # invoke bind_processor() outside the except block, so that the
+        # lookup KeyError doesn't appear as context for anything it raises
+        d = self._dialect_info(dialect)
+        d["bind"] = bp = d["impl"].bind_processor(dialect)
+        return bp
+
+    def _cached_result_processor(
+        self, dialect: Dialect, coltype: Any
+    ) -> Optional[_ResultProcessorType[_T]]:
+        """Return a dialect-specific result processor for this type."""
+
+        try:
+            return dialect._type_memos[self]["result"][coltype]
+        except KeyError:
+            pass
+
+        # invoke result_processor() outside the except block, so that the
+        # lookup KeyError doesn't appear as context for anything it raises
+        d = self._dialect_info(dialect)
+        # key assumption: DBAPI type codes are
+        # constants.  Else this dictionary would
+        # grow unbounded.
+        rp = d["impl"].result_processor(dialect, coltype)
+        d["result"][coltype] = rp
+        return rp
+
+    def _cached_custom_processor(
+        self, dialect: Dialect, key: str, fn: Callable[[TypeEngine[_T]], _O]
+    ) -> _O:
+        """return a dialect-specific processing object for
+        custom purposes.
+
+        The cx_Oracle dialect uses this at the moment.
+
+        """
+        try:
+            return cast(_O, dialect._type_memos[self]["custom"][key])
+        except KeyError:
+            pass
+        # invoke fn() outside the except block, so that the lookup
+        # KeyError doesn't appear as context for anything it raises
+        d = self._dialect_info(dialect)
+        impl = d["impl"]
+        custom_dict = d.setdefault("custom", {})
+        custom_dict[key] = result = fn(impl)
+        return result
+
+    def _dialect_info(self, dialect: Dialect) -> _TypeMemoDict:
+        """Return a dialect-specific registry which
+        caches a dialect-specific implementation, bind processing
+        function, and one or more result processing functions."""
+
+        if self in dialect._type_memos:
+            return dialect._type_memos[self]
+        else:
+            impl = self._gen_dialect_impl(dialect)
+            if impl is self:
+                impl = self.adapt(type(self))
+            # this can't be self, else we create a cycle
+            assert impl is not self
+            d: _TypeMemoDict = {"impl": impl, "result": {}}
+            dialect._type_memos[self] = d
+            return d
+
+    def _gen_dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]:
+        if dialect.name in self._variant_mapping:
+            return self._variant_mapping[dialect.name]._gen_dialect_impl(
+                dialect
+            )
+        else:
+            return dialect.type_descriptor(self)
+
+    @util.memoized_property
+    def _static_cache_key(
+        self,
+    ) -> Union[CacheConst, Tuple[Any, ...]]:
+        names = util.get_cls_kwargs(self.__class__)
+        return (self.__class__,) + tuple(
+            (
+                k,
+                (
+                    self.__dict__[k]._static_cache_key
+                    if isinstance(self.__dict__[k], TypeEngine)
+                    else self.__dict__[k]
+                ),
+            )
+            for k in names
+            if k in self.__dict__
+            and not k.startswith("_")
+            and self.__dict__[k] is not None
+        )
+
+    @overload
+    def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: ...
+
+    @overload
+    def adapt(
+        self, cls: Type[TypeEngineMixin], **kw: Any
+    ) -> TypeEngine[Any]: ...
+
+    def adapt(
+        self, cls: Type[Union[TypeEngine[Any], TypeEngineMixin]], **kw: Any
+    ) -> TypeEngine[Any]:
+        """Produce an "adapted" form of this type, given an "impl" class
+        to work with.
+
+        This method is used internally to associate generic
+        types with "implementation" types that are specific to a particular
+        dialect.
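+
+        For example (an illustrative sketch; this method is mainly
+        internal API)::
+
+            >>> from sqlalchemy import String, Text
+            >>> String(50).adapt(Text)
+            Text(length=50)
+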
+        """
+        typ = util.constructor_copy(
+            self, cast(Type[TypeEngine[Any]], cls), **kw
+        )
+        typ._variant_mapping = self._variant_mapping
+        return typ
+
+    def coerce_compared_value(
+        self, op: Optional[OperatorType], value: Any
+    ) -> TypeEngine[Any]:
+        """Suggest a type for a 'coerced' Python value in an expression.
+
+        Given an operator and value, gives the type a chance
+        to return a type which the value should be coerced into.
+
+        The default behavior here is conservative; if the right-hand
+        side is already coerced into a SQL type based on its
+        Python type, it is usually left alone.
+
+        End-user functionality extension here should generally be via
+        :class:`.TypeDecorator`, which provides more liberal behavior in that
+        it defaults to coercing the other side of the expression into this
+        type, thus applying special Python conversions above and beyond those
+        needed by the DBAPI to both sides. It also provides the public method
+        :meth:`.TypeDecorator.coerce_compared_value` which is intended for
+        end-user customization of this behavior.
+
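+        For example (an illustrative sketch; the exact type returned
+        depends on an internal lookup against the Python type of the
+        value)::
+
+            >>> from sqlalchemy import Integer
+            >>> from sqlalchemy.sql import operators
+            >>> Integer().coerce_compared_value(operators.add, 5)
+            Integer()
+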
+        """
+        _coerced_type = _resolve_value_to_type(value)
+        if (
+            _coerced_type is NULLTYPE
+            or _coerced_type._type_affinity is self._type_affinity
+        ):
+            return self
+        else:
+            return _coerced_type
+
+    def _compare_type_affinity(self, other: TypeEngine[Any]) -> bool:
+        return self._type_affinity is other._type_affinity
+
+    def compile(self, dialect: Optional[Dialect] = None) -> str:
+        """Produce a string-compiled form of this :class:`.TypeEngine`.
+
+        When called with no arguments, uses a "default" dialect
+        to produce a string result.
+
+        :param dialect: a :class:`.Dialect` instance.
+
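+        For example, a minimal sketch (output assumes the default
+        string-compilation dialect)::
+
+            >>> from sqlalchemy import String
+            >>> String(50).compile()
+            'VARCHAR(50)'
+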
+        """
+        # arg, return value is inconsistent with
+        # ClauseElement.compile()....this is a mistake.
+
+        if dialect is None:
+            dialect = self._default_dialect()
+
+        return dialect.type_compiler_instance.process(self)
+
+    @util.preload_module("sqlalchemy.engine.default")
+    def _default_dialect(self) -> Dialect:
+        default = util.preloaded.engine_default
+
+        # dmypy / mypy seems to sporadically keep thinking this line is
+        # returning Any, which seems to be caused by the @deprecated_params
+        # decorator on the DefaultDialect constructor
+        return default.StrCompileDialect()  # type: ignore
+
+    def __str__(self) -> str:
+        return str(self.compile())
+
+    def __repr__(self) -> str:
+        return util.generic_repr(self)
+
+
+class TypeEngineMixin:
+    """classes which subclass this can act as "mixin" classes for
+    TypeEngine."""
+
+    __slots__ = ()
+
+    if TYPE_CHECKING:
+
+        @util.memoized_property
+        def _static_cache_key(
+            self,
+        ) -> Union[CacheConst, Tuple[Any, ...]]: ...
+
+        @overload
+        def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: ...
+
+        @overload
+        def adapt(
+            self, cls: Type[TypeEngineMixin], **kw: Any
+        ) -> TypeEngine[Any]: ...
+
+        def adapt(
+            self, cls: Type[Union[TypeEngine[Any], TypeEngineMixin]], **kw: Any
+        ) -> TypeEngine[Any]: ...
+
+        def dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]: ...
+
+
+class ExternalType(TypeEngineMixin):
+    """mixin that defines attributes and behaviors specific to third-party
+    datatypes.
+
+    "Third party" refers to datatypes that are defined outside the scope
+    of SQLAlchemy within either end-user application code or within
+    external extensions to SQLAlchemy.
+
+    Subclasses currently include :class:`.TypeDecorator` and
+    :class:`.UserDefinedType`.
+
+    .. versionadded:: 1.4.28
+
+    """
+
+    cache_ok: Optional[bool] = None
+    '''Indicate if statements using this :class:`.ExternalType` are "safe to
+    cache".
+
+    The default value ``None`` will emit a warning and then not allow caching
+    of a statement which includes this type.   Set to ``False`` to prevent
+    statements using this type from being cached at all, without a warning.
+    When set to ``True``, the object's class and selected elements from its
+    state will be used as part of the cache key.  For example, using a
+    :class:`.TypeDecorator`::
+
+        class MyType(TypeDecorator):
+            impl = String
+
+            cache_ok = True
+
+            def __init__(self, choices):
+                self.choices = tuple(choices)
+                self.internal_only = True
+
+    The cache key for the above type would be equivalent to::
+
+        >>> MyType(["a", "b", "c"])._static_cache_key
+        (<class '__main__.MyType'>, ('choices', ('a', 'b', 'c')))
+
+    The caching scheme will extract attributes from the type that correspond
+    to the names of parameters in the ``__init__()`` method.  Above, the
+    "choices" attribute becomes part of the cache key but "internal_only"
+    does not, because there is no parameter named "internal_only".
+
+    The requirements for cacheable elements are that they are hashable
+    and that they produce the same rendered SQL for expressions using
+    this type every time for a given cache value.
+
+    To accommodate datatypes that refer to unhashable structures such
+    as dictionaries, sets and lists, these objects can be made "cacheable"
+    by assigning hashable structures to the attributes whose names
+    correspond with the names of the arguments.  For example, a datatype
+    which accepts a dictionary of lookup values may publish it as a sorted
+    series of tuples.   Given a previously uncacheable type such as::
+
+        class LookupType(UserDefinedType):
+            """a custom type that accepts a dictionary as a parameter.
+
+            this is the non-cacheable version, as "self.lookup" is not
+            hashable.
+
+            """
+
+            def __init__(self, lookup):
+                self.lookup = lookup
+
+            def get_col_spec(self, **kw):
+                return "VARCHAR(255)"
+
+            def bind_processor(self, dialect): ...  # works with "self.lookup" ...
+
+    Where "lookup" is a dictionary.  The type will not be able to generate
+    a cache key::
+
+        >>> type_ = LookupType({"a": 10, "b": 20})
+        >>> type_._static_cache_key
+        <stdin>:1: SAWarning: UserDefinedType LookupType({'a': 10, 'b': 20}) will not
+        produce a cache key because the ``cache_ok`` flag is not set to True.
+        Set this flag to True if this type object's state is safe to use
+        in a cache key, or False to disable this warning.
+        symbol('no_cache')
+
+    If we **did** set up such a cache key, it wouldn't be usable. We would
+    get a tuple structure that contains a dictionary inside of it, which
+    cannot itself be used as a key in a "cache dictionary" such as SQLAlchemy's
+    statement cache, since Python dictionaries aren't hashable::
+
+        >>> # set cache_ok = True
+        >>> type_.cache_ok = True
+
+        >>> # this is the cache key it would generate
+        >>> key = type_._static_cache_key
+        >>> key
+        (<class '__main__.LookupType'>, ('lookup', {'a': 10, 'b': 20}))
+
+        >>> # however this key is not hashable, will fail when used with
+        >>> # SQLAlchemy statement cache
+        >>> some_cache = {key: "some sql value"}
+        Traceback (most recent call last):
+          File "<stdin>", line 1, in <module>
+        TypeError: unhashable type: 'dict'
+
+    The type may be made cacheable by assigning a sorted tuple of tuples
+    to the ".lookup" attribute::
+
+        class LookupType(UserDefinedType):
+            """a custom type that accepts a dictionary as a parameter.
+
+            The dictionary is stored both as itself in a private variable,
+            and published in a public variable as a sorted tuple of tuples,
+            which is hashable and will also return the same value for any
+            two equivalent dictionaries.  Note it assumes the keys and
+            values of the dictionary are themselves hashable.
+
+            """
+
+            cache_ok = True
+
+            def __init__(self, lookup):
+                self._lookup = lookup
+
+                # assume keys/values of "lookup" are hashable; otherwise
+                # they would also need to be converted in some way here
+                self.lookup = tuple((key, lookup[key]) for key in sorted(lookup))
+
+            def get_col_spec(self, **kw):
+                return "VARCHAR(255)"
+
+            def bind_processor(self, dialect): ...  # works with "self._lookup" ...
+
+    Where above, the cache key for ``LookupType({"a": 10, "b": 20})`` will be::
+
+        >>> LookupType({"a": 10, "b": 20})._static_cache_key
+        (<class '__main__.LookupType'>, ('lookup', (('a', 10), ('b', 20))))
+
+    .. versionadded:: 1.4.14 - added the ``cache_ok`` flag to allow
+       some configurability of caching for :class:`.TypeDecorator` classes.
+
+    .. versionadded:: 1.4.28 - added the :class:`.ExternalType` mixin which
+       generalizes the ``cache_ok`` flag to both the :class:`.TypeDecorator`
+       and :class:`.UserDefinedType` classes.
+
+    .. seealso::
+
+        :ref:`sql_caching`
+
+    '''  # noqa: E501
+
+    @util.non_memoized_property
+    def _static_cache_key(
+        self,
+    ) -> Union[CacheConst, Tuple[Any, ...]]:
+        cache_ok = self.__class__.__dict__.get("cache_ok", None)
+
+        if cache_ok is None:
+            for subtype in self.__class__.__mro__:
+                if ExternalType in subtype.__bases__:
+                    break
+            else:
+                subtype = self.__class__.__mro__[1]
+
+            util.warn(
+                "%s %r will not produce a cache key because "
+                "the ``cache_ok`` attribute is not set to True.  This can "
+                "have significant performance implications including some "
+                "performance degradations in comparison to prior SQLAlchemy "
+                "versions.  Set this attribute to True if this type object's "
+                "state is safe to use in a cache key, or False to "
+                "disable this warning." % (subtype.__name__, self),
+                code="cprf",
+            )
+        elif cache_ok is True:
+            return super()._static_cache_key
+
+        return NO_CACHE
+
+
+class UserDefinedType(
+    ExternalType, TypeEngineMixin, TypeEngine[_T], util.EnsureKWArg
+):
+    """Base for user defined types.
+
+    This should be the base of new types.  Note that
+    for most cases, :class:`.TypeDecorator` is probably
+    more appropriate::
+
+      import sqlalchemy.types as types
+
+
+      class MyType(types.UserDefinedType):
+          cache_ok = True
+
+          def __init__(self, precision=8):
+              self.precision = precision
+
+          def get_col_spec(self, **kw):
+              return "MYTYPE(%s)" % self.precision
+
+          def bind_processor(self, dialect):
+              def process(value):
+                  return value
+
+              return process
+
+          def result_processor(self, dialect, coltype):
+              def process(value):
+                  return value
+
+              return process
+
+    Once the type is made, it's immediately usable::
+
+      table = Table(
+          "foo",
+          metadata_obj,
+          Column("id", Integer, primary_key=True),
+          Column("data", MyType(16)),
+      )
+
+    The ``get_col_spec()`` method will in most cases receive a keyword
+    argument ``type_expression`` which refers to the owning expression
+    of the type as being compiled, such as a :class:`_schema.Column` or
+    :func:`.cast` construct.  This keyword is only sent if the method
+    accepts keyword arguments (e.g. ``**kw``) in its argument signature;
+    introspection is used to check for this in order to support legacy
+    forms of this function.
+
+    The :attr:`.UserDefinedType.cache_ok` class-level flag indicates if this
+    custom :class:`.UserDefinedType` is safe to be used as part of a cache key.
+    This flag defaults to ``None`` which will initially generate a warning
+    when the SQL compiler attempts to generate a cache key for a statement
+    that uses this type.  If the :class:`.UserDefinedType` is not guaranteed
+    to produce the same bind/result behavior and SQL generation
+    every time, this flag should be set to ``False``; otherwise if the
+    class produces the same behavior each time, it may be set to ``True``.
+    See :attr:`.UserDefinedType.cache_ok` for further notes on how this works.
+
+    .. versionadded:: 1.4.28 Generalized the :attr:`.ExternalType.cache_ok`
+       flag so that it is available for both :class:`.TypeDecorator` as well
+       as :class:`.UserDefinedType`.
+
+    """
+
+    __visit_name__ = "user_defined"
+
+    ensure_kwarg = "get_col_spec"
+
+    def coerce_compared_value(
+        self, op: Optional[OperatorType], value: Any
+    ) -> TypeEngine[Any]:
+        """Suggest a type for a 'coerced' Python value in an expression.
+
+        Default behavior for :class:`.UserDefinedType` is the
+        same as that of :class:`.TypeDecorator`; by default it returns
+        ``self``, assuming the compared value should be coerced into
+        the same type as this one.  See
+        :meth:`.TypeDecorator.coerce_compared_value` for more detail.
+
+        """
+
+        return self
+
+
+class Emulated(TypeEngineMixin):
+    """Mixin for base types that emulate the behavior of a DB-native type.
+
+    An :class:`.Emulated` type will use an available database type
+    in conjunction with Python-side routines and/or database constraints
+    in order to approximate the behavior of a database type that is provided
+    natively by some backends.  When a native-providing backend is in
+    use, the native version of the type is used.  This native version
+    should include the :class:`.NativeForEmulated` mixin to allow it to be
+    distinguished from :class:`.Emulated`.
+
+    Current examples of :class:`.Emulated` are:  :class:`.Interval`,
+    :class:`.Enum`, :class:`.Boolean`.
+
+    .. versionadded:: 1.2.0b3
+
+    """
+
+    native: bool
+
+    def adapt_to_emulated(
+        self,
+        impltype: Type[Union[TypeEngine[Any], TypeEngineMixin]],
+        **kw: Any,
+    ) -> TypeEngine[Any]:
+        """Given an impl class, adapt this type to the impl assuming
+        "emulated".
+
+        The impl should also be an "emulated" version of this type,
+        most likely the same class as this type itself.
+
+        e.g.: sqltypes.Enum adapts to the Enum class.
+
+        """
+        return super().adapt(impltype, **kw)
+
+    @overload
+    def adapt(self, cls: Type[_TE], **kw: Any) -> _TE: ...
+
+    @overload
+    def adapt(
+        self, cls: Type[TypeEngineMixin], **kw: Any
+    ) -> TypeEngine[Any]: ...
+
+    def adapt(
+        self, cls: Type[Union[TypeEngine[Any], TypeEngineMixin]], **kw: Any
+    ) -> TypeEngine[Any]:
+        if _is_native_for_emulated(cls):
+            if self.native:
+                # native support requested, dialect gave us a native
+                # implementor, pass control over to it
+                return cls.adapt_emulated_to_native(self, **kw)
+            else:
+                # non-native support, let the native implementor
+                # decide also, at the moment this is just to help debugging
+                # as only the default logic is implemented.
+                return cls.adapt_native_to_emulated(self, **kw)
+        else:
+            # this would be, both classes are Enum, or both classes
+            # are postgresql.ENUM
+            if issubclass(cls, self.__class__):
+                return self.adapt_to_emulated(cls, **kw)
+            else:
+                return super().adapt(cls, **kw)
+
+
+def _is_native_for_emulated(
+    typ: Type[Union[TypeEngine[Any], TypeEngineMixin]],
+) -> TypeGuard[Type[NativeForEmulated]]:
+    return hasattr(typ, "adapt_emulated_to_native")
+
+
+class NativeForEmulated(TypeEngineMixin):
+    """Indicates DB-native types supported by an :class:`.Emulated` type.
+
+    .. versionadded:: 1.2.0b3
+
+    """
+
+    @classmethod
+    def adapt_native_to_emulated(
+        cls,
+        impl: Union[TypeEngine[Any], TypeEngineMixin],
+        **kw: Any,
+    ) -> TypeEngine[Any]:
+        """Given an impl, adapt this type's class to the impl assuming
+        "emulated".
+
+
+        """
+        impltype = impl.__class__
+        return impl.adapt(impltype, **kw)
+
+    @classmethod
+    def adapt_emulated_to_native(
+        cls,
+        impl: Union[TypeEngine[Any], TypeEngineMixin],
+        **kw: Any,
+    ) -> TypeEngine[Any]:
+        """Given an impl, adapt this type's class to the impl assuming
+        "native".
+
+        The impl will be an :class:`.Emulated` class but not a
+        :class:`.NativeForEmulated`.
+
+        e.g.: postgresql.ENUM produces a type given an Enum instance.
+
+        """
+
+        # dmypy seems to crash on this
+        return cls(**kw)  # type: ignore
+
+    # dmypy seems to crash with this, on repeated runs with changes
+    # if TYPE_CHECKING:
+    #    def __init__(self, **kw: Any):
+    #        ...
+
+
+class TypeDecorator(SchemaEventTarget, ExternalType, TypeEngine[_T]):
+    '''Allows the creation of types which add additional functionality
+    to an existing type.
+
+    This method is preferred to direct subclassing of SQLAlchemy's
+    built-in types as it ensures that all required functionality of
+    the underlying type is kept in place.
+
+    Typical usage::
+
+      import sqlalchemy.types as types
+
+
+      class MyType(types.TypeDecorator):
+          """Prefixes Unicode values with "PREFIX:" on the way in and
+          strips it off on the way out.
+          """
+
+          impl = types.Unicode
+
+          cache_ok = True
+
+          def process_bind_param(self, value, dialect):
+              return "PREFIX:" + value
+
+          def process_result_value(self, value, dialect):
+              return value[7:]
+
+          def copy(self, **kw):
+              return MyType(self.impl.length)
+
+    The class-level ``impl`` attribute is required, and can reference any
+    :class:`.TypeEngine` class.  Alternatively, the :meth:`load_dialect_impl`
+    method can be used to provide different type classes based on the dialect
+    given; in this case, the ``impl`` variable can reference
+    ``TypeEngine`` as a placeholder.
+
+    The :attr:`.TypeDecorator.cache_ok` class-level flag indicates if this
+    custom :class:`.TypeDecorator` is safe to be used as part of a cache key.
+    This flag defaults to ``None`` which will initially generate a warning
+    when the SQL compiler attempts to generate a cache key for a statement
+    that uses this type.  If the :class:`.TypeDecorator` is not guaranteed
+    to produce the same bind/result behavior and SQL generation
+    every time, this flag should be set to ``False``; otherwise if the
+    class produces the same behavior each time, it may be set to ``True``.
+    See :attr:`.TypeDecorator.cache_ok` for further notes on how this works.
+
+    Types that receive a Python type that isn't similar to the ultimate type
+    used may want to define the :meth:`TypeDecorator.coerce_compared_value`
+    method. This is used to give the expression system a hint when coercing
+    Python objects into bind parameters within expressions. Consider this
+    expression::
+
+        mytable.c.somecol + datetime.date(2009, 5, 15)
+
+    Above, if "somecol" is an ``Integer`` variant, it makes sense that
+    we're doing date arithmetic, where above is usually interpreted
+    by databases as adding a number of days to the given date.
+    The expression system does the right thing by not attempting to
+    coerce the "date()" value into an integer-oriented bind parameter.
+
+    However, in the case of ``TypeDecorator``, we are usually changing an
+    incoming Python type to something new - ``TypeDecorator`` by default will
+    "coerce" the non-typed side to be the same type as itself. Such as below,
+    we define an "epoch" type that stores a date value as an integer::
+
+        class MyEpochType(types.TypeDecorator):
+            impl = types.Integer
+
+            cache_ok = True
+
+            epoch = datetime.date(1970, 1, 1)
+
+            def process_bind_param(self, value, dialect):
+                return (value - self.epoch).days
+
+            def process_result_value(self, value, dialect):
+                return self.epoch + timedelta(days=value)
+
+    Our expression of ``somecol + date`` with the above type will coerce the
+    "date" on the right side to also be treated as ``MyEpochType``.
+
+    This behavior can be overridden via the
+    :meth:`~TypeDecorator.coerce_compared_value` method, which returns a type
+    that should be used for the value of the expression. Below we set it such
+    that an integer value will be treated as an ``Integer``, and any other
+    value is assumed to be a date and will be treated as a ``MyEpochType``::
+
+        def coerce_compared_value(self, op, value):
+            if isinstance(value, int):
+                return Integer()
+            else:
+                return self
+
+    .. warning::
+
+       Note that the **behavior of coerce_compared_value is not inherited
+       by default from that of the base type**.
+       If the :class:`.TypeDecorator` is augmenting a
+       type that requires special logic for certain types of operators,
+       this method **must** be overridden.  A key example is when decorating
+       the :class:`_postgresql.JSON` and :class:`_postgresql.JSONB` types;
+       the default rules of :meth:`.TypeEngine.coerce_compared_value` should
+       be used in order to deal with operators like index operations::
+
+            from sqlalchemy import JSON
+            from sqlalchemy import TypeDecorator
+
+
+            class MyJsonType(TypeDecorator):
+                impl = JSON
+
+                cache_ok = True
+
+                def coerce_compared_value(self, op, value):
+                    return self.impl.coerce_compared_value(op, value)
+
+       Without the above step, index operations such as ``mycol['foo']``
+       will cause the index value ``'foo'`` to be JSON encoded.
+
+       Similarly, when working with the :class:`.ARRAY` datatype, the
+       type coercion for index operations (e.g. ``mycol[5]``) is also
+       handled by :meth:`.TypeDecorator.coerce_compared_value`, where
+       again a simple override is sufficient unless special rules are needed
+       for particular operators::
+
+            from sqlalchemy import ARRAY
+            from sqlalchemy import TypeDecorator
+
+
+            class MyArrayType(TypeDecorator):
+                impl = ARRAY
+
+                cache_ok = True
+
+                def coerce_compared_value(self, op, value):
+                    return self.impl.coerce_compared_value(op, value)
+
+    '''
+
+    __visit_name__ = "type_decorator"
+
+    _is_type_decorator = True
+
+    # this is that pattern I've used in a few places (Dialect.dbapi,
+    # Dialect.type_compiler) where the "cls.attr" is a class to make something,
+    # and "instance.attr" is an instance of that thing.  It's such a nifty,
+    # great pattern, and there is zero chance Python typing tools will ever be
+    # OK with it.  For TypeDecorator.impl, this is a highly public attribute so
+    # we really can't change its behavior without a major deprecation routine.
+    impl: Union[TypeEngine[Any], Type[TypeEngine[Any]]]
+
+    # we are changing its behavior *slightly*, which is that we now consume
+    # the instance level version from this memoized property instead, so you
+    # can't reassign "impl" on an existing TypeDecorator that's already been
+    # used (something one shouldn't do anyway) without also updating
+    # impl_instance.
+    @util.memoized_property
+    def impl_instance(self) -> TypeEngine[Any]:
+        return self.impl  # type: ignore
+
+    def __init__(self, *args: Any, **kwargs: Any):
+        """Construct a :class:`.TypeDecorator`.
+
+        Arguments sent here are passed to the constructor
+        of the class assigned to the ``impl`` class level attribute,
+        assuming the ``impl`` is a callable, and the resulting
+        object is assigned to the ``self.impl`` instance attribute
+        (thus overriding the class attribute of the same name).
+
+        If the class level ``impl`` is not a callable (the unusual case),
+        it will be assigned to the same instance attribute 'as-is',
+        ignoring those arguments passed to the constructor.
+
+        Subclasses can override this to customize the generation
+        of ``self.impl`` entirely.
+
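+        For example, a brief sketch (``TruncatedString`` is a hypothetical
+        name)::
+
+            class TruncatedString(TypeDecorator):
+                impl = String
+                cache_ok = True
+
+            # constructs String(50) and assigns it to self.impl
+            t = TruncatedString(50)
+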
+        """
+
+        if not hasattr(self.__class__, "impl"):
+            raise AssertionError(
+                "TypeDecorator implementations "
+                "require a class-level variable "
+                "'impl' which refers to the class of "
+                "type being decorated"
+            )
+
+        self.impl = to_instance(self.__class__.impl, *args, **kwargs)
+
+    coerce_to_is_types: Sequence[Type[Any]] = (type(None),)
+    """Specify those Python types which should be coerced at the expression
+    level to "IS <constant>" when compared using ``==`` (and same for
+    ``IS NOT`` in conjunction with ``!=``).
+
+    For most SQLAlchemy types, this includes ``NoneType``, as well as
+    ``bool``.
+
+    :class:`.TypeDecorator` modifies this list to only include ``NoneType``,
+    as typedecorator implementations that deal with boolean types are common.
+
+    Custom :class:`.TypeDecorator` classes can override this attribute to
+    return an empty tuple, in which case no values will be coerced to
+    constants.
+
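+    For example, a minimal sketch of such an override (``NoIsCoercion``
+    is a hypothetical name)::
+
+        class NoIsCoercion(TypeDecorator):
+            impl = String
+            cache_ok = True
+
+            coerce_to_is_types = ()
+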
+    """
+
+    class Comparator(TypeEngine.Comparator[_CT]):
+        """A :class:`.TypeEngine.Comparator` that is specific to
+        :class:`.TypeDecorator`.
+
+        User-defined :class:`.TypeDecorator` classes should not typically
+        need to modify this.
+
+
+        """
+
+        __slots__ = ()
+
+        def operate(
+            self, op: OperatorType, *other: Any, **kwargs: Any
+        ) -> ColumnElement[_CT]:
+            if TYPE_CHECKING:
+                assert isinstance(self.expr.type, TypeDecorator)
+            kwargs["_python_is_types"] = self.expr.type.coerce_to_is_types
+            return super().operate(op, *other, **kwargs)
+
+        def reverse_operate(
+            self, op: OperatorType, other: Any, **kwargs: Any
+        ) -> ColumnElement[_CT]:
+            if TYPE_CHECKING:
+                assert isinstance(self.expr.type, TypeDecorator)
+            kwargs["_python_is_types"] = self.expr.type.coerce_to_is_types
+            return super().reverse_operate(op, other, **kwargs)
+
+    @staticmethod
+    def _reduce_td_comparator(
+        impl: TypeEngine[Any], expr: ColumnElement[_T]
+    ) -> Any:
+        return TypeDecorator._create_td_comparator_type(impl)(expr)
+
+    @staticmethod
+    def _create_td_comparator_type(
+        impl: TypeEngine[Any],
+    ) -> _ComparatorFactory[Any]:
+
+        def __reduce__(self: TypeDecorator.Comparator[Any]) -> Any:
+            return (TypeDecorator._reduce_td_comparator, (impl, self.expr))
+
+        return type(
+            "TDComparator",
+            (TypeDecorator.Comparator, impl.comparator_factory),  # type: ignore # noqa: E501
+            {"__reduce__": __reduce__},
+        )
+
+    @property
+    def comparator_factory(  # type: ignore  # mypy properties bug
+        self,
+    ) -> _ComparatorFactory[Any]:
+        if TypeDecorator.Comparator in self.impl.comparator_factory.__mro__:  # type: ignore # noqa: E501
+            return self.impl_instance.comparator_factory
+        else:
+            # reconcile the Comparator class on the impl with that
+            # of TypeDecorator.
+            # the use of multiple staticmethods is to support repeated
+            # pickling of the Comparator itself
+            return TypeDecorator._create_td_comparator_type(self.impl_instance)
+
+    def _copy_with_check(self) -> Self:
+        tt = self.copy()
+        if not isinstance(tt, self.__class__):
+            raise AssertionError(
+                "Type object %s does not properly "
+                "implement the copy() method, it must "
+                "return an object of type %s" % (self, self.__class__)
+            )
+        return tt
+
+    def _gen_dialect_impl(self, dialect: Dialect) -> TypeEngine[_T]:
+        if dialect.name in self._variant_mapping:
+            adapted = dialect.type_descriptor(
+                self._variant_mapping[dialect.name]
+            )
+        else:
+            adapted = dialect.type_descriptor(self)
+        if adapted is not self:
+            return adapted
+
+        # otherwise adapt the impl type, link
+        # to a copy of this TypeDecorator and return
+        # that.
+        typedesc = self.load_dialect_impl(dialect).dialect_impl(dialect)
+        tt = self._copy_with_check()
+        tt.impl = tt.impl_instance = typedesc
+        return tt
+
+    def _with_collation(self, collation: str) -> Self:
+        tt = self._copy_with_check()
+        tt.impl = tt.impl_instance = self.impl_instance._with_collation(
+            collation
+        )
+        return tt
+
+    @util.ro_non_memoized_property
+    def _type_affinity(self) -> Optional[Type[TypeEngine[Any]]]:
+        return self.impl_instance._type_affinity
+
+    def _set_parent(
+        self, parent: SchemaEventTarget, outer: bool = False, **kw: Any
+    ) -> None:
+        """Support SchemaEventTarget"""
+
+        super()._set_parent(parent)
+
+        if not outer and isinstance(self.impl_instance, SchemaEventTarget):
+            self.impl_instance._set_parent(parent, outer=False, **kw)
+
+    def _set_parent_with_dispatch(
+        self, parent: SchemaEventTarget, **kw: Any
+    ) -> None:
+        """Support SchemaEventTarget"""
+
+        super()._set_parent_with_dispatch(parent, outer=True, **kw)
+
+        if isinstance(self.impl_instance, SchemaEventTarget):
+            self.impl_instance._set_parent_with_dispatch(parent)
+
+    def type_engine(self, dialect: Dialect) -> TypeEngine[Any]:
+        """Return a dialect-specific :class:`.TypeEngine` instance
+        for this :class:`.TypeDecorator`.
+
+        In most cases this returns a dialect-adapted form of
+        the :class:`.TypeEngine` type represented by ``self.impl``.
+        It makes use of :meth:`dialect_impl`.
+        Behavior can be customized here by overriding
+        :meth:`load_dialect_impl`.
+
+        """
+        adapted = dialect.type_descriptor(self)
+        if not isinstance(adapted, type(self)):
+            return adapted
+        else:
+            return self.load_dialect_impl(dialect)
+
+    def load_dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]:
+        """Return a :class:`.TypeEngine` object corresponding to a dialect.
+
+        This is an end-user override hook that can be used to provide
+        differing types depending on the given dialect.  It is used
+        by the :class:`.TypeDecorator` implementation of :meth:`type_engine`
+        to help determine what type should ultimately be returned
+        for a given :class:`.TypeDecorator`.
+
+        By default returns ``self.impl``.
+
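+        For example, a sketch that selects a backend-specific type
+        (assumes ``JSON`` and the ``postgresql`` dialect module are
+        imported)::
+
+            def load_dialect_impl(self, dialect):
+                if dialect.name == "postgresql":
+                    return dialect.type_descriptor(postgresql.JSONB())
+                else:
+                    return dialect.type_descriptor(JSON())
+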
+        """
+        return self.impl_instance
+
+    def _unwrapped_dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]:
+        """Return the 'unwrapped' dialect impl for this type.
+
+        This is used by the :meth:`.DefaultDialect.set_input_sizes`
+        method.
+
+        """
+        # some dialects have a lookup for a TypeDecorator subclass directly.
+        # postgresql.INTERVAL being the main example
+        typ = self.dialect_impl(dialect)
+
+        # if we are still a type decorator, load the per-dialect switch
+        # (such as what Variant uses), then get the dialect impl for that.
+        if isinstance(typ, self.__class__):
+            return typ.load_dialect_impl(dialect).dialect_impl(dialect)
+        else:
+            return typ
+
+    def __getattr__(self, key: str) -> Any:
+        """Proxy all other undefined accessors to the underlying
+        implementation."""
+        return getattr(self.impl_instance, key)
+
+    def process_literal_param(
+        self, value: Optional[_T], dialect: Dialect
+    ) -> str:
+        """Receive a literal parameter value to be rendered inline within
+        a statement.
+
+        .. note::
+
+            This method is called during the **SQL compilation** phase of a
+            statement, when rendering a SQL string. Unlike other SQL
+            compilation methods, it is passed a specific Python value to be
+            rendered as a string. However it should not be confused with the
+            :meth:`_types.TypeDecorator.process_bind_param` method, which is
+            the more typical method that processes the actual value passed to a
+            particular parameter at statement execution time.
+
+        Custom subclasses of :class:`_types.TypeDecorator` should override
+        this method to provide custom behaviors for incoming data values
+        that are in the special case of being rendered as literals.
+
+        The returned string will be rendered into the output string.
+
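+        For example, a sketch applying the same "PREFIX:" transformation
+        shown in the class-level example; quoting is then typically
+        applied by the underlying implementation's literal processor::
+
+            def process_literal_param(self, value, dialect):
+                return "PREFIX:" + value
+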
+        """
+        raise NotImplementedError()
+
+    def process_bind_param(self, value: Optional[_T], dialect: Dialect) -> Any:
+        """Receive a bound parameter value to be converted.
+
+        Custom subclasses of :class:`_types.TypeDecorator` should override
+        this method to provide custom behaviors for incoming data values.
+        This method is called at **statement execution time** and is passed
+        the literal Python data value which is to be associated with a bound
+        parameter in the statement.
+
+        The operation could be anything desired to perform custom
+        behavior, such as transforming or serializing data.
+        This could also be used as a hook for validating logic.
+
+        :param value: Data to operate upon, of any type expected by
+         this method in the subclass.  Can be ``None``.
+        :param dialect: the :class:`.Dialect` in use.
+
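+        For example, a minimal sketch that serializes values to JSON
+        strings on the way in (assumes the standard-library ``json``
+        module is imported)::
+
+            def process_bind_param(self, value, dialect):
+                return json.dumps(value) if value is not None else None
+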
+        .. seealso::
+
+            :ref:`types_typedecorator`
+
+            :meth:`_types.TypeDecorator.process_result_value`
+
+        """
+
+        raise NotImplementedError()
+
+    def process_result_value(
+        self, value: Optional[Any], dialect: Dialect
+    ) -> Optional[_T]:
+        """Receive a result-row column value to be converted.
+
+        Custom subclasses of :class:`_types.TypeDecorator` should override
+        this method to provide custom behaviors for data values
+        being received in result rows coming from the database.
+        This method is called at **result fetching time** and is passed
+        the literal Python data value that's extracted from a database result
+        row.
+
+        The operation could be anything desired to perform custom
+        behavior, such as transforming or deserializing data.
+
+        :param value: Data to operate upon, of any type expected by
+         this method in the subclass.  Can be ``None``.
+        :param dialect: the :class:`.Dialect` in use.
+
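+        For example, the inverse of the JSON sketch shown for
+        :meth:`_types.TypeDecorator.process_bind_param`::
+
+            def process_result_value(self, value, dialect):
+                return json.loads(value) if value is not None else None
+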
+        .. seealso::
+
+            :ref:`types_typedecorator`
+
+            :meth:`_types.TypeDecorator.process_bind_param`
+
+
+        """
+
+        raise NotImplementedError()
+
+    @util.memoized_property
+    def _has_bind_processor(self) -> bool:
+        """memoized boolean, check if process_bind_param is implemented.
+
+        Allows the base process_bind_param to raise
+        NotImplementedError without needing to test an expensive
+        exception throw.
+
+        """
+
+        return util.method_is_overridden(
+            self, TypeDecorator.process_bind_param
+        )
+
+    @util.memoized_property
+    def _has_literal_processor(self) -> bool:
+        """memoized boolean, check if process_literal_param is implemented."""
+
+        return util.method_is_overridden(
+            self, TypeDecorator.process_literal_param
+        )
+
+    def literal_processor(
+        self, dialect: Dialect
+    ) -> Optional[_LiteralProcessorType[_T]]:
+        """Provide a literal processing function for the given
+        :class:`.Dialect`.
+
+        This is the method that fulfills the :class:`.TypeEngine`
+        contract for literal value conversion which normally occurs via
+        the :meth:`_types.TypeEngine.literal_processor` method.
+
+        .. note::
+
+            User-defined subclasses of :class:`_types.TypeDecorator` should
+            **not** implement this method, and should instead implement
+            :meth:`_types.TypeDecorator.process_literal_param` so that the
+            "inner" processing provided by the implementing type is maintained.
+
+        """
+
+        if self._has_literal_processor:
+            process_literal_param = self.process_literal_param
+            process_bind_param = None
+        elif self._has_bind_processor:
+            # no literal processor of our own; fall back to combining
+            # our bind processor with the impl's literal processor
+            process_literal_param = None
+            process_bind_param = self.process_bind_param
+        else:
+            process_literal_param = None
+            process_bind_param = None
+
+        if process_literal_param is not None:
+            impl_processor = self.impl_instance.literal_processor(dialect)
+            if impl_processor:
+                fixed_impl_processor = impl_processor
+                fixed_process_literal_param = process_literal_param
+
+                def process(value: Any) -> str:
+                    return fixed_impl_processor(
+                        fixed_process_literal_param(value, dialect)
+                    )
+
+            else:
+                fixed_process_literal_param = process_literal_param
+
+                def process(value: Any) -> str:
+                    return fixed_process_literal_param(value, dialect)
+
+            return process
+
+        elif process_bind_param is not None:
+            impl_processor = self.impl_instance.literal_processor(dialect)
+            if not impl_processor:
+                return None
+            else:
+                fixed_impl_processor = impl_processor
+                fixed_process_bind_param = process_bind_param
+
+                def process(value: Any) -> str:
+                    return fixed_impl_processor(
+                        fixed_process_bind_param(value, dialect)
+                    )
+
+                return process
+        else:
+            return self.impl_instance.literal_processor(dialect)
+
+    def bind_processor(
+        self, dialect: Dialect
+    ) -> Optional[_BindProcessorType[_T]]:
+        """Provide a bound value processing function for the
+        given :class:`.Dialect`.
+
+        This is the method that fulfills the :class:`.TypeEngine`
+        contract for bound value conversion which normally occurs via
+        the :meth:`_types.TypeEngine.bind_processor` method.
+
+        .. note::
+
+            User-defined subclasses of :class:`_types.TypeDecorator` should
+            **not** implement this method, and should instead implement
+            :meth:`_types.TypeDecorator.process_bind_param` so that the "inner"
+            processing provided by the implementing type is maintained.
+
+        :param dialect: Dialect instance in use.
+
+        """
+        if self._has_bind_processor:
+            process_param = self.process_bind_param
+            impl_processor = self.impl_instance.bind_processor(dialect)
+            if impl_processor:
+                fixed_impl_processor = impl_processor
+                fixed_process_param = process_param
+
+                def process(value: Optional[_T]) -> Any:
+                    return fixed_impl_processor(
+                        fixed_process_param(value, dialect)
+                    )
+
+            else:
+                fixed_process_param = process_param
+
+                def process(value: Optional[_T]) -> Any:
+                    return fixed_process_param(value, dialect)
+
+            return process
+        else:
+            return self.impl_instance.bind_processor(dialect)
+
+    @util.memoized_property
+    def _has_result_processor(self) -> bool:
+        """memoized boolean, check if process_result_value is implemented.
+
+        Allows the base process_result_value to raise
+        NotImplementedError without needing to test an expensive
+        exception throw.
+
+        """
+
+        return util.method_is_overridden(
+            self, TypeDecorator.process_result_value
+        )
+
+    def result_processor(
+        self, dialect: Dialect, coltype: Any
+    ) -> Optional[_ResultProcessorType[_T]]:
+        """Provide a result value processing function for the given
+        :class:`.Dialect`.
+
+        This is the method that fulfills the :class:`.TypeEngine`
+        contract for result value conversion which normally occurs via
+        the :meth:`_types.TypeEngine.result_processor` method.
+
+        .. note::
+
+            User-defined subclasses of :class:`_types.TypeDecorator` should
+            **not** implement this method, and should instead implement
+            :meth:`_types.TypeDecorator.process_result_value` so that the
+            "inner" processing provided by the implementing type is maintained.
+
+        :param dialect: Dialect instance in use.
+        :param coltype: a DBAPI column type code, as received in
+         ``cursor.description``
+
+        """
+        if self._has_result_processor:
+            process_value = self.process_result_value
+            impl_processor = self.impl_instance.result_processor(
+                dialect, coltype
+            )
+            if impl_processor:
+                fixed_process_value = process_value
+                fixed_impl_processor = impl_processor
+
+                def process(value: Any) -> Optional[_T]:
+                    return fixed_process_value(
+                        fixed_impl_processor(value), dialect
+                    )
+
+            else:
+                fixed_process_value = process_value
+
+                def process(value: Any) -> Optional[_T]:
+                    return fixed_process_value(value, dialect)
+
+            return process
+        else:
+            return self.impl_instance.result_processor(dialect, coltype)
+
+    @util.memoized_property
+    def _has_bind_expression(self) -> bool:
+        return (
+            util.method_is_overridden(self, TypeDecorator.bind_expression)
+            or self.impl_instance._has_bind_expression
+        )
+
+    def bind_expression(
+        self, bindparam: BindParameter[_T]
+    ) -> Optional[ColumnElement[_T]]:
+        """Given a bind value (i.e. a :class:`.BindParameter` instance),
+        return a SQL expression which will typically wrap the given parameter.
+
+        .. note::
+
+            This method is called during the **SQL compilation** phase of a
+            statement, when rendering a SQL string. It is **not** necessarily
+            called against specific values, and should not be confused with the
+            :meth:`_types.TypeDecorator.process_bind_param` method, which is
+            the more typical method that processes the actual value passed to a
+            particular parameter at statement execution time.
+
+        Subclasses of :class:`_types.TypeDecorator` can override this method
+        to provide custom bind expression behavior for the type.  This
+        implementation will **replace** that of the underlying implementation
+        type.
+
+        """
+        return self.impl_instance.bind_expression(bindparam)
+
+    @util.memoized_property
+    def _has_column_expression(self) -> bool:
+        """memoized boolean, check if column_expression is implemented.
+
+        Allows the method to be skipped for the vast majority of expression
+        types that don't use this feature.
+
+        """
+
+        return (
+            util.method_is_overridden(self, TypeDecorator.column_expression)
+            or self.impl_instance._has_column_expression
+        )
+
+    def column_expression(
+        self, column: ColumnElement[_T]
+    ) -> Optional[ColumnElement[_T]]:
+        """Given a SELECT column expression, return a wrapping SQL expression.
+
+        .. note::
+
+            This method is called during the **SQL compilation** phase of a
+            statement, when rendering a SQL string. It is **not** called
+            against specific values, and should not be confused with the
+            :meth:`_types.TypeDecorator.process_result_value` method, which is
+            the more typical method that processes the actual value returned
+            in a result row subsequent to statement execution time.
+
+        Subclasses of :class:`_types.TypeDecorator` can override this method
+        to provide custom column expression behavior for the type.  This
+        implementation will **replace** that of the underlying implementation
+        type.
+
+        See the description of :meth:`_types.TypeEngine.column_expression`
+        for a complete description of the method's use.
+
+        """
+
+        return self.impl_instance.column_expression(column)
+
+    def coerce_compared_value(
+        self, op: Optional[OperatorType], value: Any
+    ) -> Any:
+        """Suggest a type for a 'coerced' Python value in an expression.
+
+        By default, returns self.   This method is called by
+        the expression system when an object using this type is
+        on the left or right side of an expression against a plain Python
+        object which does not yet have a SQLAlchemy type assigned::
+
+            expr = table.c.somecolumn + 35
+
+        Where above, if ``somecolumn`` uses this type, this method will
+        be called with the value ``operator.add``
+        and ``35``.  The return value is whatever SQLAlchemy type should
+        be used for ``35`` for this particular operation.
+
+        """
+        return self
+
+    def copy(self, **kw: Any) -> Self:
+        """Produce a copy of this :class:`.TypeDecorator` instance.
+
+        This is a shallow copy and is provided to fulfill part of
+        the :class:`.TypeEngine` contract.  It usually does not
+        need to be overridden unless the user-defined :class:`.TypeDecorator`
+        has local state that should be deep-copied.
+
+        """
+
+        instance = self.__class__.__new__(self.__class__)
+        instance.__dict__.update(self.__dict__)
+        return instance
+
+    def get_dbapi_type(self, dbapi: ModuleType) -> Optional[Any]:
+        """Return the DBAPI type object represented by this
+        :class:`.TypeDecorator`.
+
+        By default this calls upon :meth:`.TypeEngine.get_dbapi_type` of the
+        underlying "impl".
+        """
+        return self.impl_instance.get_dbapi_type(dbapi)
+
+    def compare_values(self, x: Any, y: Any) -> bool:
+        """Given two values, compare them for equality.
+
+        By default this calls upon :meth:`.TypeEngine.compare_values`
+        of the underlying "impl", which in turn usually
+        uses the Python equals operator ``==``.
+
+        This function is used by the ORM to compare
+        an original-loaded value with an intercepted
+        "changed" value, to determine if a net change
+        has occurred.
+
+        """
+        return self.impl_instance.compare_values(x, y)
+
+    # mypy property bug
+    @property
+    def sort_key_function(self) -> Optional[Callable[[Any], Any]]:  # type: ignore # noqa: E501
+        return self.impl_instance.sort_key_function
+
+    def __repr__(self) -> str:
+        return util.generic_repr(self, to_inspect=self.impl_instance)
+
+
+class Variant(TypeDecorator[_T]):
+    """deprecated.  symbol is present for backwards-compatibility with
+    workaround recipes, however this actual type should not be used.
+
+    """
+
+    def __init__(self, *arg: Any, **kw: Any):
+        raise NotImplementedError(
+            "Variant is no longer used in SQLAlchemy; this is a "
+            "placeholder symbol for backwards compatibility."
+        )
+
+
+@overload
+def to_instance(
+    typeobj: Union[Type[_TE], _TE], *arg: Any, **kw: Any
+) -> _TE: ...
+
+
+@overload
+def to_instance(typeobj: None, *arg: Any, **kw: Any) -> TypeEngine[None]: ...
+
+
+def to_instance(
+    typeobj: Union[Type[_TE], _TE, None], *arg: Any, **kw: Any
+) -> Union[_TE, TypeEngine[None]]:
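+    # coerce a TypeEngine class or instance to an instance: None becomes
+    # NULLTYPE, classes (or other callables) are invoked with the given
+    # arguments, and existing instances pass through unchanged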
+    if typeobj is None:
+        return NULLTYPE
+
+    if callable(typeobj):
+        return typeobj(*arg, **kw)
+    else:
+        return typeobj
+
+
+def adapt_type(
+    typeobj: _TypeEngineArgument[Any],
+    colspecs: Mapping[Type[Any], Type[TypeEngine[Any]]],
+) -> TypeEngine[Any]:
+    typeobj = to_instance(typeobj)
+    for t in typeobj.__class__.__mro__[0:-1]:
+        try:
+            impltype = colspecs[t]
+            break
+        except KeyError:
+            pass
+    else:
+        # couldn't adapt - so just return the type itself
+        # (it may be a user-defined type)
+        return typeobj
+    # if we adapted the given generic type to a database-specific type,
+    # but it turns out the originally given "generic" type
+    # is actually a subclass of our resulting type, then we were already
+    # given a more specific type than that required; so use that.
+    if issubclass(typeobj.__class__, impltype):
+        return typeobj
+    return typeobj.adapt(impltype)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/util.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/util.py
new file mode 100644
index 00000000..29cd0e2b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/util.py
@@ -0,0 +1,1487 @@
+# sql/util.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+"""High level utilities which build upon other modules here.
+
+"""
+from __future__ import annotations
+
+from collections import deque
+import copy
+from itertools import chain
+import typing
+from typing import AbstractSet
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Collection
+from typing import Dict
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from . import coercions
+from . import operators
+from . import roles
+from . import visitors
+from ._typing import is_text_clause
+from .annotation import _deep_annotate as _deep_annotate  # noqa: F401
+from .annotation import _deep_deannotate as _deep_deannotate  # noqa: F401
+from .annotation import _shallow_annotate as _shallow_annotate  # noqa: F401
+from .base import _expand_cloned
+from .base import _from_objects
+from .cache_key import HasCacheKey as HasCacheKey  # noqa: F401
+from .ddl import sort_tables as sort_tables  # noqa: F401
+from .elements import _find_columns as _find_columns
+from .elements import _label_reference
+from .elements import _textual_label_reference
+from .elements import BindParameter
+from .elements import ClauseElement
+from .elements import ColumnClause
+from .elements import ColumnElement
+from .elements import Grouping
+from .elements import KeyedColumnElement
+from .elements import Label
+from .elements import NamedColumn
+from .elements import Null
+from .elements import UnaryExpression
+from .schema import Column
+from .selectable import Alias
+from .selectable import FromClause
+from .selectable import FromGrouping
+from .selectable import Join
+from .selectable import ScalarSelect
+from .selectable import SelectBase
+from .selectable import TableClause
+from .visitors import _ET
+from .. import exc
+from .. import util
+from ..util.typing import Literal
+from ..util.typing import Protocol
+
+if typing.TYPE_CHECKING:
+    from ._typing import _EquivalentColumnMap
+    from ._typing import _LimitOffsetType
+    from ._typing import _TypeEngineArgument
+    from .elements import BinaryExpression
+    from .elements import TextClause
+    from .selectable import _JoinTargetElement
+    from .selectable import _SelectIterable
+    from .selectable import Selectable
+    from .visitors import _TraverseCallableType
+    from .visitors import ExternallyTraversible
+    from .visitors import ExternalTraversal
+    from ..engine.interfaces import _AnyExecuteParams
+    from ..engine.interfaces import _AnyMultiExecuteParams
+    from ..engine.interfaces import _AnySingleExecuteParams
+    from ..engine.interfaces import _CoreSingleExecuteParams
+    from ..engine.row import Row
+
+_CE = TypeVar("_CE", bound="ColumnElement[Any]")
+
+
+def join_condition(
+    a: FromClause,
+    b: FromClause,
+    a_subset: Optional[FromClause] = None,
+    consider_as_foreign_keys: Optional[AbstractSet[ColumnClause[Any]]] = None,
+) -> ColumnElement[bool]:
+    """Create a join condition between two tables or selectables.
+
+    e.g.::
+
+        join_condition(tablea, tableb)
+
+    would produce an expression along the lines of::
+
+        tablea.c.id == tableb.c.tablea_id
+
+    The join is determined based on the foreign key relationships
+    between the two selectables.   If there are multiple ways
+    to join, or no way to join, an error is raised.
+
+    :param a_subset: An optional expression that is a sub-component
+        of ``a``.  An attempt will be made to join to just this sub-component
+        first before looking at the full ``a`` construct, and if found
+        will be successful even if there are other ways to join to ``a``.
+        This allows the "right side" of a join to be passed thereby
+        providing a "natural join".
+
+    """
+    return Join._join_condition(
+        a,
+        b,
+        a_subset=a_subset,
+        consider_as_foreign_keys=consider_as_foreign_keys,
+    )
+
+
+def find_join_source(
+    clauses: List[FromClause], join_to: FromClause
+) -> List[int]:
+    """Given a list of FROM clauses and a selectable,
+    return the first index and element from the list of
+    clauses which can be joined against the selectable.  returns
+    None, None if no match is found.
+
+    e.g.::
+
+        clause1 = table1.join(table2)
+        clause2 = table4.join(table5)
+
+        join_to = table2.join(table3)
+
+        find_join_source([clause1, clause2], join_to) == [0]
+
+    """
+
+    selectables = list(_from_objects(join_to))
+    idx = []
+    for i, f in enumerate(clauses):
+        for s in selectables:
+            if f.is_derived_from(s):
+                idx.append(i)
+    return idx
+
+
+def find_left_clause_that_matches_given(
+    clauses: Sequence[FromClause], join_from: FromClause
+) -> List[int]:
+    """Given a list of FROM clauses and a selectable,
+    return the indexes from the list of
+    clauses which is derived from the selectable.
+
+    """
+
+    selectables = list(_from_objects(join_from))
+    liberal_idx = []
+    for i, f in enumerate(clauses):
+        for s in selectables:
+            # basic check, if f is derived from s.
+            # this can be joins containing a table, or an aliased table
+            # or select statement matching to a table.  This check
+            # will match a table to a selectable that is adapted from
+            # that table.  With Query, this suits the case where a join
+            # is being made to an adapted entity
+            if f.is_derived_from(s):
+                liberal_idx.append(i)
+                break
+
+    # in an extremely small set of use cases, a join is being made where
+    # there are multiple FROM clauses where our target table is represented
+    # in more than one, such as embedded or similar.   in this case, do
+    # another pass where we try to get a more exact match where we aren't
+    # looking at adaption relationships.
+    if len(liberal_idx) > 1:
+        conservative_idx = []
+        for idx in liberal_idx:
+            f = clauses[idx]
+            for s in selectables:
+                if set(surface_selectables(f)).intersection(
+                    surface_selectables(s)
+                ):
+                    conservative_idx.append(idx)
+                    break
+        if conservative_idx:
+            return conservative_idx
+
+    return liberal_idx
+
+
+def find_left_clause_to_join_from(
+    clauses: Sequence[FromClause],
+    join_to: _JoinTargetElement,
+    onclause: Optional[ColumnElement[Any]],
+) -> List[int]:
+    """Given a list of FROM clauses, a selectable,
+    and optional ON clause, return a list of integer indexes from the
+    clauses list indicating the clauses that can be joined from.
+
+    The presence of an "onclause" indicates that at least one clause can
+    definitely be joined from; if the list of clauses is of length one
+    and the onclause is given, returns that index.   If the list of clauses
+    is more than length one, and the onclause is given, attempts to locate
+    which clauses contain the same columns.
+
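+    E.g., as a sketch, given hypothetical tables ``a`` and ``b``, where
+    ``b.a_id`` references ``a.id``::
+
+        find_left_clause_to_join_from([a, b], b, a.c.id == b.c.a_id)
+        # [0] -> join from "a"; "b" is the join target itself
+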
+    """
+    idx = []
+    selectables = set(_from_objects(join_to))
+
+    # if we are given more than one target clause to join
+    # from, use the onclause to provide a more specific answer.
+    # otherwise, don't try to limit, after all, "ON TRUE" is a valid
+    # on clause
+    if len(clauses) > 1 and onclause is not None:
+        resolve_ambiguity = True
+        cols_in_onclause = _find_columns(onclause)
+    else:
+        resolve_ambiguity = False
+        cols_in_onclause = None
+
+    for i, f in enumerate(clauses):
+        for s in selectables.difference([f]):
+            if resolve_ambiguity:
+                assert cols_in_onclause is not None
+                if set(f.c).union(s.c).issuperset(cols_in_onclause):
+                    idx.append(i)
+                    break
+            elif onclause is not None or Join._can_join(f, s):
+                idx.append(i)
+                break
+
+    if len(idx) > 1:
+        # this is the same "hide froms" logic from
+        # Selectable._get_display_froms
+        toremove = set(
+            chain(*[_expand_cloned(f._hide_froms) for f in clauses])
+        )
+        idx = [i for i in idx if clauses[i] not in toremove]
+
+    # onclause was given and none of them resolved, so assume
+    # all indexes can match
+    if not idx and onclause is not None:
+        return list(range(len(clauses)))
+    else:
+        return idx
+
+
+def visit_binary_product(
+    fn: Callable[
+        [BinaryExpression[Any], ColumnElement[Any], ColumnElement[Any]], None
+    ],
+    expr: ColumnElement[Any],
+) -> None:
+    """Produce a traversal of the given expression, delivering
+    column comparisons to the given function.
+
+    The function is of the form::
+
+        def my_fn(binary, left, right): ...
+
+    For each binary expression located which has a
+    comparison operator, the product of "left" and
+    "right" will be delivered to that function,
+    in terms of that binary.
+
+    Hence an expression like::
+
+        and_((a + b) == q + func.sum(e + f), j == r)
+
+    would have the traversal:
+
+    .. sourcecode:: text
+
+        a <eq> q
+        a <eq> e
+        a <eq> f
+        b <eq> q
+        b <eq> e
+        b <eq> f
+        j <eq> r
+
+    That is, every combination of "left" and
+    "right" that doesn't further contain
+    a binary comparison is passed as pairs.
+
+    """
+    stack: List[BinaryExpression[Any]] = []
+
+    def visit(element: ClauseElement) -> Iterator[ColumnElement[Any]]:
+        if isinstance(element, ScalarSelect):
+            # we don't want to dig into correlated subqueries,
+            # those are just column elements by themselves
+            yield element
+        elif element.__visit_name__ == "binary" and operators.is_comparison(
+            element.operator  # type: ignore
+        ):
+            stack.insert(0, element)  # type: ignore
+            for l in visit(element.left):  # type: ignore
+                for r in visit(element.right):  # type: ignore
+                    fn(stack[0], l, r)
+            stack.pop(0)
+            for elem in element.get_children():
+                visit(elem)
+        else:
+            if isinstance(element, ColumnClause):
+                yield element
+            for elem in element.get_children():
+                yield from visit(elem)
+
+    list(visit(expr))
+    visit = None  # type: ignore  # remove gc cycles
+
+
+def find_tables(
+    clause: ClauseElement,
+    *,
+    check_columns: bool = False,
+    include_aliases: bool = False,
+    include_joins: bool = False,
+    include_selects: bool = False,
+    include_crud: bool = False,
+) -> List[TableClause]:
+    """locate Table objects within the given expression."""
+
+    tables: List[TableClause] = []
+    _visitors: Dict[str, _TraverseCallableType[Any]] = {}
+
+    if include_selects:
+        _visitors["select"] = _visitors["compound_select"] = tables.append
+
+    if include_joins:
+        _visitors["join"] = tables.append
+
+    if include_aliases:
+        _visitors["alias"] = _visitors["subquery"] = _visitors[
+            "tablesample"
+        ] = _visitors["lateral"] = tables.append
+
+    if include_crud:
+        _visitors["insert"] = _visitors["update"] = _visitors["delete"] = (
+            lambda ent: tables.append(ent.table)
+        )
+
+    if check_columns:
+
+        def visit_column(column):
+            tables.append(column.table)
+
+        _visitors["column"] = visit_column
+
+    _visitors["table"] = tables.append
+
+    visitors.traverse(clause, {}, _visitors)
+    return tables
+
+
+def unwrap_order_by(clause: Any) -> Any:
+    """Break up an 'order by' expression into individual column-expressions,
+    without DESC/ASC/NULLS FIRST/NULLS LAST.
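+
+    E.g., as a sketch (``table`` is a hypothetical :class:`_schema.Table`)::
+
+        unwrap_order_by(table.c.x.desc())  # [table.c.x]
+
+    """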
+
+    cols = util.column_set()
+    result = []
+    stack = deque([clause])
+
+    # examples
+    # column -> ASC/DESC == column
+    # column -> ASC/DESC -> label == column
+    # column -> label -> ASC/DESC -> label == column
+    # scalar_select -> label -> ASC/DESC == scalar_select -> label
+
+    while stack:
+        t = stack.popleft()
+        if isinstance(t, ColumnElement) and (
+            not isinstance(t, UnaryExpression)
+            or not operators.is_ordering_modifier(t.modifier)  # type: ignore
+        ):
+            if isinstance(t, Label) and not isinstance(
+                t.element, ScalarSelect
+            ):
+                t = t.element
+
+                if isinstance(t, Grouping):
+                    t = t.element
+
+                stack.append(t)
+                continue
+            elif isinstance(t, _label_reference):
+                t = t.element
+
+                stack.append(t)
+                continue
+            if isinstance(t, _textual_label_reference):
+                continue
+            if t not in cols:
+                cols.add(t)
+                result.append(t)
+
+        else:
+            for c in t.get_children():
+                stack.append(c)
+    return result
+
+
+def unwrap_label_reference(element):
+    def replace(
+        element: ExternallyTraversible, **kw: Any
+    ) -> Optional[ExternallyTraversible]:
+        if isinstance(element, _label_reference):
+            return element.element
+        elif isinstance(element, _textual_label_reference):
+            assert False, "can't unwrap a textual label reference"
+        return None
+
+    return visitors.replacement_traverse(element, {}, replace)
+
+
+def expand_column_list_from_order_by(collist, order_by):
+    """Given the columns clause and ORDER BY of a selectable,
+    return a list of column expressions that can be added to the collist
+    corresponding to the ORDER BY, without repeating those already
+    in the collist.
+
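+    E.g., as a sketch (``t`` is a hypothetical table)::
+
+        expand_column_list_from_order_by(
+            collist=[t.c.a], order_by=[t.c.a.asc(), t.c.b.desc()]
+        )
+        # [t.c.b] -> t.c.a is already present
+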
+    """
+    cols_already_present = {
+        col.element if col._order_by_label_element is not None else col
+        for col in collist
+    }
+
+    to_look_for = list(chain(*[unwrap_order_by(o) for o in order_by]))
+
+    return [col for col in to_look_for if col not in cols_already_present]
+
+
+def clause_is_present(clause, search):
+    """Given a target clause and a second to search within, return True
+    if the target is plainly present in the search without any
+    subqueries or aliases involved.
+
+    Basically descends through Joins.
+
+    """
+
+    for elem in surface_selectables(search):
+        if clause == elem:  # use == here so that Annotated's compare
+            return True
+    else:
+        return False
+
+
+def tables_from_leftmost(clause: FromClause) -> Iterator[FromClause]:
+    if isinstance(clause, Join):
+        yield from tables_from_leftmost(clause.left)
+        yield from tables_from_leftmost(clause.right)
+    elif isinstance(clause, FromGrouping):
+        yield from tables_from_leftmost(clause.element)
+    else:
+        yield clause
+
+
+def surface_selectables(clause):
+    stack = [clause]
+    while stack:
+        elem = stack.pop()
+        yield elem
+        if isinstance(elem, Join):
+            stack.extend((elem.left, elem.right))
+        elif isinstance(elem, FromGrouping):
+            stack.append(elem.element)
+
+
+def surface_selectables_only(clause):
+    stack = [clause]
+    while stack:
+        elem = stack.pop()
+        if isinstance(elem, (TableClause, Alias)):
+            yield elem
+        if isinstance(elem, Join):
+            stack.extend((elem.left, elem.right))
+        elif isinstance(elem, FromGrouping):
+            stack.append(elem.element)
+        elif isinstance(elem, ColumnClause):
+            if elem.table is not None:
+                stack.append(elem.table)
+            else:
+                yield elem
+        elif elem is not None:
+            yield elem
+
+
+def extract_first_column_annotation(column, annotation_name):
+    filter_ = (FromGrouping, SelectBase)
+
+    stack = deque([column])
+    while stack:
+        elem = stack.popleft()
+        if annotation_name in elem._annotations:
+            return elem._annotations[annotation_name]
+        for sub in elem.get_children():
+            if isinstance(sub, filter_):
+                continue
+            stack.append(sub)
+    return None
+
+
+def selectables_overlap(left: FromClause, right: FromClause) -> bool:
+    """Return True if left/right have some overlapping selectable"""
+
+    return bool(
+        set(surface_selectables(left)).intersection(surface_selectables(right))
+    )
+
+
+def bind_values(clause):
+    """Return an ordered list of "bound" values in the given clause.
+
+    E.g.::
+
+        >>> expr = and_(table.c.foo == 5, table.c.foo == 7)
+        >>> bind_values(expr)
+        [5, 7]
+    """
+
+    v = []
+
+    def visit_bindparam(bind):
+        v.append(bind.effective_value)
+
+    visitors.traverse(clause, {}, {"bindparam": visit_bindparam})
+    return v
+
+
+def _quote_ddl_expr(element):
+    if isinstance(element, str):
+        element = element.replace("'", "''")
+        return "'%s'" % element
+    else:
+        return repr(element)
+
+
+class _repr_base:
+    _LIST: int = 0
+    _TUPLE: int = 1
+    _DICT: int = 2
+
+    __slots__ = ("max_chars",)
+
+    max_chars: int
+
+    def trunc(self, value: Any) -> str:
+        rep = repr(value)
+        lenrep = len(rep)
+        if lenrep > self.max_chars:
+            segment_length = self.max_chars // 2
+            rep = (
+                rep[0:segment_length]
+                + (
+                    " ... (%d characters truncated) ... "
+                    % (lenrep - self.max_chars)
+                )
+                + rep[-segment_length:]
+            )
+        return rep
+
+
+def _repr_single_value(value):
+    rp = _repr_base()
+    rp.max_chars = 300
+    return rp.trunc(value)
+
+
+class _repr_row(_repr_base):
+    """Provide a string view of a row."""
+
+    __slots__ = ("row",)
+
+    def __init__(self, row: Row[Any], max_chars: int = 300):
+        self.row = row
+        self.max_chars = max_chars
+
+    def __repr__(self) -> str:
+        trunc = self.trunc
+        return "(%s%s)" % (
+            ", ".join(trunc(value) for value in self.row),
+            "," if len(self.row) == 1 else "",
+        )
+
+
+class _long_statement(str):
+    def __str__(self) -> str:
+        lself = len(self)
+        if lself > 500:
+            lleft = 250
+            lright = 100
+            trunc = lself - lleft - lright
+            return (
+                f"{self[0:lleft]} ... {trunc} "
+                f"characters truncated ... {self[-lright:]}"
+            )
+        else:
+            return str.__str__(self)
+
+
+class _repr_params(_repr_base):
+    """Provide a string view of bound parameters.
+
+    Truncates display to a given number of 'multi' parameter sets,
+    as well as long values to a given number of characters.
+
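+    E.g., as a rough sketch (exact formatting may vary)::
+
+        _repr_params([{"a": 1}, {"a": 2}, {"a": 3}], batches=2, ismulti=True)
+        # repr() is along the lines of:
+        # [ ... displaying 2 of 3 total bound parameter sets ... {'a': 2}, {'a': 3}]
+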
+    """
+
+    __slots__ = "params", "batches", "ismulti", "max_params"
+
+    def __init__(
+        self,
+        params: Optional[_AnyExecuteParams],
+        batches: int,
+        max_params: int = 100,
+        max_chars: int = 300,
+        ismulti: Optional[bool] = None,
+    ):
+        self.params = params
+        self.ismulti = ismulti
+        self.batches = batches
+        self.max_chars = max_chars
+        self.max_params = max_params
+
+    def __repr__(self) -> str:
+        if self.ismulti is None:
+            return self.trunc(self.params)
+
+        if isinstance(self.params, list):
+            typ = self._LIST
+
+        elif isinstance(self.params, tuple):
+            typ = self._TUPLE
+        elif isinstance(self.params, dict):
+            typ = self._DICT
+        else:
+            return self.trunc(self.params)
+
+        if self.ismulti:
+            multi_params = cast(
+                "_AnyMultiExecuteParams",
+                self.params,
+            )
+
+            if len(self.params) > self.batches:
+                msg = (
+                    " ... displaying %i of %i total bound parameter sets ... "
+                )
+                return " ".join(
+                    (
+                        self._repr_multi(
+                            multi_params[: self.batches - 2],
+                            typ,
+                        )[0:-1],
+                        msg % (self.batches, len(self.params)),
+                        self._repr_multi(multi_params[-2:], typ)[1:],
+                    )
+                )
+            else:
+                return self._repr_multi(multi_params, typ)
+        else:
+            return self._repr_params(
+                cast(
+                    "_AnySingleExecuteParams",
+                    self.params,
+                ),
+                typ,
+            )
+
+    def _repr_multi(
+        self,
+        multi_params: _AnyMultiExecuteParams,
+        typ: int,
+    ) -> str:
+        if multi_params:
+            if isinstance(multi_params[0], list):
+                elem_type = self._LIST
+            elif isinstance(multi_params[0], tuple):
+                elem_type = self._TUPLE
+            elif isinstance(multi_params[0], dict):
+                elem_type = self._DICT
+            else:
+                assert False, "Unknown parameter type %s" % (
+                    type(multi_params[0])
+                )
+
+            elements = ", ".join(
+                self._repr_params(params, elem_type) for params in multi_params
+            )
+        else:
+            elements = ""
+
+        if typ == self._LIST:
+            return "[%s]" % elements
+        else:
+            return "(%s)" % elements
+
+    def _get_batches(self, params: Iterable[Any]) -> Any:
+        lparams = list(params)
+        lenparams = len(lparams)
+        if lenparams > self.max_params:
+            lleft = self.max_params // 2
+            return (
+                lparams[0:lleft],
+                lparams[-lleft:],
+                lenparams - self.max_params,
+            )
+        else:
+            return lparams, None, None
+
+    def _repr_params(
+        self,
+        params: _AnySingleExecuteParams,
+        typ: int,
+    ) -> str:
+        if typ is self._DICT:
+            return self._repr_param_dict(
+                cast("_CoreSingleExecuteParams", params)
+            )
+        elif typ is self._TUPLE:
+            return self._repr_param_tuple(cast("Sequence[Any]", params))
+        else:
+            return self._repr_param_list(params)
+
+    def _repr_param_dict(self, params: _CoreSingleExecuteParams) -> str:
+        trunc = self.trunc
+        (
+            items_first_batch,
+            items_second_batch,
+            trunclen,
+        ) = self._get_batches(params.items())
+
+        if items_second_batch:
+            text = "{%s" % (
+                ", ".join(
+                    f"{key!r}: {trunc(value)}"
+                    for key, value in items_first_batch
+                )
+            )
+            text += f" ... {trunclen} parameters truncated ... "
+            text += "%s}" % (
+                ", ".join(
+                    f"{key!r}: {trunc(value)}"
+                    for key, value in items_second_batch
+                )
+            )
+        else:
+            text = "{%s}" % (
+                ", ".join(
+                    f"{key!r}: {trunc(value)}"
+                    for key, value in items_first_batch
+                )
+            )
+        return text
+
+    def _repr_param_tuple(self, params: Sequence[Any]) -> str:
+        trunc = self.trunc
+
+        (
+            items_first_batch,
+            items_second_batch,
+            trunclen,
+        ) = self._get_batches(params)
+
+        if items_second_batch:
+            text = "(%s" % (
+                ", ".join(trunc(value) for value in items_first_batch)
+            )
+            text += f" ... {trunclen} parameters truncated ... "
+            text += "%s)" % (
+                ", ".join(trunc(value) for value in items_second_batch),
+            )
+        else:
+            text = "(%s%s)" % (
+                ", ".join(trunc(value) for value in items_first_batch),
+                "," if len(items_first_batch) == 1 else "",
+            )
+        return text
+
+    def _repr_param_list(self, params: _AnySingleExecuteParams) -> str:
+        trunc = self.trunc
+        (
+            items_first_batch,
+            items_second_batch,
+            trunclen,
+        ) = self._get_batches(params)
+
+        if items_second_batch:
+            text = "[%s" % (
+                ", ".join(trunc(value) for value in items_first_batch)
+            )
+            text += f" ... {trunclen} parameters truncated ... "
+            text += "%s]" % (
+                ", ".join(trunc(value) for value in items_second_batch)
+            )
+        else:
+            text = "[%s]" % (
+                ", ".join(trunc(value) for value in items_first_batch)
+            )
+        return text
+
+
+def adapt_criterion_to_null(crit: _CE, nulls: Collection[Any]) -> _CE:
+    """given criterion containing bind params, convert selected elements
+    to IS NULL.
+
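+    E.g., as a sketch (``t`` is a hypothetical table; the key "x" selects
+    which bound parameters to convert)::
+
+        crit = and_(t.c.x == bindparam("x"), t.c.y == bindparam("y"))
+        adapt_criterion_to_null(crit, {"x"})
+        # renders as: t.x IS NULL AND t.y = :y
+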
+    """
+
+    def visit_binary(binary):
+        if (
+            isinstance(binary.left, BindParameter)
+            and binary.left._identifying_key in nulls
+        ):
+            # reverse order if the NULL is on the left side
+            binary.left = binary.right
+            binary.right = Null()
+            binary.operator = operators.is_
+            binary.negate = operators.is_not
+        elif (
+            isinstance(binary.right, BindParameter)
+            and binary.right._identifying_key in nulls
+        ):
+            binary.right = Null()
+            binary.operator = operators.is_
+            binary.negate = operators.is_not
+
+    return visitors.cloned_traverse(crit, {}, {"binary": visit_binary})
+
+
+def splice_joins(
+    left: Optional[FromClause],
+    right: Optional[FromClause],
+    stop_on: Optional[FromClause] = None,
+) -> Optional[FromClause]:
+    if left is None:
+        return right
+
+    stack: List[Tuple[Optional[FromClause], Optional[Join]]] = [(right, None)]
+
+    adapter = ClauseAdapter(left)
+    ret = None
+    while stack:
+        (right, prevright) = stack.pop()
+        if isinstance(right, Join) and right is not stop_on:
+            right = right._clone()
+            right.onclause = adapter.traverse(right.onclause)
+            stack.append((right.left, right))
+        else:
+            right = adapter.traverse(right)
+        if prevright is not None:
+            assert right is not None
+            prevright.left = right
+        if ret is None:
+            ret = right
+
+    return ret
+
+
+@overload
+def reduce_columns(
+    columns: Iterable[ColumnElement[Any]],
+    *clauses: Optional[ClauseElement],
+    **kw: bool,
+) -> Sequence[ColumnElement[Any]]: ...
+
+
+@overload
+def reduce_columns(
+    columns: _SelectIterable,
+    *clauses: Optional[ClauseElement],
+    **kw: bool,
+) -> Sequence[Union[ColumnElement[Any], TextClause]]: ...
+
+
+def reduce_columns(
+    columns: _SelectIterable,
+    *clauses: Optional[ClauseElement],
+    **kw: bool,
+) -> Collection[Union[ColumnElement[Any], TextClause]]:
+    r"""given a list of columns, return a 'reduced' set based on natural
+    equivalents.
+
+    the set is reduced to the smallest list of columns which have no natural
+    equivalent present in the list.  A "natural equivalent" means that two
+    columns will ultimately represent the same value because they are related
+    by a foreign key.
+
+    \*clauses is an optional list of join clauses which will be traversed
+    to further identify columns that are "equivalent".
+
+    \**kw may specify 'ignore_nonexistent_tables' to ignore foreign keys
+    whose tables are not yet configured, or columns that aren't yet present,
+    as well as 'only_synonyms' to reduce only on columns that share the
+    same name as their equivalent.
+
+    This function is primarily used to determine the most minimal "primary
+    key" from a selectable, by reducing the set of primary key columns present
+    in the selectable to just those that are not repeated.
+
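+    E.g., as a sketch, given a hypothetical joined-inheritance pair where
+    ``engineers.id`` has a foreign key to ``people.id``::
+
+        reduce_columns([people.c.id, engineers.c.id])
+        # [people.c.id] -> engineers.id is a natural equivalent
+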
+    """
+    ignore_nonexistent_tables = kw.pop("ignore_nonexistent_tables", False)
+    only_synonyms = kw.pop("only_synonyms", False)
+
+    column_set = util.OrderedSet(columns)
+    cset_no_text: util.OrderedSet[ColumnElement[Any]] = column_set.difference(
+        c for c in column_set if is_text_clause(c)  # type: ignore
+    )
+
+    omit = util.column_set()
+    for col in cset_no_text:
+        for fk in chain(*[c.foreign_keys for c in col.proxy_set]):
+            for c in cset_no_text:
+                if c is col:
+                    continue
+                try:
+                    fk_col = fk.column
+                except exc.NoReferencedColumnError:
+                    # TODO: add specific coverage here
+                    # to test/sql/test_selectable ReduceTest
+                    if ignore_nonexistent_tables:
+                        continue
+                    else:
+                        raise
+                except exc.NoReferencedTableError:
+                    # TODO: add specific coverage here
+                    # to test/sql/test_selectable ReduceTest
+                    if ignore_nonexistent_tables:
+                        continue
+                    else:
+                        raise
+                if fk_col.shares_lineage(c) and (
+                    not only_synonyms or c.name == col.name
+                ):
+                    omit.add(col)
+                    break
+
+    if clauses:
+
+        def visit_binary(binary):
+            if binary.operator == operators.eq:
+                cols = util.column_set(
+                    chain(
+                        *[c.proxy_set for c in cset_no_text.difference(omit)]
+                    )
+                )
+                if binary.left in cols and binary.right in cols:
+                    for c in reversed(cset_no_text):
+                        if c.shares_lineage(binary.right) and (
+                            not only_synonyms or c.name == binary.left.name
+                        ):
+                            omit.add(c)
+                            break
+
+        for clause in clauses:
+            if clause is not None:
+                visitors.traverse(clause, {}, {"binary": visit_binary})
+
+    return column_set.difference(omit)
+
+
+def criterion_as_pairs(
+    expression,
+    consider_as_foreign_keys=None,
+    consider_as_referenced_keys=None,
+    any_operator=False,
+):
+    """traverse an expression and locate binary criterion pairs."""
+
+    if consider_as_foreign_keys and consider_as_referenced_keys:
+        raise exc.ArgumentError(
+            "Can only specify one of "
+            "'consider_as_foreign_keys' or "
+            "'consider_as_referenced_keys'"
+        )
+
+    def col_is(a, b):
+        # return a is b
+        return a.compare(b)
+
+    def visit_binary(binary):
+        if not any_operator and binary.operator is not operators.eq:
+            return
+        if not isinstance(binary.left, ColumnElement) or not isinstance(
+            binary.right, ColumnElement
+        ):
+            return
+
+        if consider_as_foreign_keys:
+            if binary.left in consider_as_foreign_keys and (
+                col_is(binary.right, binary.left)
+                or binary.right not in consider_as_foreign_keys
+            ):
+                pairs.append((binary.right, binary.left))
+            elif binary.right in consider_as_foreign_keys and (
+                col_is(binary.left, binary.right)
+                or binary.left not in consider_as_foreign_keys
+            ):
+                pairs.append((binary.left, binary.right))
+        elif consider_as_referenced_keys:
+            if binary.left in consider_as_referenced_keys and (
+                col_is(binary.right, binary.left)
+                or binary.right not in consider_as_referenced_keys
+            ):
+                pairs.append((binary.left, binary.right))
+            elif binary.right in consider_as_referenced_keys and (
+                col_is(binary.left, binary.right)
+                or binary.left not in consider_as_referenced_keys
+            ):
+                pairs.append((binary.right, binary.left))
+        else:
+            if isinstance(binary.left, Column) and isinstance(
+                binary.right, Column
+            ):
+                if binary.left.references(binary.right):
+                    pairs.append((binary.right, binary.left))
+                elif binary.right.references(binary.left):
+                    pairs.append((binary.left, binary.right))
+
+    pairs: List[Tuple[ColumnElement[Any], ColumnElement[Any]]] = []
+    visitors.traverse(expression, {}, {"binary": visit_binary})
+    return pairs
+
+
+class ClauseAdapter(visitors.ReplacingExternalTraversal):
+    """Clones and modifies clauses based on column correspondence.
+
+    E.g.::
+
+      table1 = Table(
+          "sometable",
+          metadata,
+          Column("col1", Integer),
+          Column("col2", Integer),
+      )
+      table2 = Table(
+          "someothertable",
+          metadata,
+          Column("col1", Integer),
+          Column("col2", Integer),
+      )
+
+      condition = table1.c.col1 == table2.c.col1
+
+    make an alias of table1::
+
+      s = table1.alias("foo")
+
+    calling ``ClauseAdapter(s).traverse(condition)`` converts
+    condition to read::
+
+      s.c.col1 == table2.c.col1
+
+    """
+
+    __slots__ = (
+        "__traverse_options__",
+        "selectable",
+        "include_fn",
+        "exclude_fn",
+        "equivalents",
+        "adapt_on_names",
+        "adapt_from_selectables",
+    )
+
+    def __init__(
+        self,
+        selectable: Selectable,
+        equivalents: Optional[_EquivalentColumnMap] = None,
+        include_fn: Optional[Callable[[ClauseElement], bool]] = None,
+        exclude_fn: Optional[Callable[[ClauseElement], bool]] = None,
+        adapt_on_names: bool = False,
+        anonymize_labels: bool = False,
+        adapt_from_selectables: Optional[AbstractSet[FromClause]] = None,
+    ):
+        self.__traverse_options__ = {
+            "stop_on": [selectable],
+            "anonymize_labels": anonymize_labels,
+        }
+        self.selectable = selectable
+        self.include_fn = include_fn
+        self.exclude_fn = exclude_fn
+        self.equivalents = util.column_dict(equivalents or {})
+        self.adapt_on_names = adapt_on_names
+        self.adapt_from_selectables = adapt_from_selectables
+
+    if TYPE_CHECKING:
+
+        @overload
+        def traverse(self, obj: Literal[None]) -> None: ...
+
+        # note this specializes the ReplacingExternalTraversal.traverse()
+        # method to state
+        # that we will return the same kind of ExternalTraversal object as
+        # we were given.  This is probably not 100% true, such as it's
+        # possible for us to swap out Alias for Table at the top level.
+        # Ideally there could be overloads specific to ColumnElement and
+        # FromClause but Mypy is not accepting those as compatible with
+        # the base ReplacingExternalTraversal
+        @overload
+        def traverse(self, obj: _ET) -> _ET: ...
+
+        def traverse(
+            self, obj: Optional[ExternallyTraversible]
+        ) -> Optional[ExternallyTraversible]: ...
+
+    def _corresponding_column(
+        self, col, require_embedded, _seen=util.EMPTY_SET
+    ):
+        newcol = self.selectable.corresponding_column(
+            col, require_embedded=require_embedded
+        )
+        if newcol is None and col in self.equivalents and col not in _seen:
+            for equiv in self.equivalents[col]:
+                newcol = self._corresponding_column(
+                    equiv,
+                    require_embedded=require_embedded,
+                    _seen=_seen.union([col]),
+                )
+                if newcol is not None:
+                    return newcol
+
+        if (
+            self.adapt_on_names
+            and newcol is None
+            and isinstance(col, NamedColumn)
+        ):
+            newcol = self.selectable.exported_columns.get(col.name)
+        return newcol
+
+    @util.preload_module("sqlalchemy.sql.functions")
+    def replace(
+        self, col: _ET, _include_singleton_constants: bool = False
+    ) -> Optional[_ET]:
+        functions = util.preloaded.sql_functions
+
+        # TODO: cython candidate
+
+        if self.include_fn and not self.include_fn(col):  # type: ignore
+            return None
+        elif self.exclude_fn and self.exclude_fn(col):  # type: ignore
+            return None
+
+        if isinstance(col, FromClause) and not isinstance(
+            col, functions.FunctionElement
+        ):
+            if self.selectable.is_derived_from(col):
+                if self.adapt_from_selectables:
+                    for adp in self.adapt_from_selectables:
+                        if adp.is_derived_from(col):
+                            break
+                    else:
+                        return None
+                return self.selectable  # type: ignore
+            elif isinstance(col, Alias) and isinstance(
+                col.element, TableClause
+            ):
+                # we are a SELECT statement and not derived from an alias of a
+                # table (which nonetheless may be a table our SELECT derives
+                # from), so return the alias to prevent further traversal
+                # or
+                # we are an alias of a table and we are not derived from an
+                # alias of a table (which nonetheless may be the same table
+                # as ours) so, same thing
+                return col  # type: ignore
+            else:
+                # other cases where we are a selectable and the element
+                # is another join or selectable that contains a table which our
+                # selectable derives from, that we want to process
+                return None
+
+        elif not isinstance(col, ColumnElement):
+            return None
+        elif not _include_singleton_constants and col._is_singleton_constant:
+            # dont swap out NULL, TRUE, FALSE for a label name
+            # in a SQL statement that's being rewritten,
+            # leave them as the constant.  This is first noted in #6259,
+            # however the logic to check this moved here as of #7154 so that
+            # it is made specific to SQL rewriting and not all column
+            # correspondence
+
+            return None
+
+        if "adapt_column" in col._annotations:
+            col = col._annotations["adapt_column"]
+
+        if TYPE_CHECKING:
+            assert isinstance(col, KeyedColumnElement)
+
+        if self.adapt_from_selectables and col not in self.equivalents:
+            for adp in self.adapt_from_selectables:
+                if adp.c.corresponding_column(col, False) is not None:
+                    break
+            else:
+                return None
+
+        if TYPE_CHECKING:
+            assert isinstance(col, KeyedColumnElement)
+
+        return self._corresponding_column(  # type: ignore
+            col, require_embedded=True
+        )
+
+
+class _ColumnLookup(Protocol):
+    @overload
+    def __getitem__(self, key: None) -> None: ...
+
+    @overload
+    def __getitem__(self, key: ColumnClause[Any]) -> ColumnClause[Any]: ...
+
+    @overload
+    def __getitem__(self, key: ColumnElement[Any]) -> ColumnElement[Any]: ...
+
+    @overload
+    def __getitem__(self, key: _ET) -> _ET: ...
+
+    def __getitem__(self, key: Any) -> Any: ...
+
+
+class ColumnAdapter(ClauseAdapter):
+    """Extends ClauseAdapter with extra utility functions.
+
+    Key aspects of ColumnAdapter include:
+
+    * Expressions that are adapted are stored in a persistent
+      .columns collection, so that an expression E adapted into
+      an expression E1 will return the same object E1 when adapted
+      a second time.   This is important in particular for things like
+      Label objects that are anonymized, so that the ColumnAdapter can
+      be used to present a consistent "adapted" view of things.
+
+    * Exclusion of items from the persistent collection based on
+      include/exclude rules, but also independent of hash identity.
+      This is because "annotated" items all have the same hash identity as
+      their parent.
+
+    * "wrapping" capability is added, so that the replacement of an expression
+      E can proceed through a series of adapters.  This differs from the
+      visitor's "chaining" feature in that the resulting object is passed
+      through all replacing functions unconditionally, rather than stopping
+      at the first one that returns non-None.
+
+    * An adapt_required option, used by eager loading to indicate that
+      we don't trust a result row column that is not translated.
+      This is to prevent a column from being interpreted as that
+      of the child row in a self-referential scenario, see
+      inheritance/test_basic.py->EagerTargetingTest.test_adapt_stringency
+
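+    A brief usage sketch (``t`` is a hypothetical table)::
+
+        t1 = t.alias("t1")
+        adapter = ColumnAdapter(t1)
+        adapter.traverse(t.c.x)  # returns t1.c.x, memoized in .columns
+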
+    """
+
+    __slots__ = (
+        "columns",
+        "adapt_required",
+        "allow_label_resolve",
+        "_wrap",
+        "__weakref__",
+    )
+
+    columns: _ColumnLookup
+
+    def __init__(
+        self,
+        selectable: Selectable,
+        equivalents: Optional[_EquivalentColumnMap] = None,
+        adapt_required: bool = False,
+        include_fn: Optional[Callable[[ClauseElement], bool]] = None,
+        exclude_fn: Optional[Callable[[ClauseElement], bool]] = None,
+        adapt_on_names: bool = False,
+        allow_label_resolve: bool = True,
+        anonymize_labels: bool = False,
+        adapt_from_selectables: Optional[AbstractSet[FromClause]] = None,
+    ):
+        super().__init__(
+            selectable,
+            equivalents,
+            include_fn=include_fn,
+            exclude_fn=exclude_fn,
+            adapt_on_names=adapt_on_names,
+            anonymize_labels=anonymize_labels,
+            adapt_from_selectables=adapt_from_selectables,
+        )
+
+        self.columns = util.WeakPopulateDict(self._locate_col)  # type: ignore
+        if self.include_fn or self.exclude_fn:
+            self.columns = self._IncludeExcludeMapping(self, self.columns)
+        self.adapt_required = adapt_required
+        self.allow_label_resolve = allow_label_resolve
+        self._wrap = None
+
+    class _IncludeExcludeMapping:
+        def __init__(self, parent, columns):
+            self.parent = parent
+            self.columns = columns
+
+        def __getitem__(self, key):
+            if (
+                self.parent.include_fn and not self.parent.include_fn(key)
+            ) or (self.parent.exclude_fn and self.parent.exclude_fn(key)):
+                if self.parent._wrap:
+                    return self.parent._wrap.columns[key]
+                else:
+                    return key
+            return self.columns[key]
+
+    def wrap(self, adapter):
+        ac = copy.copy(self)
+        ac._wrap = adapter
+        ac.columns = util.WeakPopulateDict(ac._locate_col)  # type: ignore
+        if ac.include_fn or ac.exclude_fn:
+            ac.columns = self._IncludeExcludeMapping(ac, ac.columns)
+
+        return ac
+
+    @overload
+    def traverse(self, obj: Literal[None]) -> None: ...
+
+    @overload
+    def traverse(self, obj: _ET) -> _ET: ...
+
+    def traverse(
+        self, obj: Optional[ExternallyTraversible]
+    ) -> Optional[ExternallyTraversible]:
+        return self.columns[obj]
+
+    def chain(self, visitor: ExternalTraversal) -> ColumnAdapter:
+        assert isinstance(visitor, ColumnAdapter)
+
+        return super().chain(visitor)
+
+    if TYPE_CHECKING:
+
+        @property
+        def visitor_iterator(self) -> Iterator[ColumnAdapter]: ...
+
+    adapt_clause = traverse
+    adapt_list = ClauseAdapter.copy_and_process
+
+    def adapt_check_present(
+        self, col: ColumnElement[Any]
+    ) -> Optional[ColumnElement[Any]]:
+        newcol = self.columns[col]
+
+        if newcol is col and self._corresponding_column(col, True) is None:
+            return None
+
+        return newcol
+
+    def _locate_col(
+        self, col: ColumnElement[Any]
+    ) -> Optional[ColumnElement[Any]]:
+        # both replace and traverse() are overly complicated for what
+        # we are doing here and we would do better to have an inlined
+        # version that doesn't build up as much overhead.  the issue is that
+        # sometimes the lookup does in fact have to adapt the insides of
+        # say a labeled scalar subquery.   However, if the object is an
+        # Immutable, i.e. Column objects, we can skip the "clone" /
+        # "copy internals" part since those will be no-ops in any case.
+        # additionally we want to catch singleton objects null/true/false
+        # and make sure they are adapted as well here.
+
+        if col._is_immutable:
+            for vis in self.visitor_iterator:
+                c = vis.replace(col, _include_singleton_constants=True)
+                if c is not None:
+                    break
+            else:
+                c = col
+        else:
+            c = ClauseAdapter.traverse(self, col)
+
+        if self._wrap:
+            c2 = self._wrap._locate_col(c)
+            if c2 is not None:
+                c = c2
+
+        if self.adapt_required and c is col:
+            return None
+
+        # allow_label_resolve is consumed by one case for joined eager loading
+        # as part of its logic to prevent its own columns from being affected
+        # by .order_by().  Before full typing were applied to the ORM, this
+        # logic would set this attribute on the incoming object (which is
+        # typically a column, but we have a test for it being a non-column
+        # object) if no column were found.  While this seemed to
+        # have no negative effects, this adjustment should only occur on the
+        # new column which is assumed to be local to an adapted selectable.
+        if c is not col:
+            c._allow_label_resolve = self.allow_label_resolve
+
+        return c
+
+
+def _offset_or_limit_clause(
+    element: _LimitOffsetType,
+    name: Optional[str] = None,
+    type_: Optional[_TypeEngineArgument[int]] = None,
+) -> ColumnElement[int]:
+    """Convert the given value to an "offset or limit" clause.
+
+    This handles incoming integers and converts to an expression; if
+    an expression is already given, it is passed through.
+
+    """
+    return coercions.expect(
+        roles.LimitOffsetRole, element, name=name, type_=type_
+    )
+
+
+def _offset_or_limit_clause_asint_if_possible(
+    clause: _LimitOffsetType,
+) -> _LimitOffsetType:
+    """Return the offset or limit clause as a simple integer if possible,
+    else return the clause.
+
+    """
+    if clause is None:
+        return None
+    if hasattr(clause, "_limit_offset_value"):
+        value = clause._limit_offset_value
+        return util.asint(value)
+    else:
+        return clause
+
+
+def _make_slice(
+    limit_clause: _LimitOffsetType,
+    offset_clause: _LimitOffsetType,
+    start: int,
+    stop: int,
+) -> Tuple[Optional[ColumnElement[int]], Optional[ColumnElement[int]]]:
+    """Compute LIMIT/OFFSET in terms of slice start/end"""
+
+    # for calculated limit/offset, try to do the addition of
+    # values to offset in Python, however if a SQL clause is present
+    # then the addition has to be on the SQL side.
+
+    # TODO: typing is finding a few gaps in here, see if they can be
+    # closed up
+
+    if start is not None and stop is not None:
+        offset_clause = _offset_or_limit_clause_asint_if_possible(
+            offset_clause
+        )
+        if offset_clause is None:
+            offset_clause = 0
+
+        if start != 0:
+            offset_clause = offset_clause + start  # type: ignore
+
+        if offset_clause == 0:
+            offset_clause = None
+        else:
+            assert offset_clause is not None
+            offset_clause = _offset_or_limit_clause(offset_clause)
+
+        limit_clause = _offset_or_limit_clause(stop - start)
+
+    elif start is None and stop is not None:
+        limit_clause = _offset_or_limit_clause(stop)
+    elif start is not None and stop is None:
+        offset_clause = _offset_or_limit_clause_asint_if_possible(
+            offset_clause
+        )
+        if offset_clause is None:
+            offset_clause = 0
+
+        if start != 0:
+            offset_clause = offset_clause + start
+
+        if offset_clause == 0:
+            offset_clause = None
+        else:
+            offset_clause = _offset_or_limit_clause(offset_clause)
+
+    return limit_clause, offset_clause
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/sql/visitors.py b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/visitors.py
new file mode 100644
index 00000000..e758350a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/sql/visitors.py
@@ -0,0 +1,1167 @@
+# sql/visitors.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Visitor/traversal interface and library functions.
+
+
+"""
+
+from __future__ import annotations
+
+from collections import deque
+from enum import Enum
+import itertools
+import operator
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import ClassVar
+from typing import Dict
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import overload
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from .. import exc
+from .. import util
+from ..util import langhelpers
+from ..util._has_cy import HAS_CYEXTENSION
+from ..util.typing import Literal
+from ..util.typing import Protocol
+from ..util.typing import Self
+
+if TYPE_CHECKING:
+    from .annotation import _AnnotationDict
+    from .elements import ColumnElement
+
+if typing.TYPE_CHECKING or not HAS_CYEXTENSION:
+    from ._py_util import prefix_anon_map as prefix_anon_map
+    from ._py_util import cache_anon_map as anon_map
+else:
+    from sqlalchemy.cyextension.util import (  # noqa: F401,E501
+        prefix_anon_map as prefix_anon_map,
+    )
+    from sqlalchemy.cyextension.util import (  # noqa: F401,E501
+        cache_anon_map as anon_map,
+    )
+
+
+__all__ = [
+    "iterate",
+    "traverse_using",
+    "traverse",
+    "cloned_traverse",
+    "replacement_traverse",
+    "Visitable",
+    "ExternalTraversal",
+    "InternalTraversal",
+    "anon_map",
+]
+
+
+class _CompilerDispatchType(Protocol):
+    def __call__(_self, self: Visitable, visitor: Any, **kw: Any) -> Any: ...
+
+
+class Visitable:
+    """Base class for visitable objects.
+
+    :class:`.Visitable` is used to implement the SQL compiler dispatch
+    functions.    Other forms of traversal such as for cache key generation
+    are implemented separately using the :class:`.HasTraverseInternals`
+    interface.
+
+    .. versionchanged:: 2.0  The :class:`.Visitable` class was named
+       :class:`.Traversible` in the 1.4 series; the name is changed back
+       to :class:`.Visitable` in 2.0 which is what it was prior to 1.4.
+
+       Both names remain importable in both 1.4 and 2.0 versions.
+
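+    As an illustrative sketch of the dispatch convention (the class name
+    here is hypothetical)::
+
+        class MyElement(Visitable):
+            __visit_name__ = "my_element"
+
+        # compilation will look for visitor.visit_my_element(element, **kw)
+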
+    """
+
+    __slots__ = ()
+
+    __visit_name__: str
+
+    _original_compiler_dispatch: _CompilerDispatchType
+
+    if typing.TYPE_CHECKING:
+
+        def _compiler_dispatch(self, visitor: Any, **kw: Any) -> str: ...
+
+    def __init_subclass__(cls) -> None:
+        if "__visit_name__" in cls.__dict__:
+            cls._generate_compiler_dispatch()
+        super().__init_subclass__()
+
+    @classmethod
+    def _generate_compiler_dispatch(cls) -> None:
+        visit_name = cls.__visit_name__
+
+        if "_compiler_dispatch" in cls.__dict__:
+            # class has a fixed _compiler_dispatch() method.
+            # copy it to "original" so that we can get it back if
+            # sqlalchemy.ext.compiles overrides it.
+            cls._original_compiler_dispatch = cls._compiler_dispatch
+            return
+
+        if not isinstance(visit_name, str):
+            raise exc.InvalidRequestError(
+                f"__visit_name__ on class {cls.__name__} must be a string "
+                "at the class level"
+            )
+
+        name = "visit_%s" % visit_name
+        getter = operator.attrgetter(name)
+
+        def _compiler_dispatch(
+            self: Visitable, visitor: Any, **kw: Any
+        ) -> str:
+            """Look for an attribute named "visit_<visit_name>" on the
+            visitor, and call it with the same kw params.
+
+            """
+            try:
+                meth = getter(visitor)
+            except AttributeError as err:
+                return visitor.visit_unsupported_compilation(self, err, **kw)  # type: ignore  # noqa: E501
+            else:
+                return meth(self, **kw)  # type: ignore  # noqa: E501
+
+        cls._compiler_dispatch = (  # type: ignore
+            cls._original_compiler_dispatch
+        ) = _compiler_dispatch
+
+    def __class_getitem__(cls, key: Any) -> Any:
+        # allow generic classes in py3.9+
+        return cls
+
+
+class InternalTraversal(Enum):
+    r"""Defines visitor symbols used for internal traversal.
+
+    The :class:`.InternalTraversal` class is used in two ways.  One is that
+    it can serve as the superclass for an object that implements the
+    various visit methods of the class.   The other is that the symbols
+    themselves of :class:`.InternalTraversal` are used within
+    the ``_traverse_internals`` collection.   Such as, the :class:`.Case`
+    object defines ``_traverse_internals`` as ::
+
+        class Case(ColumnElement[_T]):
+            _traverse_internals = [
+                ("value", InternalTraversal.dp_clauseelement),
+                ("whens", InternalTraversal.dp_clauseelement_tuples),
+                ("else_", InternalTraversal.dp_clauseelement),
+            ]
+
+    Above, the :class:`.Case` class indicates its internal state as the
+    attributes named ``value``, ``whens``, and ``else_``.    They each
+    link to an :class:`.InternalTraversal` method which indicates the type
+    of datastructure to which each attribute refers.
+
+    Using the ``_traverse_internals`` structure, objects of type
+    :class:`.HasTraverseInternals` will have the following methods
+    automatically implemented:
+
+    * :meth:`.HasTraverseInternals.get_children`
+
+    * :meth:`.HasTraverseInternals._copy_internals`
+
+    * :meth:`.HasCacheKey._gen_cache_key`
+
+    Subclasses can also implement these methods directly, particularly for the
+    :meth:`.HasTraverseInternals._copy_internals` method, when special steps
+    are needed.
+
+    .. versionadded:: 1.4
+
+    """
+
+    dp_has_cache_key = "HC"
+    """Visit a :class:`.HasCacheKey` object."""
+
+    dp_has_cache_key_list = "HL"
+    """Visit a list of :class:`.HasCacheKey` objects."""
+
+    dp_clauseelement = "CE"
+    """Visit a :class:`_expression.ClauseElement` object."""
+
+    dp_fromclause_canonical_column_collection = "FC"
+    """Visit a :class:`_expression.FromClause` object in the context of the
+    ``columns`` attribute.
+
+    The column collection is "canonical", meaning it is the originally
+    defined location of the :class:`.ColumnClause` objects.   Right now
+    this means that the object being visited is a
+    :class:`_expression.TableClause`
+    or :class:`_schema.Table` object only.
+
+    """
+
+    dp_clauseelement_tuples = "CTS"
+    """Visit a list of tuples which contain :class:`_expression.ClauseElement`
+    objects.
+
+    """
+
+    dp_clauseelement_list = "CL"
+    """Visit a list of :class:`_expression.ClauseElement` objects.
+
+    """
+
+    dp_clauseelement_tuple = "CT"
+    """Visit a tuple of :class:`_expression.ClauseElement` objects.
+
+    """
+
+    dp_executable_options = "EO"
+
+    dp_with_context_options = "WC"
+
+    dp_fromclause_ordered_set = "CO"
+    """Visit an ordered set of :class:`_expression.FromClause` objects. """
+
+    dp_string = "S"
+    """Visit a plain string value.
+
+    Examples include table and column names, bound parameter keys, special
+    keywords such as "UNION", "UNION ALL".
+
+    The string value is considered to be significant for cache key
+    generation.
+
+    """
+
+    dp_string_list = "SL"
+    """Visit a list of strings."""
+
+    dp_anon_name = "AN"
+    """Visit a potentially "anonymized" string value.
+
+    The string value is considered to be significant for cache key
+    generation.
+
+    """
+
+    dp_boolean = "B"
+    """Visit a boolean value.
+
+    The boolean value is considered to be significant for cache key
+    generation.
+
+    """
+
+    dp_operator = "O"
+    """Visit an operator.
+
+    The operator is a function from the :mod:`sqlalchemy.sql.operators`
+    module.
+
+    The operator value is considered to be significant for cache key
+    generation.
+
+    """
+
+    dp_type = "T"
+    """Visit a :class:`.TypeEngine` object
+
+    The type object is considered to be significant for cache key
+    generation.
+
+    """
+
+    dp_plain_dict = "PD"
+    """Visit a dictionary with string keys.
+
+    The keys of the dictionary should be strings, the values should
+    be immutable and hashable.   The dictionary is considered to be
+    significant for cache key generation.
+
+    """
+
+    dp_dialect_options = "DO"
+    """Visit a dialect options structure."""
+
+    dp_string_clauseelement_dict = "CD"
+    """Visit a dictionary of string keys to :class:`_expression.ClauseElement`
+    objects.
+
+    """
+
+    dp_string_multi_dict = "MD"
+    """Visit a dictionary of string keys to values which may either be
+    plain immutable/hashable or :class:`.HasCacheKey` objects.
+
+    """
+
+    dp_annotations_key = "AK"
+    """Visit the _annotations_cache_key element.
+
+    This is a dictionary of additional information about a ClauseElement
+    that modifies its role.  It should be included when comparing or caching
+    objects, however generating this key is relatively expensive.   Visitors
+    should check the "_annotations" dict for non-None first before creating
+    this key.
+
+    """
+
+    dp_plain_obj = "PO"
+    """Visit a plain python object.
+
+    The value should be immutable and hashable, such as an integer.
+    The value is considered to be significant for cache key generation.
+
+    """
+
+    dp_named_ddl_element = "DD"
+    """Visit a simple named DDL element.
+
+    The current object used by this method is the :class:`.Sequence`.
+
+    The object is only considered to be important for cache key generation
+    as far as its name, but not any other aspects of it.
+
+    """
+
+    dp_prefix_sequence = "PS"
+    """Visit the sequence represented by :class:`_expression.HasPrefixes`
+    or :class:`_expression.HasSuffixes`.
+
+    """
+
+    dp_table_hint_list = "TH"
+    """Visit the ``_hints`` collection of a :class:`_expression.Select`
+    object.
+
+    """
+
+    dp_setup_join_tuple = "SJ"
+
+    dp_memoized_select_entities = "ME"
+
+    dp_statement_hint_list = "SH"
+    """Visit the ``_statement_hints`` collection of a
+    :class:`_expression.Select`
+    object.
+
+    """
+
+    dp_unknown_structure = "UK"
+    """Visit an unknown structure.
+
+    """
+
+    dp_dml_ordered_values = "DML_OV"
+    """Visit the values() ordered tuple list of an
+    :class:`_expression.Update` object."""
+
+    dp_dml_values = "DML_V"
+    """Visit the values() dictionary of a :class:`.ValuesBase`
+    (e.g. Insert or Update) object.
+
+    """
+
+    dp_dml_multi_values = "DML_MV"
+    """Visit the values() multi-valued list of dictionaries of an
+    :class:`_expression.Insert` object.
+
+    """
+
+    dp_propagate_attrs = "PA"
+    """Visit the propagate attrs dict.  This hardcodes to the particular
+    elements we care about right now."""
+
+    """Symbols that follow are additional symbols that are useful in
+    caching applications.
+
+    Traversals for :class:`_expression.ClauseElement` objects only need to use
+    those symbols present in :class:`.InternalTraversal`.  However, for
+    additional caching use cases within the ORM, symbols dealing with the
+    :class:`.HasCacheKey` class are added here.
+
+    """
+
+    dp_ignore = "IG"
+    """Specify an object that should be ignored entirely.
+
+    This currently applies function call argument caching where some
+    arguments should not be considered to be part of a cache key.
+
+    """
+
+    dp_inspectable = "IS"
+    """Visit an inspectable object where the return value is a
+    :class:`.HasCacheKey` object."""
+
+    dp_multi = "M"
+    """Visit an object that may be a :class:`.HasCacheKey` or may be a
+    plain hashable object."""
+
+    dp_multi_list = "MT"
+    """Visit a tuple containing elements that may be :class:`.HasCacheKey` or
+    may be a plain hashable object."""
+
+    dp_has_cache_key_tuples = "HT"
+    """Visit a list of tuples which contain :class:`.HasCacheKey`
+    objects.
+
+    """
+
+    dp_inspectable_list = "IL"
+    """Visit a list of inspectable objects which upon inspection are
+    HasCacheKey objects."""
+
+
+_TraverseInternalsType = List[Tuple[str, InternalTraversal]]
+"""a structure that defines how a HasTraverseInternals should be
+traversed.
+
+This structure consists of a list of (attributename, internaltraversal)
+tuples, where the "attributename" refers to the name of an attribute on an
+instance of the HasTraverseInternals object, and "internaltraversal" refers
+to an :class:`.InternalTraversal` enumeration symbol defining what kind
+of data this attribute stores, which indicates to the traverser how it should
+be handled.
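+
+For example, a hypothetical element (illustrative only; the ``dp_string``
+and ``dp_clauseelement_list`` symbols are members of
+:class:`.InternalTraversal`) might declare::
+
+    class MyElement(HasTraverseInternals):
+        _traverse_internals: _TraverseInternalsType = [
+            ("name", InternalTraversal.dp_string),
+            ("clauses", InternalTraversal.dp_clauseelement_list),
+        ]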
+
+"""
+
+
+class HasTraverseInternals:
+    """base for classes that have a "traverse internals" element,
+    which defines all kinds of ways of traversing the elements of an object.
+
+    Compared to :class:`.Visitable`, which relies upon an external visitor to
+    define how the object is travered (i.e. the :class:`.SQLCompiler`), the
+    :class:`.HasTraverseInternals` interface allows classes to define their own
+    traversal, that is, what attributes are accessed and in what order.
+
+    """
+
+    __slots__ = ()
+
+    _traverse_internals: _TraverseInternalsType
+
+    _is_immutable: bool = False
+
+    @util.preload_module("sqlalchemy.sql.traversals")
+    def get_children(
+        self, *, omit_attrs: Tuple[str, ...] = (), **kw: Any
+    ) -> Iterable[HasTraverseInternals]:
+        r"""Return immediate child :class:`.visitors.HasTraverseInternals`
+        elements of this :class:`.visitors.HasTraverseInternals`.
+
+        This is used for visit traversal.
+
+        \**kw may contain flags that change the collection that is
+        returned, for example to return a subset of items in order to
+        cut down on larger traversals, or to return child items from a
+        different context (such as schema-level collections instead of
+        clause-level).
+
+        """
+
+        traversals = util.preloaded.sql_traversals
+
+        try:
+            traverse_internals = self._traverse_internals
+        except AttributeError:
+            # user-defined classes may not have a _traverse_internals
+            return []
+
+        dispatch = traversals._get_children.run_generated_dispatch
+        return itertools.chain.from_iterable(
+            meth(obj, **kw)
+            for attrname, obj, meth in dispatch(
+                self, traverse_internals, "_generated_get_children_traversal"
+            )
+            if attrname not in omit_attrs and obj is not None
+        )
+
+
+class _InternalTraversalDispatchType(Protocol):
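+    # note: the generated dispatcher is a plain function whose first
+    # positional parameter is the target object (spelled ``self`` in the
+    # generated code); ``s`` here stands in for the protocol instance itself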
+    def __call__(s, self: object, visitor: HasTraversalDispatch) -> Any: ...
+
+
+class HasTraversalDispatch:
+    r"""Define infrastructure for classes that perform internal traversals
+
+    .. versionadded:: 2.0
+
+    """
+
+    __slots__ = ()
+
+    _dispatch_lookup: ClassVar[Dict[Union[InternalTraversal, str], str]] = {}
+
+    def dispatch(self, visit_symbol: InternalTraversal) -> Callable[..., Any]:
+        """Given a method from :class:`.HasTraversalDispatch`, return the
+        corresponding method on a subclass.
+
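+        E.g., a minimal sketch (``visitor`` being any
+        :class:`.HasTraversalDispatch` instance)::
+
+            meth = visitor.dispatch(InternalTraversal.dp_plain_dict)
+            # equivalent to getattr(visitor, "visit_plain_dict", None)
+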
+        """
+        name = _dispatch_lookup[visit_symbol]
+        return getattr(self, name, None)  # type: ignore
+
+    def run_generated_dispatch(
+        self,
+        target: object,
+        internal_dispatch: _TraverseInternalsType,
+        generate_dispatcher_name: str,
+    ) -> Any:
+        dispatcher: _InternalTraversalDispatchType
+        try:
+            dispatcher = target.__class__.__dict__[generate_dispatcher_name]
+        except KeyError:
+            # traversals.py -> _preconfigure_traversals()
+            # may be used to run these ahead of time, but
+            # is not enabled right now.
+            # this block will generate any remaining dispatchers.
+            dispatcher = self.generate_dispatch(
+                target.__class__, internal_dispatch, generate_dispatcher_name
+            )
+        return dispatcher(target, self)
+
+    def generate_dispatch(
+        self,
+        target_cls: Type[object],
+        internal_dispatch: _TraverseInternalsType,
+        generate_dispatcher_name: str,
+    ) -> _InternalTraversalDispatchType:
+        dispatcher = self._generate_dispatcher(
+            internal_dispatch, generate_dispatcher_name
+        )
+        # assert isinstance(target_cls, type)
+        setattr(target_cls, generate_dispatcher_name, dispatcher)
+        return dispatcher
+
+    def _generate_dispatcher(
+        self, internal_dispatch: _TraverseInternalsType, method_name: str
+    ) -> _InternalTraversalDispatchType:
+        names = []
+        for attrname, visit_sym in internal_dispatch:
+            meth = self.dispatch(visit_sym)
+            if meth is not None:
+                visit_name = _dispatch_lookup[visit_sym]
+                names.append((attrname, visit_name))
+
+        code = (
+            "    return [\n"
+            + ", \n".join(
+                "        (%r, self.%s, visitor.%s)"
+                % (attrname, attrname, visit_name)
+                for attrname, visit_name in names
+            )
+            + "\n    ]\n"
+        )
+        meth_text = ("def %s(self, visitor):\n" % method_name) + code + "\n"
+        return cast(
+            _InternalTraversalDispatchType,
+            langhelpers._exec_code_in_env(meth_text, {}, method_name),
+        )
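+
+    # For illustration: given an internal_dispatch of
+    # [("info", InternalTraversal.dp_plain_dict)] ("info" being a
+    # hypothetical attribute name), the generated method text takes
+    # the form:
+    #
+    #     def _generated_get_children_traversal(self, visitor):
+    #         return [
+    #             ('info', self.info, visitor.visit_plain_dict)
+    #         ]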
+
+
+ExtendedInternalTraversal = InternalTraversal
+
+
+def _generate_traversal_dispatch() -> None:
+    lookup = _dispatch_lookup
+
+    for sym in InternalTraversal:
+        key = sym.name
+        if key.startswith("dp_"):
+            visit_key = key.replace("dp_", "visit_")
+            sym_name = sym.value
+            assert sym_name not in lookup, sym_name
+            lookup[sym] = lookup[sym_name] = visit_key
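+            # e.g. both InternalTraversal.dp_plain_dict and its string
+            # value "PD" map to "visit_plain_dict"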
+
+
+_dispatch_lookup = HasTraversalDispatch._dispatch_lookup
+_generate_traversal_dispatch()
+
+
+class ExternallyTraversible(HasTraverseInternals, Visitable):
+    __slots__ = ()
+
+    _annotations: Mapping[Any, Any] = util.EMPTY_DICT
+
+    if typing.TYPE_CHECKING:
+
+        def _annotate(self, values: _AnnotationDict) -> Self: ...
+
+        def get_children(
+            self, *, omit_attrs: Tuple[str, ...] = (), **kw: Any
+        ) -> Iterable[ExternallyTraversible]: ...
+
+    def _clone(self, **kw: Any) -> Self:
+        """clone this element"""
+        raise NotImplementedError()
+
+    def _copy_internals(
+        self, *, omit_attrs: Tuple[str, ...] = (), **kw: Any
+    ) -> None:
+        """Reassign internal elements to be clones of themselves.
+
+        Called during a copy-and-traverse operation on newly
+        shallow-copied elements to create a deep copy.
+
+        The given clone function should be used; it may apply additional
+        transformations to the element (i.e. replacement traversal, cloned
+        traversal, annotations).
+
+        """
+        raise NotImplementedError()
+
+
+_ET = TypeVar("_ET", bound=ExternallyTraversible)
+
+_CE = TypeVar("_CE", bound="ColumnElement[Any]")
+
+_TraverseCallableType = Callable[[_ET], None]
+
+
+class _CloneCallableType(Protocol):
+    def __call__(self, element: _ET, **kw: Any) -> _ET: ...
+
+
+class _TraverseTransformCallableType(Protocol[_ET]):
+    def __call__(self, element: _ET, **kw: Any) -> Optional[_ET]: ...
+
+
+_ExtT = TypeVar("_ExtT", bound="ExternalTraversal")
+
+
+class ExternalTraversal(util.MemoizedSlots):
+    """Base class for visitor objects which can traverse externally using
+    the :func:`.visitors.traverse` function.
+
+    Direct usage of the :func:`.visitors.traverse` function is usually
+    preferred.
+
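+    E.g., a minimal sketch of a custom visitor (hypothetical class,
+    relying on the traversed elements having ``__visit_name__ =
+    "column"``)::
+
+        from sqlalchemy import column
+
+
+        class ColumnCollector(ExternalTraversal):
+            def __init__(self):
+                self.names = []
+
+            def visit_column(self, col):
+                self.names.append(col.name)
+
+
+        collector = ColumnCollector()
+        collector.traverse(column("a") == column("b"))
+        # collector.names is now ["a", "b"]
+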
+    """
+
+    __slots__ = ("_visitor_dict", "_next")
+
+    __traverse_options__: Dict[str, Any] = {}
+    _next: Optional[ExternalTraversal]
+
+    def traverse_single(self, obj: Visitable, **kw: Any) -> Any:
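+        """Visit the given element by itself (without descending into
+        children), dispatching to the first visitor in the chain that
+        implements a matching ``visit_<visit name>`` method."""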
+        for v in self.visitor_iterator:
+            meth = getattr(v, "visit_%s" % obj.__visit_name__, None)
+            if meth:
+                return meth(obj, **kw)
+
+    def iterate(
+        self, obj: Optional[ExternallyTraversible]
+    ) -> Iterator[ExternallyTraversible]:
+        """Traverse the given expression structure, returning an iterator
+        of all elements.
+
+        """
+        return iterate(obj, self.__traverse_options__)
+
+    @overload
+    def traverse(self, obj: Literal[None]) -> None: ...
+
+    @overload
+    def traverse(
+        self, obj: ExternallyTraversible
+    ) -> ExternallyTraversible: ...
+
+    def traverse(
+        self, obj: Optional[ExternallyTraversible]
+    ) -> Optional[ExternallyTraversible]:
+        """Traverse and visit the given expression structure."""
+
+        return traverse(obj, self.__traverse_options__, self._visitor_dict)
+
+    def _memoized_attr__visitor_dict(
+        self,
+    ) -> Dict[str, _TraverseCallableType[Any]]:
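+        # collect all ``visit_<name>`` methods of this visitor, keyed by
+        # <name>; memoized on first access via MemoizedSlots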
+        visitors = {}
+
+        for name in dir(self):
+            if name.startswith("visit_"):
+                visitors[name[6:]] = getattr(self, name)
+        return visitors
+
+    @property
+    def visitor_iterator(self) -> Iterator[ExternalTraversal]:
+        """Iterate through this visitor and each 'chained' visitor."""
+
+        v: Optional[ExternalTraversal] = self
+        while v:
+            yield v
+            v = getattr(v, "_next", None)
+
+    def chain(self: _ExtT, visitor: ExternalTraversal) -> _ExtT:
+        """'Chain' an additional ExternalTraversal onto this ExternalTraversal
+
+        The chained visitor will receive all visit events after this one.
+
+        """
+        tail = list(self.visitor_iterator)[-1]
+        tail._next = visitor
+        return self
+
+
+class CloningExternalTraversal(ExternalTraversal):
+    """Base class for visitor objects which can traverse using
+    the :func:`.visitors.cloned_traverse` function.
+
+    Direct usage of the :func:`.visitors.cloned_traverse` function is usually
+    preferred.
+
+    """
+
+    __slots__ = ()
+
+    def copy_and_process(
+        self, list_: List[ExternallyTraversible]
+    ) -> List[ExternallyTraversible]:
+        """Apply cloned traversal to the given list of elements, and return
+        the new list.
+
+        """
+        return [self.traverse(x) for x in list_]
+
+    @overload
+    def traverse(self, obj: Literal[None]) -> None: ...
+
+    @overload
+    def traverse(
+        self, obj: ExternallyTraversible
+    ) -> ExternallyTraversible: ...
+
+    def traverse(
+        self, obj: Optional[ExternallyTraversible]
+    ) -> Optional[ExternallyTraversible]:
+        """Traverse and visit the given expression structure."""
+
+        return cloned_traverse(
+            obj, self.__traverse_options__, self._visitor_dict
+        )
+
+
+class ReplacingExternalTraversal(CloningExternalTraversal):
+    """Base class for visitor objects which can traverse using
+    the :func:`.visitors.replacement_traverse` function.
+
+    Direct usage of the :func:`.visitors.replacement_traverse` function is
+    usually preferred.
+
+    """
+
+    __slots__ = ()
+
+    def replace(
+        self, elem: ExternallyTraversible
+    ) -> Optional[ExternallyTraversible]:
+        """Receive pre-copied elements during a cloning traversal.
+
+        If the method returns a new element, the returned element is used
+        in place of a straight copy of the original.  Traversal
+        will halt on the newly returned element if it is re-encountered.
+        """
+        return None
+
+    @overload
+    def traverse(self, obj: Literal[None]) -> None: ...
+
+    @overload
+    def traverse(
+        self, obj: ExternallyTraversible
+    ) -> ExternallyTraversible: ...
+
+    def traverse(
+        self, obj: Optional[ExternallyTraversible]
+    ) -> Optional[ExternallyTraversible]:
+        """Traverse and visit the given expression structure."""
+
+        def replace(
+            element: ExternallyTraversible,
+            **kw: Any,
+        ) -> Optional[ExternallyTraversible]:
+            for v in self.visitor_iterator:
+                e = cast(ReplacingExternalTraversal, v).replace(element)
+                if e is not None:
+                    return e
+
+            return None
+
+        return replacement_traverse(obj, self.__traverse_options__, replace)
+
+
+# backwards compatibility
+Traversible = Visitable
+
+ClauseVisitor = ExternalTraversal
+CloningVisitor = CloningExternalTraversal
+ReplacingCloningVisitor = ReplacingExternalTraversal
+
+
+def iterate(
+    obj: Optional[ExternallyTraversible],
+    opts: Mapping[str, Any] = util.EMPTY_DICT,
+) -> Iterator[ExternallyTraversible]:
+    r"""Traverse the given expression structure, returning an iterator.
+
+    Traversal is configured to be breadth-first.
+
+    The central API feature used by the :func:`.visitors.iterate`
+    function is the
+    :meth:`_expression.ClauseElement.get_children` method of
+    :class:`_expression.ClauseElement` objects.  This method should return all
+    the :class:`_expression.ClauseElement` objects which are associated with a
+    particular :class:`_expression.ClauseElement` object. For example, a
+    :class:`.Case` structure will refer to a series of
+    :class:`_expression.ColumnElement` objects within its "whens" and "else\_"
+    member variables.
+
+    :param obj: :class:`_expression.ClauseElement` structure to be traversed
+
+    :param opts: dictionary of iteration options.   This dictionary is usually
+     empty in modern usage.
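+
+    E.g., a minimal sketch::
+
+        from sqlalchemy import column, select
+        from sqlalchemy.sql import visitors
+
+        stmt = select(column("a")).where(column("a") > 5)
+
+        for element in visitors.iterate(stmt):
+            print(type(element).__name__)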
+
+    """
+    if obj is None:
+        return
+
+    yield obj
+    children = obj.get_children(**opts)
+
+    if not children:
+        return
+
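+    # breadth-first: maintain a FIFO queue of child iterators; as each
+    # element is yielded, its own children are appended as a new level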
+    stack = deque([children])
+    while stack:
+        t_iterator = stack.popleft()
+        for t in t_iterator:
+            yield t
+            stack.append(t.get_children(**opts))
+
+
+@overload
+def traverse_using(
+    iterator: Iterable[ExternallyTraversible],
+    obj: Literal[None],
+    visitors: Mapping[str, _TraverseCallableType[Any]],
+) -> None: ...
+
+
+@overload
+def traverse_using(
+    iterator: Iterable[ExternallyTraversible],
+    obj: ExternallyTraversible,
+    visitors: Mapping[str, _TraverseCallableType[Any]],
+) -> ExternallyTraversible: ...
+
+
+def traverse_using(
+    iterator: Iterable[ExternallyTraversible],
+    obj: Optional[ExternallyTraversible],
+    visitors: Mapping[str, _TraverseCallableType[Any]],
+) -> Optional[ExternallyTraversible]:
+    """Visit the given expression structure using the given iterator of
+    objects.
+
+    :func:`.visitors.traverse_using` is usually invoked internally by the
+    :func:`.visitors.traverse` function.
+
+    :param iterator: an iterable or sequence which will yield
+     :class:`_expression.ClauseElement`
+     structures; the iterator is assumed to be the
+     product of the :func:`.visitors.iterate` function.
+
+    :param obj: the :class:`_expression.ClauseElement`
+     that was used as the target of the
+     :func:`.iterate` function.
+
+    :param visitors: dictionary of visit functions.  See :func:`.traverse`
+     for details on this dictionary.
+
+    .. seealso::
+
+        :func:`.traverse`
+
+
+    """
+    for target in iterator:
+        meth = visitors.get(target.__visit_name__, None)
+        if meth:
+            meth(target)
+    return obj
+
+
+@overload
+def traverse(
+    obj: Literal[None],
+    opts: Mapping[str, Any],
+    visitors: Mapping[str, _TraverseCallableType[Any]],
+) -> None: ...
+
+
+@overload
+def traverse(
+    obj: ExternallyTraversible,
+    opts: Mapping[str, Any],
+    visitors: Mapping[str, _TraverseCallableType[Any]],
+) -> ExternallyTraversible: ...
+
+
+def traverse(
+    obj: Optional[ExternallyTraversible],
+    opts: Mapping[str, Any],
+    visitors: Mapping[str, _TraverseCallableType[Any]],
+) -> Optional[ExternallyTraversible]:
+    """Traverse and visit the given expression structure using the default
+    iterator.
+
+    e.g.::
+
+        from sqlalchemy.sql import visitors
+
+        stmt = select(some_table).where(some_table.c.foo == "bar")
+
+
+        def visit_bindparam(bind_param):
+            print("found bound value: %s" % bind_param.value)
+
+
+        visitors.traverse(stmt, {}, {"bindparam": visit_bindparam})
+
+    The iteration of objects uses the :func:`.visitors.iterate` function,
+    which performs a breadth-first traversal using a FIFO queue of child
+    iterators.
+
+    :param obj: :class:`_expression.ClauseElement` structure to be traversed
+
+    :param opts: dictionary of iteration options.   This dictionary is usually
+     empty in modern usage.
+
+    :param visitors: dictionary of visit functions.   The dictionary should
+     have strings as keys, each of which would correspond to the
+     ``__visit_name__`` of a particular kind of SQL expression object, and
+     callable functions  as values, each of which represents a visitor function
+     for that kind of object.
+
+    """
+    return traverse_using(iterate(obj, opts), obj, visitors)
+
+
+@overload
+def cloned_traverse(
+    obj: Literal[None],
+    opts: Mapping[str, Any],
+    visitors: Mapping[str, _TraverseCallableType[Any]],
+) -> None: ...
+
+
+# a bit of controversy here, as the clone of the lead element
+# *could* in theory replace with an entirely different kind of element.
+# however this is really not how cloned_traverse is ever used internally
+# at least.
+@overload
+def cloned_traverse(
+    obj: _ET,
+    opts: Mapping[str, Any],
+    visitors: Mapping[str, _TraverseCallableType[Any]],
+) -> _ET: ...
+
+
+def cloned_traverse(
+    obj: Optional[ExternallyTraversible],
+    opts: Mapping[str, Any],
+    visitors: Mapping[str, _TraverseCallableType[Any]],
+) -> Optional[ExternallyTraversible]:
+    """Clone the given expression structure, allowing modifications by
+    visitors for mutable objects.
+
+    Traversal usage is the same as that of :func:`.visitors.traverse`.
+    The visitor functions present in the ``visitors`` dictionary may also
+    modify the internals of the given structure as the traversal proceeds.
+
+    The :func:`.cloned_traverse` function does **not** provide objects that are
+    part of the :class:`.Immutable` interface to the visit methods (this
+    primarily includes :class:`.ColumnClause`, :class:`.Column`,
+    :class:`.TableClause` and :class:`.Table` objects). As this traversal is
+    only intended to allow in-place mutation of objects, :class:`.Immutable`
+    objects are skipped. The :meth:`.Immutable._clone` method is still called
+    on each object to allow for objects to replace themselves with a different
+    object based on a clone of their sub-internals (e.g. a
+    :class:`.ColumnClause` that clones its subquery to return a new
+    :class:`.ColumnClause`).
+
+    .. versionchanged:: 2.0  The :func:`.cloned_traverse` function omits
+       objects that are part of the :class:`.Immutable` interface.
+
+    The central API feature used by the :func:`.visitors.cloned_traverse`
+    and :func:`.visitors.replacement_traverse` functions, in addition to the
+    :meth:`_expression.ClauseElement.get_children`
+    function that is used to achieve
+    the iteration, is the :meth:`_expression.ClauseElement._copy_internals`
+    method.
+    For a :class:`_expression.ClauseElement`
+    structure to support cloning and replacement
+    traversals correctly, it needs to be able to pass a cloning function into
+    its internal members in order to make copies of them.
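+
+    E.g., a minimal sketch that mutates bound parameter values on the
+    cloned copy, leaving the original statement unchanged (illustrative
+    only)::
+
+        from sqlalchemy import column, select
+        from sqlalchemy.sql import visitors
+
+        stmt = select(column("a")).where(column("a") == "x")
+
+
+        def visit_bindparam(bindparam):
+            bindparam.value = bindparam.value.upper()
+
+
+        cloned_stmt = visitors.cloned_traverse(
+            stmt, {}, {"bindparam": visit_bindparam}
+        )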
+
+    .. seealso::
+
+        :func:`.visitors.traverse`
+
+        :func:`.visitors.replacement_traverse`
+
+    """
+
+    cloned: Dict[int, ExternallyTraversible] = {}
+    stop_on = set(opts.get("stop_on", []))
+
+    def deferred_copy_internals(
+        obj: ExternallyTraversible,
+    ) -> ExternallyTraversible:
+        return cloned_traverse(obj, opts, visitors)
+
+    def clone(elem: ExternallyTraversible, **kw: Any) -> ExternallyTraversible:
+        if elem in stop_on:
+            return elem
+        else:
+            if id(elem) not in cloned:
+                if "replace" in kw:
+                    newelem = cast(
+                        Optional[ExternallyTraversible], kw["replace"](elem)
+                    )
+                    if newelem is not None:
+                        cloned[id(elem)] = newelem
+                        return newelem
+
+                # the _clone method for immutable objects normally returns
+                # "self".  however, the method is still allowed to return a
+                # different object altogether; ColumnClause._clone() will,
+                # based on options, clone the subquery to which it is
+                # associated and return the new corresponding column.
+                cloned[id(elem)] = newelem = elem._clone(clone=clone, **kw)
+                newelem._copy_internals(clone=clone, **kw)
+
+                # however, visit methods which are tasked with in-place
+                # mutation of the object should not get access to the immutable
+                # object.
+                if not elem._is_immutable:
+                    meth = visitors.get(newelem.__visit_name__, None)
+                    if meth:
+                        meth(newelem)
+            return cloned[id(elem)]
+
+    if obj is not None:
+        obj = clone(
+            obj, deferred_copy_internals=deferred_copy_internals, **opts
+        )
+    clone = None  # type: ignore[assignment]  # remove gc cycles
+    return obj
+
+
+@overload
+def replacement_traverse(
+    obj: Literal[None],
+    opts: Mapping[str, Any],
+    replace: _TraverseTransformCallableType[Any],
+) -> None: ...
+
+
+@overload
+def replacement_traverse(
+    obj: _CE,
+    opts: Mapping[str, Any],
+    replace: _TraverseTransformCallableType[Any],
+) -> _CE: ...
+
+
+@overload
+def replacement_traverse(
+    obj: ExternallyTraversible,
+    opts: Mapping[str, Any],
+    replace: _TraverseTransformCallableType[Any],
+) -> ExternallyTraversible: ...
+
+
+def replacement_traverse(
+    obj: Optional[ExternallyTraversible],
+    opts: Mapping[str, Any],
+    replace: _TraverseTransformCallableType[Any],
+) -> Optional[ExternallyTraversible]:
+    """Clone the given expression structure, allowing element
+    replacement by a given replacement function.
+
+    This function is very similar to the :func:`.visitors.cloned_traverse`
+    function, except instead of being passed a dictionary of visitors, all
+    elements are unconditionally passed into the given replace function.
+    The replace function then has the option to return an entirely new object
+    which will replace the one given.  If it returns ``None``, then the object
+    is kept in place.
+
+    The difference in usage between :func:`.visitors.cloned_traverse` and
+    :func:`.visitors.replacement_traverse` is that in the former case, an
+    already-cloned object is passed to the visitor function, and the visitor
+    function can then manipulate the internal state of the object.
+    In the case of the latter, the visitor function should only return an
+    entirely different object, or do nothing.
+
+    The use case for :func:`.visitors.replacement_traverse` is that of
+    replacing a FROM clause inside of a SQL structure with a different one,
+    as is a common use case within the ORM.
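+
+    E.g., a minimal sketch that swaps one column expression for another
+    (illustrative only)::
+
+        from sqlalchemy import column, select
+        from sqlalchemy.sql import visitors
+
+        a, b = column("a"), column("b")
+        stmt = select(a).where(a > 10)
+
+
+        def replace(element, **kw):
+            # returning a non-None value replaces the element; returning
+            # None leaves it in place
+            return b if element is a else None
+
+
+        new_stmt = visitors.replacement_traverse(stmt, {}, replace)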
+
+    """
+
+    cloned = {}
+    stop_on = {id(x) for x in opts.get("stop_on", [])}
+
+    def deferred_copy_internals(
+        obj: ExternallyTraversible,
+    ) -> ExternallyTraversible:
+        return replacement_traverse(obj, opts, replace)
+
+    def clone(elem: ExternallyTraversible, **kw: Any) -> ExternallyTraversible:
+        if (
+            id(elem) in stop_on
+            or "no_replacement_traverse" in elem._annotations
+        ):
+            return elem
+        else:
+            newelem = replace(elem)
+            if newelem is not None:
+                stop_on.add(id(newelem))
+                return newelem  # type: ignore
+            else:
+                # base "already seen" on id(), not hash, so that we don't
+                # replace an Annotated element with its non-annotated one, and
+                # vice versa
+                id_elem = id(elem)
+                if id_elem not in cloned:
+                    if "replace" in kw:
+                        newelem = kw["replace"](elem)
+                        if newelem is not None:
+                            cloned[id_elem] = newelem
+                            return newelem  # type: ignore
+
+                    cloned[id_elem] = newelem = elem._clone(**kw)
+                    newelem._copy_internals(clone=clone, **kw)
+                return cloned[id_elem]  # type: ignore
+
+    if obj is not None:
+        obj = clone(
+            obj, deferred_copy_internals=deferred_copy_internals, **opts
+        )
+    clone = None  # type: ignore[assignment]  # remove gc cycles
+    return obj