path: root/.venv/lib/python3.12/site-packages/sqlalchemy/engine
author     S. Solomon Darnell    2025-03-28 21:52:21 -0500
committer  S. Solomon Darnell    2025-03-28 21:52:21 -0500
commit     4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
tree       ee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/sqlalchemy/engine
parent     cc961e04ba734dd72309fb548a2f97d67d578813 (diff)
download   gn-ai-master.tar.gz
two versions of R2R are here (HEAD, master)
Diffstat (limited to '.venv/lib/python3.12/site-packages/sqlalchemy/engine')
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/__init__.py             62
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_processors.py      136
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_row.py             128
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_util.py             74
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py               3371
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/characteristics.py     155
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/create.py              878
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/cursor.py             2176
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/default.py            2367
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/events.py              965
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/interfaces.py         3406
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/mock.py                133
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/processors.py           61
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/reflection.py         2099
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/result.py             2380
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/row.py                 400
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/strategies.py           19
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/url.py                 924
-rw-r--r--  .venv/lib/python3.12/site-packages/sqlalchemy/engine/util.py                167
19 files changed, 19901 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__init__.py
new file mode 100644
index 00000000..f4205d89
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/__init__.py
@@ -0,0 +1,62 @@
+# engine/__init__.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""SQL connections, SQL execution and high-level DB-API interface.
+
+The engine package defines the basic components used to interface
+DB-API modules with higher-level statement construction,
+connection-management, execution and result contexts.  The primary
+"entry point" class into this package is the Engine and its public
+constructor ``create_engine()``.
+
+"""
+
+from . import events as events
+from . import util as util
+from .base import Connection as Connection
+from .base import Engine as Engine
+from .base import NestedTransaction as NestedTransaction
+from .base import RootTransaction as RootTransaction
+from .base import Transaction as Transaction
+from .base import TwoPhaseTransaction as TwoPhaseTransaction
+from .create import create_engine as create_engine
+from .create import create_pool_from_url as create_pool_from_url
+from .create import engine_from_config as engine_from_config
+from .cursor import CursorResult as CursorResult
+from .cursor import ResultProxy as ResultProxy
+from .interfaces import AdaptedConnection as AdaptedConnection
+from .interfaces import BindTyping as BindTyping
+from .interfaces import Compiled as Compiled
+from .interfaces import Connectable as Connectable
+from .interfaces import ConnectArgsType as ConnectArgsType
+from .interfaces import ConnectionEventsTarget as ConnectionEventsTarget
+from .interfaces import CreateEnginePlugin as CreateEnginePlugin
+from .interfaces import Dialect as Dialect
+from .interfaces import ExceptionContext as ExceptionContext
+from .interfaces import ExecutionContext as ExecutionContext
+from .interfaces import TypeCompiler as TypeCompiler
+from .mock import create_mock_engine as create_mock_engine
+from .reflection import Inspector as Inspector
+from .reflection import ObjectKind as ObjectKind
+from .reflection import ObjectScope as ObjectScope
+from .result import ChunkedIteratorResult as ChunkedIteratorResult
+from .result import FilterResult as FilterResult
+from .result import FrozenResult as FrozenResult
+from .result import IteratorResult as IteratorResult
+from .result import MappingResult as MappingResult
+from .result import MergedResult as MergedResult
+from .result import Result as Result
+from .result import result_tuple as result_tuple
+from .result import ScalarResult as ScalarResult
+from .result import TupleResult as TupleResult
+from .row import BaseRow as BaseRow
+from .row import Row as Row
+from .row import RowMapping as RowMapping
+from .url import make_url as make_url
+from .url import URL as URL
+from .util import connection_memoize as connection_memoize
+from ..sql import ddl as ddl
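The package docstring above names the Engine and its public constructor
``create_engine()`` as the primary entry point. A minimal sketch of that
usage, assuming an in-memory SQLite database purely for illustration:

    from sqlalchemy import create_engine, text

    engine = create_engine("sqlite://")   # Engine: the entry point class
    with engine.connect() as conn:        # Connection checked out from the pool
        result = conn.execute(text("SELECT 1"))
        print(result.scalar())            # 1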
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_processors.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_processors.py
new file mode 100644
index 00000000..8536d53d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_processors.py
@@ -0,0 +1,136 @@
+# engine/_py_processors.py
+# Copyright (C) 2010-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+# Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""defines generic type conversion functions, as used in bind and result
+processors.
+
+They all share one common characteristic: None is passed through unchanged.
+
+"""
+
+from __future__ import annotations
+
+import datetime
+from datetime import date as date_cls
+from datetime import datetime as datetime_cls
+from datetime import time as time_cls
+from decimal import Decimal
+import typing
+from typing import Any
+from typing import Callable
+from typing import Optional
+from typing import Type
+from typing import TypeVar
+from typing import Union
+
+
+_DT = TypeVar(
+    "_DT", bound=Union[datetime.datetime, datetime.time, datetime.date]
+)
+
+
+def str_to_datetime_processor_factory(
+    regexp: typing.Pattern[str], type_: Callable[..., _DT]
+) -> Callable[[Optional[str]], Optional[_DT]]:
+    rmatch = regexp.match
+    # Even on Python 2.6, datetime.strptime is both slower than this code
+    # and does not support microseconds.
+    has_named_groups = bool(regexp.groupindex)
+
+    def process(value: Optional[str]) -> Optional[_DT]:
+        if value is None:
+            return None
+        else:
+            try:
+                m = rmatch(value)
+            except TypeError as err:
+                raise ValueError(
+                    "Couldn't parse %s string '%r' "
+                    "- value is not a string." % (type_.__name__, value)
+                ) from err
+
+            if m is None:
+                raise ValueError(
+                    "Couldn't parse %s string: "
+                    "'%s'" % (type_.__name__, value)
+                )
+            if has_named_groups:
+                groups = m.groupdict(0)
+                return type_(
+                    **dict(
+                        list(
+                            zip(
+                                iter(groups.keys()),
+                                list(map(int, iter(groups.values()))),
+                            )
+                        )
+                    )
+                )
+            else:
+                return type_(*list(map(int, m.groups(0))))
+
+    return process
+
+
+def to_decimal_processor_factory(
+    target_class: Type[Decimal], scale: int
+) -> Callable[[Optional[float]], Optional[Decimal]]:
+    fstring = "%%.%df" % scale
+
+    def process(value: Optional[float]) -> Optional[Decimal]:
+        if value is None:
+            return None
+        else:
+            return target_class(fstring % value)
+
+    return process
+
+
+def to_float(value: Optional[Union[int, float]]) -> Optional[float]:
+    if value is None:
+        return None
+    else:
+        return float(value)
+
+
+def to_str(value: Optional[Any]) -> Optional[str]:
+    if value is None:
+        return None
+    else:
+        return str(value)
+
+
+def int_to_boolean(value: Optional[int]) -> Optional[bool]:
+    if value is None:
+        return None
+    else:
+        return bool(value)
+
+
+def str_to_datetime(value: Optional[str]) -> Optional[datetime.datetime]:
+    if value is not None:
+        dt_value = datetime_cls.fromisoformat(value)
+    else:
+        dt_value = None
+    return dt_value
+
+
+def str_to_time(value: Optional[str]) -> Optional[datetime.time]:
+    if value is not None:
+        dt_value = time_cls.fromisoformat(value)
+    else:
+        dt_value = None
+    return dt_value
+
+
+def str_to_date(value: Optional[str]) -> Optional[datetime.date]:
+    if value is not None:
+        dt_value = date_cls.fromisoformat(value)
+    else:
+        dt_value = None
+    return dt_value
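The processors above are plain module-level functions, so their shared
contract (None passes through unchanged) can be sketched directly. The
import references the private module shown in this diff, purely for
illustration:

    from decimal import Decimal
    from sqlalchemy.engine._py_processors import (
        str_to_date,
        str_to_datetime,
        to_decimal_processor_factory,
    )

    print(str_to_datetime("2025-03-28 21:52:21"))  # parsed via fromisoformat
    print(str_to_date(None))                       # None passes through as None

    to_dec = to_decimal_processor_factory(Decimal, 2)
    print(to_dec(1.2345))                          # Decimal('1.23') via "%.2f"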
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_row.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_row.py
new file mode 100644
index 00000000..38c60fcd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_row.py
@@ -0,0 +1,128 @@
+# engine/_py_row.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+from __future__ import annotations
+
+import operator
+import typing
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Tuple
+from typing import Type
+
+if typing.TYPE_CHECKING:
+    from .result import _KeyType
+    from .result import _ProcessorsType
+    from .result import _RawRowType
+    from .result import _TupleGetterType
+    from .result import ResultMetaData
+
+MD_INDEX = 0  # integer index in cursor.description
+
+
+class BaseRow:
+    __slots__ = ("_parent", "_data", "_key_to_index")
+
+    _parent: ResultMetaData
+    _key_to_index: Mapping[_KeyType, int]
+    _data: _RawRowType
+
+    def __init__(
+        self,
+        parent: ResultMetaData,
+        processors: Optional[_ProcessorsType],
+        key_to_index: Mapping[_KeyType, int],
+        data: _RawRowType,
+    ):
+        """Row objects are constructed by CursorResult objects."""
+        object.__setattr__(self, "_parent", parent)
+
+        object.__setattr__(self, "_key_to_index", key_to_index)
+
+        if processors:
+            object.__setattr__(
+                self,
+                "_data",
+                tuple(
+                    [
+                        proc(value) if proc else value
+                        for proc, value in zip(processors, data)
+                    ]
+                ),
+            )
+        else:
+            object.__setattr__(self, "_data", tuple(data))
+
+    def __reduce__(self) -> Tuple[Callable[..., BaseRow], Tuple[Any, ...]]:
+        return (
+            rowproxy_reconstructor,
+            (self.__class__, self.__getstate__()),
+        )
+
+    def __getstate__(self) -> Dict[str, Any]:
+        return {"_parent": self._parent, "_data": self._data}
+
+    def __setstate__(self, state: Dict[str, Any]) -> None:
+        parent = state["_parent"]
+        object.__setattr__(self, "_parent", parent)
+        object.__setattr__(self, "_data", state["_data"])
+        object.__setattr__(self, "_key_to_index", parent._key_to_index)
+
+    def _values_impl(self) -> List[Any]:
+        return list(self)
+
+    def __iter__(self) -> Iterator[Any]:
+        return iter(self._data)
+
+    def __len__(self) -> int:
+        return len(self._data)
+
+    def __hash__(self) -> int:
+        return hash(self._data)
+
+    def __getitem__(self, key: Any) -> Any:
+        return self._data[key]
+
+    def _get_by_key_impl_mapping(self, key: str) -> Any:
+        try:
+            return self._data[self._key_to_index[key]]
+        except KeyError:
+            pass
+        self._parent._key_not_found(key, False)
+
+    def __getattr__(self, name: str) -> Any:
+        try:
+            return self._data[self._key_to_index[name]]
+        except KeyError:
+            pass
+        self._parent._key_not_found(name, True)
+
+    def _to_tuple_instance(self) -> Tuple[Any, ...]:
+        return self._data
+
+
+# This reconstructor is necessary so that pickles created with or without
+# the Cy extension use the same binary format.
+def rowproxy_reconstructor(
+    cls: Type[BaseRow], state: Dict[str, Any]
+) -> BaseRow:
+    obj = cls.__new__(cls)
+    obj.__setstate__(state)
+    return obj
+
+
+def tuplegetter(*indexes: int) -> _TupleGetterType:
+    if len(indexes) != 1:
+        for i in range(1, len(indexes)):
+            if indexes[i - 1] != indexes[i] - 1:
+                return operator.itemgetter(*indexes)
+    # slice form is faster but returns a list if input is list
+    return operator.itemgetter(slice(indexes[0], indexes[-1] + 1))
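A short sketch of tuplegetter's two code paths (again a private helper,
shown here only to illustrate the slice optimization for contiguous
indexes):

    from sqlalchemy.engine._py_row import tuplegetter

    row = ("a", "b", "c", "d")
    print(tuplegetter(1, 2, 3)(row))  # contiguous -> slice form: ('b', 'c', 'd')
    print(tuplegetter(0, 2)(row))     # non-contiguous -> itemgetter: ('a', 'c')
    print(tuplegetter(2)(row))        # single index, still a tuple: ('c',)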
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_util.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_util.py
new file mode 100644
index 00000000..50badea2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/_py_util.py
@@ -0,0 +1,74 @@
+# engine/_py_util.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+from __future__ import annotations
+
+import typing
+from typing import Any
+from typing import Mapping
+from typing import Optional
+from typing import Tuple
+
+from .. import exc
+
+if typing.TYPE_CHECKING:
+    from .interfaces import _CoreAnyExecuteParams
+    from .interfaces import _CoreMultiExecuteParams
+    from .interfaces import _DBAPIAnyExecuteParams
+    from .interfaces import _DBAPIMultiExecuteParams
+
+
+_no_tuple: Tuple[Any, ...] = ()
+
+
+def _distill_params_20(
+    params: Optional[_CoreAnyExecuteParams],
+) -> _CoreMultiExecuteParams:
+    if params is None:
+        return _no_tuple
+    # Assume list is more likely than tuple
+    elif isinstance(params, list) or isinstance(params, tuple):
+        # collections_abc.MutableSequence): # avoid abc.__instancecheck__
+        if params and not isinstance(params[0], (tuple, Mapping)):
+            raise exc.ArgumentError(
+                "List argument must consist only of tuples or dictionaries"
+            )
+
+        return params
+    elif isinstance(params, dict) or isinstance(
+        # only do immutabledict or abc.__instancecheck__ for Mapping after
+        # we've checked for plain dictionaries and would otherwise raise
+        params,
+        Mapping,
+    ):
+        return [params]
+    else:
+        raise exc.ArgumentError("mapping or list expected for parameters")
+
+
+def _distill_raw_params(
+    params: Optional[_DBAPIAnyExecuteParams],
+) -> _DBAPIMultiExecuteParams:
+    if params is None:
+        return _no_tuple
+    elif isinstance(params, list):
+        # collections_abc.MutableSequence): # avoid abc.__instancecheck__
+        if params and not isinstance(params[0], (tuple, Mapping)):
+            raise exc.ArgumentError(
+                "List argument must consist only of tuples or dictionaries"
+            )
+
+        return params
+    elif isinstance(params, (tuple, dict)) or isinstance(
+        # only do abc.__instancecheck__ for Mapping after we've checked
+        # for plain dictionaries and would otherwise raise
+        params,
+        Mapping,
+    ):
+        # cast("Union[List[Mapping[str, Any]], Tuple[Any, ...]]", [params])
+        return [params]  # type: ignore
+    else:
+        raise exc.ArgumentError("mapping or sequence expected for parameters")
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py
new file mode 100644
index 00000000..cbf11acf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/base.py
@@ -0,0 +1,3371 @@
+# engine/base.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+"""Defines :class:`_engine.Connection` and :class:`_engine.Engine`.
+
+"""
+from __future__ import annotations
+
+import contextlib
+import sys
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import NoReturn
+from typing import Optional
+from typing import overload
+from typing import Tuple
+from typing import Type
+from typing import TypeVar
+from typing import Union
+
+from .interfaces import BindTyping
+from .interfaces import ConnectionEventsTarget
+from .interfaces import DBAPICursor
+from .interfaces import ExceptionContext
+from .interfaces import ExecuteStyle
+from .interfaces import ExecutionContext
+from .interfaces import IsolationLevel
+from .util import _distill_params_20
+from .util import _distill_raw_params
+from .util import TransactionalContext
+from .. import exc
+from .. import inspection
+from .. import log
+from .. import util
+from ..sql import compiler
+from ..sql import util as sql_util
+
+if typing.TYPE_CHECKING:
+    from . import CursorResult
+    from . import ScalarResult
+    from .interfaces import _AnyExecuteParams
+    from .interfaces import _AnyMultiExecuteParams
+    from .interfaces import _CoreAnyExecuteParams
+    from .interfaces import _CoreMultiExecuteParams
+    from .interfaces import _CoreSingleExecuteParams
+    from .interfaces import _DBAPIAnyExecuteParams
+    from .interfaces import _DBAPISingleExecuteParams
+    from .interfaces import _ExecuteOptions
+    from .interfaces import CompiledCacheType
+    from .interfaces import CoreExecuteOptionsParameter
+    from .interfaces import Dialect
+    from .interfaces import SchemaTranslateMapType
+    from .reflection import Inspector  # noqa
+    from .url import URL
+    from ..event import dispatcher
+    from ..log import _EchoFlagType
+    from ..pool import _ConnectionFairy
+    from ..pool import Pool
+    from ..pool import PoolProxiedConnection
+    from ..sql import Executable
+    from ..sql._typing import _InfoType
+    from ..sql.compiler import Compiled
+    from ..sql.ddl import ExecutableDDLElement
+    from ..sql.ddl import SchemaDropper
+    from ..sql.ddl import SchemaGenerator
+    from ..sql.functions import FunctionElement
+    from ..sql.schema import DefaultGenerator
+    from ..sql.schema import HasSchemaAttr
+    from ..sql.schema import SchemaItem
+    from ..sql.selectable import TypedReturnsRows
+
+
+_T = TypeVar("_T", bound=Any)
+_EMPTY_EXECUTION_OPTS: _ExecuteOptions = util.EMPTY_DICT
+NO_OPTIONS: Mapping[str, Any] = util.EMPTY_DICT
+
+
+class Connection(ConnectionEventsTarget, inspection.Inspectable["Inspector"]):
+    """Provides high-level functionality for a wrapped DB-API connection.
+
+    The :class:`_engine.Connection` object is procured by calling the
+    :meth:`_engine.Engine.connect` method of the :class:`_engine.Engine`
+    object, and provides services for execution of SQL statements as well
+    as transaction control.
+
+    The Connection object is **not** thread-safe. While a Connection can be
+    shared among threads using properly synchronized access, it is still
+    possible that the underlying DBAPI connection may not support shared
+    access between threads. Check the DBAPI documentation for details.
+
+    The Connection object represents a single DBAPI connection checked out
+    from the connection pool. In this state, the connection pool has no
+    effect upon the connection, including its expiration or timeout state.
+    For the connection pool to properly manage connections, connections
+    should be returned to the connection pool (i.e. ``connection.close()``)
+    whenever the connection is not in use.
+
+    .. index::
+      single: thread safety; Connection
+
+    """
+
+    dialect: Dialect
+    dispatch: dispatcher[ConnectionEventsTarget]
+
+    _sqla_logger_namespace = "sqlalchemy.engine.Connection"
+
+    # used by sqlalchemy.engine.util.TransactionalContext
+    _trans_context_manager: Optional[TransactionalContext] = None
+
+    # legacy as of 2.0, should be eventually deprecated and
+    # removed.  was used in the "pre_ping" recipe that's been in the docs
+    # a long time
+    should_close_with_result = False
+
+    _dbapi_connection: Optional[PoolProxiedConnection]
+
+    _execution_options: _ExecuteOptions
+
+    _transaction: Optional[RootTransaction]
+    _nested_transaction: Optional[NestedTransaction]
+
+    def __init__(
+        self,
+        engine: Engine,
+        connection: Optional[PoolProxiedConnection] = None,
+        _has_events: Optional[bool] = None,
+        _allow_revalidate: bool = True,
+        _allow_autobegin: bool = True,
+    ):
+        """Construct a new Connection."""
+        self.engine = engine
+        self.dialect = dialect = engine.dialect
+
+        if connection is None:
+            try:
+                self._dbapi_connection = engine.raw_connection()
+            except dialect.loaded_dbapi.Error as err:
+                Connection._handle_dbapi_exception_noconnection(
+                    err, dialect, engine
+                )
+                raise
+        else:
+            self._dbapi_connection = connection
+
+        self._transaction = self._nested_transaction = None
+        self.__savepoint_seq = 0
+        self.__in_begin = False
+
+        self.__can_reconnect = _allow_revalidate
+        self._allow_autobegin = _allow_autobegin
+        self._echo = self.engine._should_log_info()
+
+        if _has_events is None:
+            # if _has_events is sent explicitly as False,
+            # then don't join the dispatch of the engine; we don't
+            # want to handle any of the engine's events in that case.
+            self.dispatch = self.dispatch._join(engine.dispatch)
+        self._has_events = _has_events or (
+            _has_events is None and engine._has_events
+        )
+
+        self._execution_options = engine._execution_options
+
+        if self._has_events or self.engine._has_events:
+            self.dispatch.engine_connect(self)
+
+    # this can be assigned differently via
+    # characteristics.LoggingTokenCharacteristic
+    _message_formatter: Any = None
+
+    def _log_info(self, message: str, *arg: Any, **kw: Any) -> None:
+        fmt = self._message_formatter
+
+        if fmt:
+            message = fmt(message)
+
+        if log.STACKLEVEL:
+            kw["stacklevel"] = 1 + log.STACKLEVEL_OFFSET
+
+        self.engine.logger.info(message, *arg, **kw)
+
+    def _log_debug(self, message: str, *arg: Any, **kw: Any) -> None:
+        fmt = self._message_formatter
+
+        if fmt:
+            message = fmt(message)
+
+        if log.STACKLEVEL:
+            kw["stacklevel"] = 1 + log.STACKLEVEL_OFFSET
+
+        self.engine.logger.debug(message, *arg, **kw)
+
+    @property
+    def _schema_translate_map(self) -> Optional[SchemaTranslateMapType]:
+        schema_translate_map: Optional[SchemaTranslateMapType] = (
+            self._execution_options.get("schema_translate_map", None)
+        )
+
+        return schema_translate_map
+
+    def schema_for_object(self, obj: HasSchemaAttr) -> Optional[str]:
+        """Return the schema name for the given schema item taking into
+        account current schema translate map.
+
+        """
+
+        name = obj.schema
+        schema_translate_map: Optional[SchemaTranslateMapType] = (
+            self._execution_options.get("schema_translate_map", None)
+        )
+
+        if (
+            schema_translate_map
+            and name in schema_translate_map
+            and obj._use_schema_map
+        ):
+            return schema_translate_map[name]
+        else:
+            return name
+
+    def __enter__(self) -> Connection:
+        return self
+
+    def __exit__(self, type_: Any, value: Any, traceback: Any) -> None:
+        self.close()
+
+    @overload
+    def execution_options(
+        self,
+        *,
+        compiled_cache: Optional[CompiledCacheType] = ...,
+        logging_token: str = ...,
+        isolation_level: IsolationLevel = ...,
+        no_parameters: bool = False,
+        stream_results: bool = False,
+        max_row_buffer: int = ...,
+        yield_per: int = ...,
+        insertmanyvalues_page_size: int = ...,
+        schema_translate_map: Optional[SchemaTranslateMapType] = ...,
+        preserve_rowcount: bool = False,
+        **opt: Any,
+    ) -> Connection: ...
+
+    @overload
+    def execution_options(self, **opt: Any) -> Connection: ...
+
+    def execution_options(self, **opt: Any) -> Connection:
+        r"""Set non-SQL options for the connection which take effect
+        during execution.
+
+        This method modifies this :class:`_engine.Connection` **in-place**;
+        the return value is the same :class:`_engine.Connection` object
+        upon which the method is called.   Note that this is in contrast
+        to the behavior of the ``execution_options`` methods on other
+        objects such as :meth:`_engine.Engine.execution_options` and
+        :meth:`_sql.Executable.execution_options`.  The rationale is that many
+        such execution options necessarily modify the state of the base
+        DBAPI connection in any case so there is no feasible means of
+        keeping the effect of such an option localized to a "sub" connection.
+
+        .. versionchanged:: 2.0  The :meth:`_engine.Connection.execution_options`
+           method, in contrast to other objects with this method, modifies
+           the connection in-place without creating a copy of it.
+
+        As discussed elsewhere, the :meth:`_engine.Connection.execution_options`
+        method accepts any arbitrary parameters including user defined names.
+        All parameters given are consumable in a number of ways including
+        by using the :meth:`_engine.Connection.get_execution_options` method.
+        See the examples at :meth:`_sql.Executable.execution_options`
+        and :meth:`_engine.Engine.execution_options`.
+
+        The keywords that are currently recognized by SQLAlchemy itself
+        include all those listed under :meth:`.Executable.execution_options`,
+        as well as others that are specific to :class:`_engine.Connection`.
+
+        :param compiled_cache: Available on: :class:`_engine.Connection`,
+          :class:`_engine.Engine`.
+
+          A dictionary where :class:`.Compiled` objects
+          will be cached when the :class:`_engine.Connection`
+          compiles a clause
+          expression into a :class:`.Compiled` object.  This dictionary will
+          supersede the statement cache that may be configured on the
+          :class:`_engine.Engine` itself.   If set to None, caching
+          is disabled, even if the engine has a configured cache size.
+
+          Note that the ORM makes use of its own "compiled" caches for
+          some operations, including flush operations.  The caching
+          used by the ORM internally supersedes a cache dictionary
+          specified here.
+
+        :param logging_token: Available on: :class:`_engine.Connection`,
+          :class:`_engine.Engine`, :class:`_sql.Executable`.
+
+          Adds the specified string token surrounded by brackets in log
+          messages logged by the connection, i.e. the logging that's enabled
+          either via the :paramref:`_sa.create_engine.echo` flag or via the
+          ``logging.getLogger("sqlalchemy.engine")`` logger. This allows a
+          per-connection or per-sub-engine token to be available which is
+          useful for debugging concurrent connection scenarios.
+
+          .. versionadded:: 1.4.0b2
+
+          .. seealso::
+
+            :ref:`dbengine_logging_tokens` - usage example
+
+            :paramref:`_sa.create_engine.logging_name` - adds a name to the
+            name used by the Python logger object itself.
+
+        :param isolation_level: Available on: :class:`_engine.Connection`,
+          :class:`_engine.Engine`.
+
+          Set the transaction isolation level for the lifespan of this
+          :class:`_engine.Connection` object.
+          Valid values include those string
+          values accepted by the :paramref:`_sa.create_engine.isolation_level`
+          parameter passed to :func:`_sa.create_engine`.  These levels are
+          semi-database specific; see individual dialect documentation for
+          valid levels.
+
+          The isolation level option applies the isolation level by emitting
+          statements on the DBAPI connection, and **necessarily affects the
+          original Connection object overall**. The isolation level will remain
+          at the given setting until explicitly changed, or when the DBAPI
+          connection itself is :term:`released` to the connection pool, i.e. the
+          :meth:`_engine.Connection.close` method is called, at which time an
+          event handler will emit additional statements on the DBAPI connection
+          in order to revert the isolation level change.
+
+          .. note:: The ``isolation_level`` execution option may only be
+             established before the :meth:`_engine.Connection.begin` method is
+             called, as well as before any SQL statements are emitted which
+             would otherwise trigger "autobegin", or directly after a call to
+             :meth:`_engine.Connection.commit` or
+             :meth:`_engine.Connection.rollback`. A database cannot change the
+             isolation level on a transaction in progress.
+
+          .. note:: The ``isolation_level`` execution option is implicitly
+             reset if the :class:`_engine.Connection` is invalidated, e.g. via
+             the :meth:`_engine.Connection.invalidate` method, or if a
+             disconnection error occurs. The new connection produced after the
+             invalidation will **not** have the selected isolation level
+             re-applied to it automatically.
+
+          .. seealso::
+
+                :ref:`dbapi_autocommit`
+
+                :meth:`_engine.Connection.get_isolation_level`
+                - view current actual level
+
+        :param no_parameters: Available on: :class:`_engine.Connection`,
+          :class:`_sql.Executable`.
+
+          When ``True``, if the final parameter
+          list or dictionary is totally empty, will invoke the
+          statement on the cursor as ``cursor.execute(statement)``,
+          not passing the parameter collection at all.
+          Some DBAPIs such as psycopg2 and mysql-python consider
+          percent signs as significant only when parameters are
+          present; this option allows code to generate SQL
+          containing percent signs (and possibly other characters)
+          that is neutral regarding whether it's executed by the DBAPI
+          or piped into a script that's later invoked by
+          command line tools.
+
+        :param stream_results: Available on: :class:`_engine.Connection`,
+          :class:`_sql.Executable`.
+
+          Indicate to the dialect that results should be "streamed" and not
+          pre-buffered, if possible.  For backends such as PostgreSQL, MySQL
+          and MariaDB, this indicates the use of a "server side cursor" as
+          opposed to a client side cursor.  Other backends such as that of
+          Oracle Database may already use server side cursors by default.
+
+          The usage of
+          :paramref:`_engine.Connection.execution_options.stream_results` is
+          usually combined with setting a fixed number of rows to be fetched
+          in batches, to allow for efficient iteration of database rows while
+          at the same time not loading all result rows into memory at once;
+          this can be configured on a :class:`_engine.Result` object using the
+          :meth:`_engine.Result.yield_per` method, after execution has
+          returned a new :class:`_engine.Result`.   If
+          :meth:`_engine.Result.yield_per` is not used,
+          the :paramref:`_engine.Connection.execution_options.stream_results`
+          mode of operation will instead use a dynamically sized buffer
+          which buffers sets of rows at a time, growing on each batch
+          based on a fixed growth size up until a limit which may
+          be configured using the
+          :paramref:`_engine.Connection.execution_options.max_row_buffer`
+          parameter.
+
+          When using the ORM to fetch ORM mapped objects from a result,
+          :meth:`_engine.Result.yield_per` should always be used with
+          :paramref:`_engine.Connection.execution_options.stream_results`,
+          so that the ORM does not fetch all rows into new ORM objects at once.
+
+          For typical use, the
+          :paramref:`_engine.Connection.execution_options.yield_per` execution
+          option should be preferred, which sets up both
+          :paramref:`_engine.Connection.execution_options.stream_results` and
+          :meth:`_engine.Result.yield_per` at once. This option is supported
+          both at a core level by :class:`_engine.Connection` as well as by the
+          ORM :class:`_engine.Session`; the latter is described at
+          :ref:`orm_queryguide_yield_per`.
+
+          .. seealso::
+
+            :ref:`engine_stream_results` - background on
+            :paramref:`_engine.Connection.execution_options.stream_results`
+
+            :paramref:`_engine.Connection.execution_options.max_row_buffer`
+
+            :paramref:`_engine.Connection.execution_options.yield_per`
+
+            :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
+            describing the ORM version of ``yield_per``
+
+        :param max_row_buffer: Available on: :class:`_engine.Connection`,
+          :class:`_sql.Executable`.  Sets a maximum
+          buffer size to use when the
+          :paramref:`_engine.Connection.execution_options.stream_results`
+          execution option is used on a backend that supports server side
+          cursors.  The default value if not specified is 1000.
+
+          .. seealso::
+
+            :paramref:`_engine.Connection.execution_options.stream_results`
+
+            :ref:`engine_stream_results`
+
+
+        :param yield_per: Available on: :class:`_engine.Connection`,
+          :class:`_sql.Executable`.  Integer value applied which will
+          set the :paramref:`_engine.Connection.execution_options.stream_results`
+          execution option and invoke :meth:`_engine.Result.yield_per`
+          automatically at once.  Allows equivalent functionality as
+          is present when using this parameter with the ORM.
+
+          .. versionadded:: 1.4.40
+
+          .. seealso::
+
+            :ref:`engine_stream_results` - background and examples
+            on using server side cursors with Core.
+
+            :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
+            describing the ORM version of ``yield_per``
+
+        :param insertmanyvalues_page_size: Available on: :class:`_engine.Connection`,
+            :class:`_engine.Engine`. Number of rows to format into an
+            INSERT statement when the statement uses "insertmanyvalues" mode,
+            which is a paged form of bulk insert that is used for many backends
+            when using :term:`executemany` execution typically in conjunction
+            with RETURNING. Defaults to 1000. May also be modified on a
+            per-engine basis using the
+            :paramref:`_sa.create_engine.insertmanyvalues_page_size` parameter.
+
+            .. versionadded:: 2.0
+
+            .. seealso::
+
+                :ref:`engine_insertmanyvalues`
+
+        :param schema_translate_map: Available on: :class:`_engine.Connection`,
+          :class:`_engine.Engine`, :class:`_sql.Executable`.
+
+          A dictionary mapping schema names to schema names, that will be
+          applied to the :paramref:`_schema.Table.schema` element of each
+          :class:`_schema.Table`
+          encountered when SQL or DDL expression elements
+          are compiled into strings; the resulting schema name will be
+          converted based on presence in the map of the original name.
+
+          .. seealso::
+
+            :ref:`schema_translating`
+
+        :param preserve_rowcount: Boolean; when True, the ``cursor.rowcount``
+          attribute will be unconditionally memoized within the result and
+          made available via the :attr:`.CursorResult.rowcount` attribute.
+          Normally, this attribute is only preserved for UPDATE and DELETE
+          statements.  Using this option, the DBAPI's rowcount value can
+          be accessed for other kinds of statements such as INSERT and SELECT,
+          to the degree that the DBAPI supports these statements.  See
+          :attr:`.CursorResult.rowcount` for notes regarding the behavior
+          of this attribute.
+
+          .. versionadded:: 2.0.28
+
+        .. seealso::
+
+            :meth:`_engine.Engine.execution_options`
+
+            :meth:`.Executable.execution_options`
+
+            :meth:`_engine.Connection.get_execution_options`
+
+            :ref:`orm_queryguide_execution_options` - documentation on all
+            ORM-specific execution options
+
+        """  # noqa
+        if self._has_events or self.engine._has_events:
+            self.dispatch.set_connection_execution_options(self, opt)
+        self._execution_options = self._execution_options.union(opt)
+        self.dialect.set_connection_execution_options(self, opt)
+        return self
+
+    def get_execution_options(self) -> _ExecuteOptions:
+        """Get the non-SQL options which will take effect during execution.
+
+        .. versionadded:: 1.3
+
+        .. seealso::
+
+            :meth:`_engine.Connection.execution_options`
+        """
+        return self._execution_options
+
+    @property
+    def _still_open_and_dbapi_connection_is_valid(self) -> bool:
+        pool_proxied_connection = self._dbapi_connection
+        return (
+            pool_proxied_connection is not None
+            and pool_proxied_connection.is_valid
+        )
+
+    @property
+    def closed(self) -> bool:
+        """Return True if this connection is closed."""
+
+        return self._dbapi_connection is None and not self.__can_reconnect
+
+    @property
+    def invalidated(self) -> bool:
+        """Return True if this connection was invalidated.
+
+        This does not indicate, however, whether or not the connection was
+        invalidated at the pool level.
+
+        """
+
+        # prior to 1.4, "invalid" was stored as a state independent of
+        # "closed", meaning an invalidated connection could be "closed",
+        # the _dbapi_connection would be None and closed=True, yet the
+        # "invalid" flag would stay True.  This meant that there were
+        # three separate states (open/valid, closed/valid, closed/invalid)
+        # when there is really no reason for that; a connection that's
+        # "closed" does not need to be "invalid".  So the state is now
+        # represented by the two facts alone.
+
+        pool_proxied_connection = self._dbapi_connection
+        return pool_proxied_connection is None and self.__can_reconnect
+
+    @property
+    def connection(self) -> PoolProxiedConnection:
+        """The underlying DB-API connection managed by this Connection.
+
+        This is a SQLAlchemy connection-pool proxied connection
+        which then has the attribute
+        :attr:`_pool._ConnectionFairy.dbapi_connection` that refers to the
+        actual driver connection.
+
+        .. seealso::
+
+
+            :ref:`dbapi_connections`
+
+        """
+
+        if self._dbapi_connection is None:
+            try:
+                return self._revalidate_connection()
+            except (exc.PendingRollbackError, exc.ResourceClosedError):
+                raise
+            except BaseException as e:
+                self._handle_dbapi_exception(e, None, None, None, None)
+        else:
+            return self._dbapi_connection
+
+    def get_isolation_level(self) -> IsolationLevel:
+        """Return the current **actual** isolation level that's present on
+        the database within the scope of this connection.
+
+        This attribute will perform a live SQL operation against the database
+        in order to procure the current isolation level, so the value returned
+        is the actual level on the underlying DBAPI connection regardless of
+        how this state was set. This will be one of the four actual isolation
+        modes ``READ UNCOMMITTED``, ``READ COMMITTED``, ``REPEATABLE READ``,
+        ``SERIALIZABLE``. It will **not** include the ``AUTOCOMMIT`` isolation
+        level setting. Third party dialects may also feature additional
+        isolation level settings.
+
+        .. note::  This method **will not report** on the ``AUTOCOMMIT``
+          isolation level, which is a separate :term:`dbapi` setting that's
+          independent of **actual** isolation level.  When ``AUTOCOMMIT`` is
+          in use, the database connection still has a "traditional" isolation
+          mode in effect, that is typically one of the four values
+          ``READ UNCOMMITTED``, ``READ COMMITTED``, ``REPEATABLE READ``,
+          ``SERIALIZABLE``.
+
+        Compare to the :attr:`_engine.Connection.default_isolation_level`
+        accessor which returns the isolation level that is present on the
+        database at initial connection time.
+
+        .. seealso::
+
+            :attr:`_engine.Connection.default_isolation_level`
+            - view default level
+
+            :paramref:`_sa.create_engine.isolation_level`
+            - set per :class:`_engine.Engine` isolation level
+
+            :paramref:`.Connection.execution_options.isolation_level`
+            - set per :class:`_engine.Connection` isolation level
+
+        """
+        dbapi_connection = self.connection.dbapi_connection
+        assert dbapi_connection is not None
+        try:
+            return self.dialect.get_isolation_level(dbapi_connection)
+        except BaseException as e:
+            self._handle_dbapi_exception(e, None, None, None, None)
+
+    @property
+    def default_isolation_level(self) -> Optional[IsolationLevel]:
+        """The initial-connection time isolation level associated with the
+        :class:`_engine.Dialect` in use.
+
+        This value is independent of the
+        :paramref:`.Connection.execution_options.isolation_level` and
+        :paramref:`.Engine.execution_options.isolation_level` execution
+        options, and is determined by the :class:`_engine.Dialect` when the
+        first connection is created, by performing a SQL query against the
+        database for the current isolation level before any additional commands
+        have been emitted.
+
+        Calling this accessor does not invoke any new SQL queries.
+
+        .. seealso::
+
+            :meth:`_engine.Connection.get_isolation_level`
+            - view current actual isolation level
+
+            :paramref:`_sa.create_engine.isolation_level`
+            - set per :class:`_engine.Engine` isolation level
+
+            :paramref:`.Connection.execution_options.isolation_level`
+            - set per :class:`_engine.Connection` isolation level
+
+        """
+        return self.dialect.default_isolation_level
+
+    def _invalid_transaction(self) -> NoReturn:
+        raise exc.PendingRollbackError(
+            "Can't reconnect until invalid %stransaction is rolled "
+            "back.  Please rollback() fully before proceeding"
+            % ("savepoint " if self._nested_transaction is not None else ""),
+            code="8s2b",
+        )
+
+    def _revalidate_connection(self) -> PoolProxiedConnection:
+        if self.__can_reconnect and self.invalidated:
+            if self._transaction is not None:
+                self._invalid_transaction()
+            self._dbapi_connection = self.engine.raw_connection()
+            return self._dbapi_connection
+        raise exc.ResourceClosedError("This Connection is closed")
+
+    @property
+    def info(self) -> _InfoType:
+        """Info dictionary associated with the underlying DBAPI connection
+        referred to by this :class:`_engine.Connection`, allowing user-defined
+        data to be associated with the connection.
+
+        The data here will follow along with the DBAPI connection including
+        after it is returned to the connection pool and used again
+        in subsequent instances of :class:`_engine.Connection`.
+
+        """
+
+        return self.connection.info
+
+    def invalidate(self, exception: Optional[BaseException] = None) -> None:
+        """Invalidate the underlying DBAPI connection associated with
+        this :class:`_engine.Connection`.
+
+        An attempt will be made to close the underlying DBAPI connection
+        immediately; however if this operation fails, the error is logged
+        but not raised.  The connection is then discarded whether or not
+        close() succeeded.
+
+        Upon the next use (where "use" typically means using the
+        :meth:`_engine.Connection.execute` method or similar),
+        this :class:`_engine.Connection` will attempt to
+        procure a new DBAPI connection using the services of the
+        :class:`_pool.Pool` as a source of connectivity (e.g.
+        a "reconnection").
+
+        If a transaction was in progress (e.g. the
+        :meth:`_engine.Connection.begin` method has been called) when
+        :meth:`_engine.Connection.invalidate` method is called, at the DBAPI
+        level all state associated with this transaction is lost, as
+        the DBAPI connection is closed.  The :class:`_engine.Connection`
+        will not allow a reconnection to proceed until the
+        :class:`.Transaction` object is ended, by calling the
+        :meth:`.Transaction.rollback` method; until that point, any attempt at
+        continuing to use the :class:`_engine.Connection` will raise an
+        :class:`~sqlalchemy.exc.InvalidRequestError`.
+        This is to prevent applications from accidentally
+        continuing an ongoing transactional operation despite the
+        fact that the transaction has been lost due to an
+        invalidation.
+
+        The :meth:`_engine.Connection.invalidate` method,
+        just like auto-invalidation,
+        will at the connection pool level invoke the
+        :meth:`_events.PoolEvents.invalidate` event.
+
+        :param exception: an optional ``Exception`` instance that's the
+         reason for the invalidation.  is passed along to event handlers
+         and logging functions.
+
+        .. seealso::
+
+            :ref:`pool_connection_invalidation`
+
+        """
+
+        if self.invalidated:
+            return
+
+        if self.closed:
+            raise exc.ResourceClosedError("This Connection is closed")
+
+        if self._still_open_and_dbapi_connection_is_valid:
+            pool_proxied_connection = self._dbapi_connection
+            assert pool_proxied_connection is not None
+            pool_proxied_connection.invalidate(exception)
+
+        self._dbapi_connection = None
+
+    def detach(self) -> None:
+        """Detach the underlying DB-API connection from its connection pool.
+
+        E.g.::
+
+            with engine.connect() as conn:
+                conn.detach()
+                conn.execute(text("SET search_path TO schema1, schema2"))
+
+                # work with connection
+
+            # connection is fully closed (since we used "with:", can
+            # also call .close())
+
+        This :class:`_engine.Connection` instance will remain usable.
+        When closed
+        (or exited from a context manager context as above),
+        the DB-API connection will be literally closed and not
+        returned to its originating pool.
+
+        This method can be used to insulate the rest of an application
+        from a modified state on a connection (such as a transaction
+        isolation level or similar).
+
+        """
+
+        if self.closed:
+            raise exc.ResourceClosedError("This Connection is closed")
+
+        pool_proxied_connection = self._dbapi_connection
+        if pool_proxied_connection is None:
+            raise exc.InvalidRequestError(
+                "Can't detach an invalidated Connection"
+            )
+        pool_proxied_connection.detach()
+
+    def _autobegin(self) -> None:
+        if self._allow_autobegin and not self.__in_begin:
+            self.begin()
+
+    def begin(self) -> RootTransaction:
+        """Begin a transaction prior to autobegin occurring.
+
+        E.g.::
+
+            with engine.connect() as conn:
+                with conn.begin() as trans:
+                    conn.execute(table.insert(), {"username": "sandy"})
+
+        The returned object is an instance of :class:`_engine.RootTransaction`.
+        This object represents the "scope" of the transaction,
+        which completes when either the :meth:`_engine.Transaction.rollback`
+        or :meth:`_engine.Transaction.commit` method is called; the object
+        also works as a context manager as illustrated above.
+
+        The :meth:`_engine.Connection.begin` method begins a
+        transaction that normally will be begun in any case when the connection
+        is first used to execute a statement.  The reason this method might be
+        used would be to invoke the :meth:`_events.ConnectionEvents.begin`
+        event at a specific time, or to organize code within the scope of a
+        connection checkout in terms of context managed blocks, such as::
+
+            with engine.connect() as conn:
+                with conn.begin():
+                    conn.execute(...)
+                    conn.execute(...)
+
+                with conn.begin():
+                    conn.execute(...)
+                    conn.execute(...)
+
+        The above code is not fundamentally any different in its behavior than
+        the following code which does not use
+        :meth:`_engine.Connection.begin`; the below style is known
+        as "commit as you go" style::
+
+            with engine.connect() as conn:
+                conn.execute(...)
+                conn.execute(...)
+                conn.commit()
+
+                conn.execute(...)
+                conn.execute(...)
+                conn.commit()
+
+        From a database point of view, the :meth:`_engine.Connection.begin`
+        method does not emit any SQL or change the state of the underlying
+        DBAPI connection in any way; the Python DBAPI does not have any
+        concept of explicit transaction begin.
+
+        .. seealso::
+
+            :ref:`tutorial_working_with_transactions` - in the
+            :ref:`unified_tutorial`
+
+            :meth:`_engine.Connection.begin_nested` - use a SAVEPOINT
+
+            :meth:`_engine.Connection.begin_twophase` -
+            use a two phase /XID transaction
+
+            :meth:`_engine.Engine.begin` - context manager available from
+            :class:`_engine.Engine`
+
+        """
+        if self._transaction is None:
+            self._transaction = RootTransaction(self)
+            return self._transaction
+        else:
+            raise exc.InvalidRequestError(
+                "This connection has already initialized a SQLAlchemy "
+                "Transaction() object via begin() or autobegin; can't "
+                "call begin() here unless rollback() or commit() "
+                "is called first."
+            )
+
+    def begin_nested(self) -> NestedTransaction:
+        """Begin a nested transaction (i.e. SAVEPOINT) and return a transaction
+        handle that controls the scope of the SAVEPOINT.
+
+        E.g.::
+
+            with engine.begin() as connection:
+                with connection.begin_nested():
+                    connection.execute(table.insert(), {"username": "sandy"})
+
+        The returned object is an instance of
+        :class:`_engine.NestedTransaction`, which includes transactional
+        methods :meth:`_engine.NestedTransaction.commit` and
+        :meth:`_engine.NestedTransaction.rollback`; for a nested transaction,
+        these methods correspond to the operations "RELEASE SAVEPOINT <name>"
+        and "ROLLBACK TO SAVEPOINT <name>". The name of the savepoint is local
+        to the :class:`_engine.NestedTransaction` object and is generated
+        automatically. Like any other :class:`_engine.Transaction`, the
+        :class:`_engine.NestedTransaction` may be used as a context manager as
+        illustrated above which will "release" or "rollback" corresponding to
+        if the operation within the block were successful or raised an
+        exception.
+
+        Nested transactions require SAVEPOINT support in the underlying
+        database, else the behavior is undefined. SAVEPOINT is commonly used to
+        run operations within a transaction that may fail, while continuing the
+        outer transaction. E.g.::
+
+            from sqlalchemy import exc
+
+            with engine.begin() as connection:
+                trans = connection.begin_nested()
+                try:
+                    connection.execute(table.insert(), {"username": "sandy"})
+                    trans.commit()
+                except exc.IntegrityError:  # catch for duplicate username
+                    trans.rollback()  # rollback to savepoint
+
+                # outer transaction continues
+                connection.execute(...)
+
+        If :meth:`_engine.Connection.begin_nested` is called without first
+        calling :meth:`_engine.Connection.begin` or
+        :meth:`_engine.Engine.begin`, the :class:`_engine.Connection` object
+        will "autobegin" the outer transaction first. This outer transaction
+        may be committed using "commit-as-you-go" style, e.g.::
+
+            with engine.connect() as connection:  # begin() wasn't called
+
+                with connection.begin_nested():  # will auto-"begin()" first
+                    connection.execute(...)
+                # savepoint is released
+
+                connection.execute(...)
+
+                # explicitly commit outer transaction
+                connection.commit()
+
+                # can continue working with connection here
+
+        .. versionchanged:: 2.0
+
+            :meth:`_engine.Connection.begin_nested` will now participate
+            in the connection "autobegin" behavior that is new as of
+            2.0 / "future" style connections in 1.4.
+
+        .. seealso::
+
+            :meth:`_engine.Connection.begin`
+
+            :ref:`session_begin_nested` - ORM support for SAVEPOINT
+
+        """
+        if self._transaction is None:
+            self._autobegin()
+
+        return NestedTransaction(self)
+
+    def begin_twophase(self, xid: Optional[Any] = None) -> TwoPhaseTransaction:
+        """Begin a two-phase or XA transaction and return a transaction
+        handle.
+
+        The returned object is an instance of :class:`.TwoPhaseTransaction`,
+        which in addition to the methods provided by
+        :class:`.Transaction`, also provides a
+        :meth:`~.TwoPhaseTransaction.prepare` method.
+
+        :param xid: the two phase transaction id.  If not supplied, a
+          random id will be generated.
+
+        .. seealso::
+
+            :meth:`_engine.Connection.begin`
+
+            :meth:`_engine.Connection.begin_twophase`
+
+        """
+
+        if self._transaction is not None:
+            raise exc.InvalidRequestError(
+                "Cannot start a two phase transaction when a transaction "
+                "is already in progress."
+            )
+        if xid is None:
+            xid = self.engine.dialect.create_xid()
+        return TwoPhaseTransaction(self, xid)
+
+    def commit(self) -> None:
+        """Commit the transaction that is currently in progress.
+
+        This method commits the current transaction if one has been started.
+        If no transaction was started, the method has no effect, assuming
+        the connection is in a non-invalidated state.
+
+        A transaction is begun on a :class:`_engine.Connection` automatically
+        whenever a statement is first executed, or when the
+        :meth:`_engine.Connection.begin` method is called.
+
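+        E.g., a brief "commit-as-you-go" sketch, assuming an existing
+        ``table`` object::
+
+            with engine.connect() as connection:
+                connection.execute(table.insert(), {"username": "sandy"})
+                connection.commit()  # commits the "autobegun" transaction
+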
+        .. note:: The :meth:`_engine.Connection.commit` method only acts upon
+          the primary database transaction that is linked to the
+          :class:`_engine.Connection` object.  It does not operate upon a
+          SAVEPOINT that would have been invoked from the
+          :meth:`_engine.Connection.begin_nested` method; for control of a
+          SAVEPOINT, call :meth:`_engine.NestedTransaction.commit` on the
+          :class:`_engine.NestedTransaction` that is returned by the
+          :meth:`_engine.Connection.begin_nested` method itself.
+
+        """
+        if self._transaction:
+            self._transaction.commit()
+
+    def rollback(self) -> None:
+        """Roll back the transaction that is currently in progress.
+
+        This method rolls back the current transaction if one has been started.
+        If no transaction was started, the method has no effect.  If a
+        transaction was started and the connection is in an invalidated state,
+        the transaction is cleared using this method.
+
+        A transaction is begun on a :class:`_engine.Connection` automatically
+        whenever a statement is first executed, or when the
+        :meth:`_engine.Connection.begin` method is called.
+
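+        E.g., a brief sketch, again assuming an existing ``table`` object::
+
+            with engine.connect() as connection:
+                connection.execute(table.insert(), {"username": "sandy"})
+                connection.rollback()  # discards the pending INSERT
+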
+        .. note:: The :meth:`_engine.Connection.rollback` method only acts
+          upon the primary database transaction that is linked to the
+          :class:`_engine.Connection` object.  It does not operate upon a
+          SAVEPOINT that would have been invoked from the
+          :meth:`_engine.Connection.begin_nested` method; for control of a
+          SAVEPOINT, call :meth:`_engine.NestedTransaction.rollback` on the
+          :class:`_engine.NestedTransaction` that is returned by the
+          :meth:`_engine.Connection.begin_nested` method itself.
+
+        """
+        if self._transaction:
+            self._transaction.rollback()
+
+    def recover_twophase(self) -> List[Any]:
+        """Return a list of prepared two-phase transaction ids
+        recoverable from the database, as reported by the dialect."""
+        return self.engine.dialect.do_recover_twophase(self)
+
+    def rollback_prepared(self, xid: Any, recover: bool = False) -> None:
+        """Roll back the prepared two-phase transaction identified by
+        ``xid``."""
+        self.engine.dialect.do_rollback_twophase(self, xid, recover=recover)
+
+    def commit_prepared(self, xid: Any, recover: bool = False) -> None:
+        """Commit the prepared two-phase transaction identified by
+        ``xid``."""
+        self.engine.dialect.do_commit_twophase(self, xid, recover=recover)
+
+    def in_transaction(self) -> bool:
+        """Return True if a transaction is in progress."""
+        return self._transaction is not None and self._transaction.is_active
+
+    def in_nested_transaction(self) -> bool:
+        """Return True if a transaction is in progress."""
+        return (
+            self._nested_transaction is not None
+            and self._nested_transaction.is_active
+        )
+
+    def _is_autocommit_isolation(self) -> bool:
+        opt_iso = self._execution_options.get("isolation_level", None)
+        return bool(
+            opt_iso == "AUTOCOMMIT"
+            or (
+                opt_iso is None
+                and self.engine.dialect._on_connect_isolation_level
+                == "AUTOCOMMIT"
+            )
+        )
+
+    def _get_required_transaction(self) -> RootTransaction:
+        trans = self._transaction
+        if trans is None:
+            raise exc.InvalidRequestError("connection is not in a transaction")
+        return trans
+
+    def _get_required_nested_transaction(self) -> NestedTransaction:
+        trans = self._nested_transaction
+        if trans is None:
+            raise exc.InvalidRequestError(
+                "connection is not in a nested transaction"
+            )
+        return trans
+
+    def get_transaction(self) -> Optional[RootTransaction]:
+        """Return the current root transaction in progress, if any.
+
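+        E.g., a sketch illustrating the interaction with "autobegin"
+        (assumes ``text`` imported from ``sqlalchemy``)::
+
+            with engine.connect() as connection:
+                assert connection.get_transaction() is None
+                connection.execute(text("select 1"))  # autobegins
+                assert connection.get_transaction() is not None
+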
+        .. versionadded:: 1.4
+
+        """
+
+        return self._transaction
+
+    def get_nested_transaction(self) -> Optional[NestedTransaction]:
+        """Return the current nested transaction in progress, if any.
+
+        .. versionadded:: 1.4
+
+        """
+        return self._nested_transaction
+
+    def _begin_impl(self, transaction: RootTransaction) -> None:
+        if self._echo:
+            if self._is_autocommit_isolation():
+                self._log_info(
+                    "BEGIN (implicit; DBAPI should not BEGIN due to "
+                    "autocommit mode)"
+                )
+            else:
+                self._log_info("BEGIN (implicit)")
+
+        self.__in_begin = True
+
+        if self._has_events or self.engine._has_events:
+            self.dispatch.begin(self)
+
+        try:
+            self.engine.dialect.do_begin(self.connection)
+        except BaseException as e:
+            self._handle_dbapi_exception(e, None, None, None, None)
+        finally:
+            self.__in_begin = False
+
+    def _rollback_impl(self) -> None:
+        if self._has_events or self.engine._has_events:
+            self.dispatch.rollback(self)
+
+        if self._still_open_and_dbapi_connection_is_valid:
+            if self._echo:
+                if self._is_autocommit_isolation():
+                    self._log_info(
+                        "ROLLBACK using DBAPI connection.rollback(), "
+                        "DBAPI should ignore due to autocommit mode"
+                    )
+                else:
+                    self._log_info("ROLLBACK")
+            try:
+                self.engine.dialect.do_rollback(self.connection)
+            except BaseException as e:
+                self._handle_dbapi_exception(e, None, None, None, None)
+
+    def _commit_impl(self) -> None:
+        if self._has_events or self.engine._has_events:
+            self.dispatch.commit(self)
+
+        if self._echo:
+            if self._is_autocommit_isolation():
+                self._log_info(
+                    "COMMIT using DBAPI connection.commit(), "
+                    "DBAPI should ignore due to autocommit mode"
+                )
+            else:
+                self._log_info("COMMIT")
+        try:
+            self.engine.dialect.do_commit(self.connection)
+        except BaseException as e:
+            self._handle_dbapi_exception(e, None, None, None, None)
+
+    def _savepoint_impl(self, name: Optional[str] = None) -> str:
+        if self._has_events or self.engine._has_events:
+            self.dispatch.savepoint(self, name)
+
+        if name is None:
+            self.__savepoint_seq += 1
+            name = "sa_savepoint_%s" % self.__savepoint_seq
+        self.engine.dialect.do_savepoint(self, name)
+        return name
+
+    def _rollback_to_savepoint_impl(self, name: str) -> None:
+        if self._has_events or self.engine._has_events:
+            self.dispatch.rollback_savepoint(self, name, None)
+
+        if self._still_open_and_dbapi_connection_is_valid:
+            self.engine.dialect.do_rollback_to_savepoint(self, name)
+
+    def _release_savepoint_impl(self, name: str) -> None:
+        if self._has_events or self.engine._has_events:
+            self.dispatch.release_savepoint(self, name, None)
+
+        self.engine.dialect.do_release_savepoint(self, name)
+
+    def _begin_twophase_impl(self, transaction: TwoPhaseTransaction) -> None:
+        if self._echo:
+            self._log_info("BEGIN TWOPHASE (implicit)")
+        if self._has_events or self.engine._has_events:
+            self.dispatch.begin_twophase(self, transaction.xid)
+
+        self.__in_begin = True
+        try:
+            self.engine.dialect.do_begin_twophase(self, transaction.xid)
+        except BaseException as e:
+            self._handle_dbapi_exception(e, None, None, None, None)
+        finally:
+            self.__in_begin = False
+
+    def _prepare_twophase_impl(self, xid: Any) -> None:
+        if self._has_events or self.engine._has_events:
+            self.dispatch.prepare_twophase(self, xid)
+
+        assert isinstance(self._transaction, TwoPhaseTransaction)
+        try:
+            self.engine.dialect.do_prepare_twophase(self, xid)
+        except BaseException as e:
+            self._handle_dbapi_exception(e, None, None, None, None)
+
+    def _rollback_twophase_impl(self, xid: Any, is_prepared: bool) -> None:
+        if self._has_events or self.engine._has_events:
+            self.dispatch.rollback_twophase(self, xid, is_prepared)
+
+        if self._still_open_and_dbapi_connection_is_valid:
+            assert isinstance(self._transaction, TwoPhaseTransaction)
+            try:
+                self.engine.dialect.do_rollback_twophase(
+                    self, xid, is_prepared
+                )
+            except BaseException as e:
+                self._handle_dbapi_exception(e, None, None, None, None)
+
+    def _commit_twophase_impl(self, xid: Any, is_prepared: bool) -> None:
+        if self._has_events or self.engine._has_events:
+            self.dispatch.commit_twophase(self, xid, is_prepared)
+
+        assert isinstance(self._transaction, TwoPhaseTransaction)
+        try:
+            self.engine.dialect.do_commit_twophase(self, xid, is_prepared)
+        except BaseException as e:
+            self._handle_dbapi_exception(e, None, None, None, None)
+
+    def close(self) -> None:
+        """Close this :class:`_engine.Connection`.
+
+        This results in a release of the underlying database
+        resources, that is, the DBAPI connection referenced
+        internally. The DBAPI connection is typically restored
+        back to the connection-holding :class:`_pool.Pool` referenced
+        by the :class:`_engine.Engine` that produced this
+        :class:`_engine.Connection`. Any transactional state present on
+        the DBAPI connection is also unconditionally released via
+        the DBAPI connection's ``rollback()`` method, regardless
+        of any :class:`.Transaction` object that may be
+        outstanding with regards to this :class:`_engine.Connection`.
+
+        This has the effect of also calling :meth:`_engine.Connection.rollback`
+        if any transaction is in place.
+
+        After :meth:`_engine.Connection.close` is called, the
+        :class:`_engine.Connection` is permanently in a closed state,
+        and will allow no further operations.
+
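+        E.g., a brief sketch (assumes ``text`` imported from
+        ``sqlalchemy``)::
+
+            connection = engine.connect()
+            try:
+                connection.execute(text("select 1"))
+            finally:
+                connection.close()  # DBAPI connection returned to the pool
+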
+        """
+
+        if self._transaction:
+            self._transaction.close()
+            skip_reset = True
+        else:
+            skip_reset = False
+
+        if self._dbapi_connection is not None:
+            conn = self._dbapi_connection
+
+            # as we just closed the transaction, close the connection
+            # pool connection without doing an additional reset
+            if skip_reset:
+                cast("_ConnectionFairy", conn)._close_special(
+                    transaction_reset=True
+                )
+            else:
+                conn.close()
+
+            # There is a slight chance that conn.close() may have
+            # triggered an invalidation here in which case
+            # _dbapi_connection would already be None, however usually
+            # it will be non-None here and in a "closed" state.
+            self._dbapi_connection = None
+        self.__can_reconnect = False
+
+    @overload
+    def scalar(
+        self,
+        statement: TypedReturnsRows[Tuple[_T]],
+        parameters: Optional[_CoreSingleExecuteParams] = None,
+        *,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+    ) -> Optional[_T]: ...
+
+    @overload
+    def scalar(
+        self,
+        statement: Executable,
+        parameters: Optional[_CoreSingleExecuteParams] = None,
+        *,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+    ) -> Any: ...
+
+    def scalar(
+        self,
+        statement: Executable,
+        parameters: Optional[_CoreSingleExecuteParams] = None,
+        *,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+    ) -> Any:
+        r"""Executes a SQL statement construct and returns a scalar object.
+
+        This method is shorthand for invoking the
+        :meth:`_engine.Result.scalar` method after invoking the
+        :meth:`_engine.Connection.execute` method.  Parameters are equivalent.
+
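+        E.g., a minimal sketch, assuming ``select`` imported from
+        ``sqlalchemy`` and an existing ``table`` object::
+
+            value = connection.scalar(
+                select(table.c.username).where(table.c.id == 5)
+            )
+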
+        :return: a scalar Python value representing the first column of the
+         first row returned.
+
+        """
+        distilled_parameters = _distill_params_20(parameters)
+        try:
+            meth = statement._execute_on_scalar
+        except AttributeError as err:
+            raise exc.ObjectNotExecutableError(statement) from err
+        else:
+            return meth(
+                self,
+                distilled_parameters,
+                execution_options or NO_OPTIONS,
+            )
+
+    @overload
+    def scalars(
+        self,
+        statement: TypedReturnsRows[Tuple[_T]],
+        parameters: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+    ) -> ScalarResult[_T]: ...
+
+    @overload
+    def scalars(
+        self,
+        statement: Executable,
+        parameters: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+    ) -> ScalarResult[Any]: ...
+
+    def scalars(
+        self,
+        statement: Executable,
+        parameters: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+    ) -> ScalarResult[Any]:
+        """Executes and returns a scalar result set, which yields scalar values
+        from the first column of each row.
+
+        This method is equivalent to calling :meth:`_engine.Connection.execute`
+        to receive a :class:`_result.Result` object, then invoking the
+        :meth:`_result.Result.scalars` method to produce a
+        :class:`_result.ScalarResult` instance.
+
+        :return: a :class:`_result.ScalarResult`
+
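+        E.g., a brief sketch, assuming ``select`` imported from
+        ``sqlalchemy`` and an existing ``table`` object::
+
+            usernames = connection.scalars(select(table.c.username)).all()
+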
+        .. versionadded:: 1.4.24
+
+        """
+
+        return self.execute(
+            statement, parameters, execution_options=execution_options
+        ).scalars()
+
+    @overload
+    def execute(
+        self,
+        statement: TypedReturnsRows[_T],
+        parameters: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+    ) -> CursorResult[_T]: ...
+
+    @overload
+    def execute(
+        self,
+        statement: Executable,
+        parameters: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+    ) -> CursorResult[Any]: ...
+
+    def execute(
+        self,
+        statement: Executable,
+        parameters: Optional[_CoreAnyExecuteParams] = None,
+        *,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+    ) -> CursorResult[Any]:
+        r"""Executes a SQL statement construct and returns a
+        :class:`_engine.CursorResult`.
+
+        :param statement: The statement to be executed.  This is always
+         an object that is in both the :class:`_expression.ClauseElement` and
+         :class:`_expression.Executable` hierarchies, including:
+
+         * :class:`_expression.Select`
+         * :class:`_expression.Insert`, :class:`_expression.Update`,
+           :class:`_expression.Delete`
+         * :class:`_expression.TextClause` and
+           :class:`_expression.TextualSelect`
+         * :class:`_schema.DDL` and objects which inherit from
+           :class:`_schema.ExecutableDDLElement`
+
+        :param parameters: parameters which will be bound into the statement.
+         This may be either a dictionary of parameter names to values,
+         or a mutable sequence (e.g. a list) of dictionaries.  When a
+         list of dictionaries is passed, the underlying statement execution
+         will make use of the DBAPI ``cursor.executemany()`` method.
+         When a single dictionary is passed, the DBAPI ``cursor.execute()``
+         method will be used.
+
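+         E.g., a single dictionary vs. a list of dictionaries, assuming
+         an existing ``table`` object::
+
+             connection.execute(table.insert(), {"username": "sandy"})
+
+             connection.execute(
+                 table.insert(),
+                 [{"username": "sandy"}, {"username": "patrick"}],
+             )
+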
+        :param execution_options: optional dictionary of execution options,
+         which will be associated with the statement execution.  This
+         dictionary can provide a subset of the options that are accepted
+         by :meth:`_engine.Connection.execution_options`.
+
+        :return: a :class:`_engine.CursorResult` object.
+
+        """
+        distilled_parameters = _distill_params_20(parameters)
+        try:
+            meth = statement._execute_on_connection
+        except AttributeError as err:
+            raise exc.ObjectNotExecutableError(statement) from err
+        else:
+            return meth(
+                self,
+                distilled_parameters,
+                execution_options or NO_OPTIONS,
+            )
+
+    def _execute_function(
+        self,
+        func: FunctionElement[Any],
+        distilled_parameters: _CoreMultiExecuteParams,
+        execution_options: CoreExecuteOptionsParameter,
+    ) -> CursorResult[Any]:
+        """Execute a sql.FunctionElement object."""
+
+        return self._execute_clauseelement(
+            func.select(), distilled_parameters, execution_options
+        )
+
+    def _execute_default(
+        self,
+        default: DefaultGenerator,
+        distilled_parameters: _CoreMultiExecuteParams,
+        execution_options: CoreExecuteOptionsParameter,
+    ) -> Any:
+        """Execute a schema.ColumnDefault object."""
+
+        execution_options = self._execution_options.merge_with(
+            execution_options
+        )
+
+        event_multiparams: Optional[_CoreMultiExecuteParams]
+        event_params: Optional[_CoreAnyExecuteParams]
+
+        # note for event handlers, the "distilled parameters" which is always
+        # a list of dicts is broken out into separate "multiparams" and
+        # "params" collections, which allows the handler to distinguish
+        # between an executemany and execute style set of parameters.
+        if self._has_events or self.engine._has_events:
+            (
+                default,
+                distilled_parameters,
+                event_multiparams,
+                event_params,
+            ) = self._invoke_before_exec_event(
+                default, distilled_parameters, execution_options
+            )
+        else:
+            event_multiparams = event_params = None
+
+        try:
+            conn = self._dbapi_connection
+            if conn is None:
+                conn = self._revalidate_connection()
+
+            dialect = self.dialect
+            ctx = dialect.execution_ctx_cls._init_default(
+                dialect, self, conn, execution_options
+            )
+        except (exc.PendingRollbackError, exc.ResourceClosedError):
+            raise
+        except BaseException as e:
+            self._handle_dbapi_exception(e, None, None, None, None)
+
+        ret = ctx._exec_default(None, default, None)
+
+        if self._has_events or self.engine._has_events:
+            self.dispatch.after_execute(
+                self,
+                default,
+                event_multiparams,
+                event_params,
+                execution_options,
+                ret,
+            )
+
+        return ret
+
+    def _execute_ddl(
+        self,
+        ddl: ExecutableDDLElement,
+        distilled_parameters: _CoreMultiExecuteParams,
+        execution_options: CoreExecuteOptionsParameter,
+    ) -> CursorResult[Any]:
+        """Execute a schema.DDL object."""
+
+        exec_opts = ddl._execution_options.merge_with(
+            self._execution_options, execution_options
+        )
+
+        event_multiparams: Optional[_CoreMultiExecuteParams]
+        event_params: Optional[_CoreSingleExecuteParams]
+
+        if self._has_events or self.engine._has_events:
+            (
+                ddl,
+                distilled_parameters,
+                event_multiparams,
+                event_params,
+            ) = self._invoke_before_exec_event(
+                ddl, distilled_parameters, exec_opts
+            )
+        else:
+            event_multiparams = event_params = None
+
+        schema_translate_map = exec_opts.get("schema_translate_map", None)
+
+        dialect = self.dialect
+
+        compiled = ddl.compile(
+            dialect=dialect, schema_translate_map=schema_translate_map
+        )
+        ret = self._execute_context(
+            dialect,
+            dialect.execution_ctx_cls._init_ddl,
+            compiled,
+            None,
+            exec_opts,
+            compiled,
+        )
+        if self._has_events or self.engine._has_events:
+            self.dispatch.after_execute(
+                self,
+                ddl,
+                event_multiparams,
+                event_params,
+                exec_opts,
+                ret,
+            )
+        return ret
+
+    def _invoke_before_exec_event(
+        self,
+        elem: Any,
+        distilled_params: _CoreMultiExecuteParams,
+        execution_options: _ExecuteOptions,
+    ) -> Tuple[
+        Any,
+        _CoreMultiExecuteParams,
+        _CoreMultiExecuteParams,
+        _CoreSingleExecuteParams,
+    ]:
+        event_multiparams: _CoreMultiExecuteParams
+        event_params: _CoreSingleExecuteParams
+
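+        # a single-element parameter list is presented to event handlers as
+        # "params" (one dictionary); a multi-element list is presented as
+        # "multiparams", so handlers can distinguish execute() style from
+        # executemany() style invocations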
+        if len(distilled_params) == 1:
+            event_multiparams, event_params = [], distilled_params[0]
+        else:
+            event_multiparams, event_params = distilled_params, {}
+
+        for fn in self.dispatch.before_execute:
+            elem, event_multiparams, event_params = fn(
+                self,
+                elem,
+                event_multiparams,
+                event_params,
+                execution_options,
+            )
+
+        if event_multiparams:
+            distilled_params = list(event_multiparams)
+            if event_params:
+                raise exc.InvalidRequestError(
+                    "Event handler can't return non-empty multiparams "
+                    "and params at the same time"
+                )
+        elif event_params:
+            distilled_params = [event_params]
+        else:
+            distilled_params = []
+
+        return elem, distilled_params, event_multiparams, event_params
+
+    def _execute_clauseelement(
+        self,
+        elem: Executable,
+        distilled_parameters: _CoreMultiExecuteParams,
+        execution_options: CoreExecuteOptionsParameter,
+    ) -> CursorResult[Any]:
+        """Execute a sql.ClauseElement object."""
+
+        execution_options = elem._execution_options.merge_with(
+            self._execution_options, execution_options
+        )
+
+        has_events = self._has_events or self.engine._has_events
+        if has_events:
+            (
+                elem,
+                distilled_parameters,
+                event_multiparams,
+                event_params,
+            ) = self._invoke_before_exec_event(
+                elem, distilled_parameters, execution_options
+            )
+
+        if distilled_parameters:
+            # sort the keys into a plain list so we don't retain the dict
+            # view object for keys(), which links back to the values that
+            # we don't want to cache
+            keys = sorted(distilled_parameters[0])
+            for_executemany = len(distilled_parameters) > 1
+        else:
+            keys = []
+            for_executemany = False
+
+        dialect = self.dialect
+
+        schema_translate_map = execution_options.get(
+            "schema_translate_map", None
+        )
+
+        compiled_cache: Optional[CompiledCacheType] = execution_options.get(
+            "compiled_cache", self.engine._compiled_cache
+        )
+
+        compiled_sql, extracted_params, cache_hit = elem._compile_w_cache(
+            dialect=dialect,
+            compiled_cache=compiled_cache,
+            column_keys=keys,
+            for_executemany=for_executemany,
+            schema_translate_map=schema_translate_map,
+            linting=self.dialect.compiler_linting | compiler.WARN_LINTING,
+        )
+        ret = self._execute_context(
+            dialect,
+            dialect.execution_ctx_cls._init_compiled,
+            compiled_sql,
+            distilled_parameters,
+            execution_options,
+            compiled_sql,
+            distilled_parameters,
+            elem,
+            extracted_params,
+            cache_hit=cache_hit,
+        )
+        if has_events:
+            self.dispatch.after_execute(
+                self,
+                elem,
+                event_multiparams,
+                event_params,
+                execution_options,
+                ret,
+            )
+        return ret
+
+    def _execute_compiled(
+        self,
+        compiled: Compiled,
+        distilled_parameters: _CoreMultiExecuteParams,
+        execution_options: CoreExecuteOptionsParameter = _EMPTY_EXECUTION_OPTS,
+    ) -> CursorResult[Any]:
+        """Execute a sql.Compiled object.
+
+        TODO: why do we have this?   likely deprecate or remove
+
+        """
+
+        execution_options = compiled.execution_options.merge_with(
+            self._execution_options, execution_options
+        )
+
+        if self._has_events or self.engine._has_events:
+            (
+                compiled,
+                distilled_parameters,
+                event_multiparams,
+                event_params,
+            ) = self._invoke_before_exec_event(
+                compiled, distilled_parameters, execution_options
+            )
+
+        dialect = self.dialect
+
+        ret = self._execute_context(
+            dialect,
+            dialect.execution_ctx_cls._init_compiled,
+            compiled,
+            distilled_parameters,
+            execution_options,
+            compiled,
+            distilled_parameters,
+            None,
+            None,
+        )
+        if self._has_events or self.engine._has_events:
+            self.dispatch.after_execute(
+                self,
+                compiled,
+                event_multiparams,
+                event_params,
+                execution_options,
+                ret,
+            )
+        return ret
+
+    def exec_driver_sql(
+        self,
+        statement: str,
+        parameters: Optional[_DBAPIAnyExecuteParams] = None,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+    ) -> CursorResult[Any]:
+        r"""Executes a string SQL statement on the DBAPI cursor directly,
+        without any SQL compilation steps.
+
+        This can be used to pass any string directly to the
+        ``cursor.execute()`` method of the DBAPI in use.
+
+        :param statement: The statement str to be executed.   Bound parameters
+         must use the underlying DBAPI's paramstyle, such as "qmark",
+         "pyformat", "format", etc.
+
+        :param parameters: represent bound parameter values to be used in the
+         execution.  The format is one of: a dictionary of named parameters,
+         a tuple of positional parameters, or a list containing either
+         dictionaries or tuples for multiple-execute support.
+
+         E.g. multiple dictionaries::
+
+             conn.exec_driver_sql(
+                 "INSERT INTO table (id, value) VALUES (%(id)s, %(value)s)",
+                 [{"id": 1, "value": "v1"}, {"id": 2, "value": "v2"}],
+             )
+
+         Single dictionary::
+
+             conn.exec_driver_sql(
+                 "INSERT INTO table (id, value) VALUES (%(id)s, %(value)s)",
+                 dict(id=1, value="v1"),
+             )
+
+         Single tuple::
+
+             conn.exec_driver_sql(
+                 "INSERT INTO table (id, value) VALUES (?, ?)", (1, "v1")
+             )
+
+        :return: a :class:`_engine.CursorResult`.
+
+        .. note:: The :meth:`_engine.Connection.exec_driver_sql` method does
+            not participate in the
+            :meth:`_events.ConnectionEvents.before_execute` and
+            :meth:`_events.ConnectionEvents.after_execute` events.  To
+            intercept calls to :meth:`_engine.Connection.exec_driver_sql`, use
+            :meth:`_events.ConnectionEvents.before_cursor_execute` and
+            :meth:`_events.ConnectionEvents.after_cursor_execute`.
+
+        .. seealso::
+
+            :pep:`249`
+
+        """
+
+        distilled_parameters = _distill_raw_params(parameters)
+
+        execution_options = self._execution_options.merge_with(
+            execution_options
+        )
+
+        dialect = self.dialect
+        ret = self._execute_context(
+            dialect,
+            dialect.execution_ctx_cls._init_statement,
+            statement,
+            None,
+            execution_options,
+            statement,
+            distilled_parameters,
+        )
+
+        return ret
+
+    def _execute_context(
+        self,
+        dialect: Dialect,
+        constructor: Callable[..., ExecutionContext],
+        statement: Union[str, Compiled],
+        parameters: Optional[_AnyMultiExecuteParams],
+        execution_options: _ExecuteOptions,
+        *args: Any,
+        **kw: Any,
+    ) -> CursorResult[Any]:
+        """Create an :class:`.ExecutionContext` and execute, returning
+        a :class:`_engine.CursorResult`."""
+
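+        # the "yield_per" execution option implies streamed results
+        # ("stream_results") with a matching row buffer size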
+        if execution_options:
+            yp = execution_options.get("yield_per", None)
+            if yp:
+                execution_options = execution_options.union(
+                    {"stream_results": True, "max_row_buffer": yp}
+                )
+        try:
+            conn = self._dbapi_connection
+            if conn is None:
+                conn = self._revalidate_connection()
+
+            context = constructor(
+                dialect, self, conn, execution_options, *args, **kw
+            )
+        except (exc.PendingRollbackError, exc.ResourceClosedError):
+            raise
+        except BaseException as e:
+            self._handle_dbapi_exception(
+                e, str(statement), parameters, None, None
+            )
+
+        if (
+            self._transaction
+            and not self._transaction.is_active
+            or (
+                self._nested_transaction
+                and not self._nested_transaction.is_active
+            )
+        ):
+            self._invalid_transaction()
+
+        elif self._trans_context_manager:
+            TransactionalContext._trans_ctx_check(self)
+
+        if self._transaction is None:
+            self._autobegin()
+
+        context.pre_exec()
+
+        if context.execute_style is ExecuteStyle.INSERTMANYVALUES:
+            return self._exec_insertmany_context(dialect, context)
+        else:
+            return self._exec_single_context(
+                dialect, context, statement, parameters
+            )
+
+    def _exec_single_context(
+        self,
+        dialect: Dialect,
+        context: ExecutionContext,
+        statement: Union[str, Compiled],
+        parameters: Optional[_AnyMultiExecuteParams],
+    ) -> CursorResult[Any]:
+        """continue the _execute_context() method for a single DBAPI
+        cursor.execute() or cursor.executemany() call.
+
+        """
+        if dialect.bind_typing is BindTyping.SETINPUTSIZES:
+            generic_setinputsizes = context._prepare_set_input_sizes()
+
+            if generic_setinputsizes:
+                try:
+                    dialect.do_set_input_sizes(
+                        context.cursor, generic_setinputsizes, context
+                    )
+                except BaseException as e:
+                    self._handle_dbapi_exception(
+                        e, str(statement), parameters, None, context
+                    )
+
+        cursor, str_statement, parameters = (
+            context.cursor,
+            context.statement,
+            context.parameters,
+        )
+
+        effective_parameters: Optional[_AnyExecuteParams]
+
+        if not context.executemany:
+            effective_parameters = parameters[0]
+        else:
+            effective_parameters = parameters
+
+        if self._has_events or self.engine._has_events:
+            for fn in self.dispatch.before_cursor_execute:
+                str_statement, effective_parameters = fn(
+                    self,
+                    cursor,
+                    str_statement,
+                    effective_parameters,
+                    context,
+                    context.executemany,
+                )
+
+        if self._echo:
+            self._log_info(str_statement)
+
+            stats = context._get_cache_stats()
+
+            if not self.engine.hide_parameters:
+                self._log_info(
+                    "[%s] %r",
+                    stats,
+                    sql_util._repr_params(
+                        effective_parameters,
+                        batches=10,
+                        ismulti=context.executemany,
+                    ),
+                )
+            else:
+                self._log_info(
+                    "[%s] [SQL parameters hidden due to hide_parameters=True]",
+                    stats,
+                )
+
+        evt_handled: bool = False
+        try:
+            if context.execute_style is ExecuteStyle.EXECUTEMANY:
+                effective_parameters = cast(
+                    "_CoreMultiExecuteParams", effective_parameters
+                )
+                if self.dialect._has_events:
+                    for fn in self.dialect.dispatch.do_executemany:
+                        if fn(
+                            cursor,
+                            str_statement,
+                            effective_parameters,
+                            context,
+                        ):
+                            evt_handled = True
+                            break
+                if not evt_handled:
+                    self.dialect.do_executemany(
+                        cursor,
+                        str_statement,
+                        effective_parameters,
+                        context,
+                    )
+            elif not effective_parameters and context.no_parameters:
+                if self.dialect._has_events:
+                    for fn in self.dialect.dispatch.do_execute_no_params:
+                        if fn(cursor, str_statement, context):
+                            evt_handled = True
+                            break
+                if not evt_handled:
+                    self.dialect.do_execute_no_params(
+                        cursor, str_statement, context
+                    )
+            else:
+                effective_parameters = cast(
+                    "_CoreSingleExecuteParams", effective_parameters
+                )
+                if self.dialect._has_events:
+                    for fn in self.dialect.dispatch.do_execute:
+                        if fn(
+                            cursor,
+                            str_statement,
+                            effective_parameters,
+                            context,
+                        ):
+                            evt_handled = True
+                            break
+                if not evt_handled:
+                    self.dialect.do_execute(
+                        cursor, str_statement, effective_parameters, context
+                    )
+
+            if self._has_events or self.engine._has_events:
+                self.dispatch.after_cursor_execute(
+                    self,
+                    cursor,
+                    str_statement,
+                    effective_parameters,
+                    context,
+                    context.executemany,
+                )
+
+            context.post_exec()
+
+            result = context._setup_result_proxy()
+
+        except BaseException as e:
+            self._handle_dbapi_exception(
+                e, str_statement, effective_parameters, cursor, context
+            )
+
+        return result
+
+    def _exec_insertmany_context(
+        self,
+        dialect: Dialect,
+        context: ExecutionContext,
+    ) -> CursorResult[Any]:
+        """continue the _execute_context() method for an "insertmanyvalues"
+        operation, which will invoke DBAPI
+        cursor.execute() one or more times with individual log and
+        event hook calls.
+
+        """
+
+        if dialect.bind_typing is BindTyping.SETINPUTSIZES:
+            generic_setinputsizes = context._prepare_set_input_sizes()
+        else:
+            generic_setinputsizes = None
+
+        cursor, str_statement, parameters = (
+            context.cursor,
+            context.statement,
+            context.parameters,
+        )
+
+        effective_parameters = parameters
+
+        engine_events = self._has_events or self.engine._has_events
+        if self.dialect._has_events:
+            do_execute_dispatch: Iterable[Any] = (
+                self.dialect.dispatch.do_execute
+            )
+        else:
+            do_execute_dispatch = ()
+
+        if self._echo:
+            stats = context._get_cache_stats() + " (insertmanyvalues)"
+
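+        # with the "preserve_rowcount" execution option, total up the size
+        # of each parameter batch so an overall rowcount can be set on the
+        # result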
+        preserve_rowcount = context.execution_options.get(
+            "preserve_rowcount", False
+        )
+        rowcount = 0
+
+        for imv_batch in dialect._deliver_insertmanyvalues_batches(
+            self,
+            cursor,
+            str_statement,
+            effective_parameters,
+            generic_setinputsizes,
+            context,
+        ):
+            if imv_batch.processed_setinputsizes:
+                try:
+                    dialect.do_set_input_sizes(
+                        context.cursor,
+                        imv_batch.processed_setinputsizes,
+                        context,
+                    )
+                except BaseException as e:
+                    self._handle_dbapi_exception(
+                        e,
+                        sql_util._long_statement(imv_batch.replaced_statement),
+                        imv_batch.replaced_parameters,
+                        None,
+                        context,
+                        is_sub_exec=True,
+                    )
+
+            sub_stmt = imv_batch.replaced_statement
+            sub_params = imv_batch.replaced_parameters
+
+            if engine_events:
+                for fn in self.dispatch.before_cursor_execute:
+                    sub_stmt, sub_params = fn(
+                        self,
+                        cursor,
+                        sub_stmt,
+                        sub_params,
+                        context,
+                        True,
+                    )
+
+            if self._echo:
+                self._log_info(sql_util._long_statement(sub_stmt))
+
+                imv_stats = f""" {imv_batch.batchnum}/{
+                            imv_batch.total_batches
+                } ({
+                    'ordered'
+                    if imv_batch.rows_sorted else 'unordered'
+                }{
+                    '; batch not supported'
+                    if imv_batch.is_downgraded
+                    else ''
+                })"""
+
+                if imv_batch.batchnum == 1:
+                    stats += imv_stats
+                else:
+                    stats = f"insertmanyvalues{imv_stats}"
+
+                if not self.engine.hide_parameters:
+                    self._log_info(
+                        "[%s] %r",
+                        stats,
+                        sql_util._repr_params(
+                            sub_params,
+                            batches=10,
+                            ismulti=False,
+                        ),
+                    )
+                else:
+                    self._log_info(
+                        "[%s] [SQL parameters hidden due to "
+                        "hide_parameters=True]",
+                        stats,
+                    )
+
+            try:
+                for fn in do_execute_dispatch:
+                    if fn(
+                        cursor,
+                        sub_stmt,
+                        sub_params,
+                        context,
+                    ):
+                        break
+                else:
+                    dialect.do_execute(
+                        cursor,
+                        sub_stmt,
+                        sub_params,
+                        context,
+                    )
+
+            except BaseException as e:
+                self._handle_dbapi_exception(
+                    e,
+                    sql_util._long_statement(sub_stmt),
+                    sub_params,
+                    cursor,
+                    context,
+                    is_sub_exec=True,
+                )
+
+            if engine_events:
+                self.dispatch.after_cursor_execute(
+                    self,
+                    cursor,
+                    str_statement,
+                    effective_parameters,
+                    context,
+                    context.executemany,
+                )
+
+            if preserve_rowcount:
+                rowcount += imv_batch.current_batch_size
+
+        try:
+            context.post_exec()
+
+            if preserve_rowcount:
+                context._rowcount = rowcount  # type: ignore[attr-defined]
+
+            result = context._setup_result_proxy()
+
+        except BaseException as e:
+            self._handle_dbapi_exception(
+                e, str_statement, effective_parameters, cursor, context
+            )
+
+        return result
+
+    def _cursor_execute(
+        self,
+        cursor: DBAPICursor,
+        statement: str,
+        parameters: _DBAPISingleExecuteParams,
+        context: Optional[ExecutionContext] = None,
+    ) -> None:
+        """Execute a statement + params on the given cursor.
+
+        Adds appropriate logging and exception handling.
+
+        This method is used by DefaultDialect for special-case
+        executions, such as for sequences and column defaults.
+        The path of statement execution in the majority of cases
+        terminates at _execute_context().
+
+        """
+        if self._has_events or self.engine._has_events:
+            for fn in self.dispatch.before_cursor_execute:
+                statement, parameters = fn(
+                    self, cursor, statement, parameters, context, False
+                )
+
+        if self._echo:
+            self._log_info(statement)
+            self._log_info("[raw sql] %r", parameters)
+        try:
+            for fn in (
+                ()
+                if not self.dialect._has_events
+                else self.dialect.dispatch.do_execute
+            ):
+                if fn(cursor, statement, parameters, context):
+                    break
+            else:
+                self.dialect.do_execute(cursor, statement, parameters, context)
+        except BaseException as e:
+            self._handle_dbapi_exception(
+                e, statement, parameters, cursor, context
+            )
+
+        if self._has_events or self.engine._has_events:
+            self.dispatch.after_cursor_execute(
+                self, cursor, statement, parameters, context, False
+            )
+
+    def _safe_close_cursor(self, cursor: DBAPICursor) -> None:
+        """Close the given cursor, catching exceptions
+        and turning into log warnings.
+
+        """
+        try:
+            cursor.close()
+        except Exception:
+            # log the error through the connection pool's logger.
+            self.engine.pool.logger.error(
+                "Error closing cursor", exc_info=True
+            )
+
+    _reentrant_error = False
+    _is_disconnect = False
+
+    def _handle_dbapi_exception(
+        self,
+        e: BaseException,
+        statement: Optional[str],
+        parameters: Optional[_AnyExecuteParams],
+        cursor: Optional[DBAPICursor],
+        context: Optional[ExecutionContext],
+        is_sub_exec: bool = False,
+    ) -> NoReturn:
+        exc_info = sys.exc_info()
+
+        is_exit_exception = util.is_exit_exception(e)
+
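+        # determine whether the error indicates that the DBAPI connection
+        # is unusable; "exit" exceptions (e.g. GreenletExit,
+        # KeyboardInterrupt) also invalidate the connection, however below
+        # they won't cause the pool to be invalidated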
+        if not self._is_disconnect:
+            self._is_disconnect = (
+                isinstance(e, self.dialect.loaded_dbapi.Error)
+                and not self.closed
+                and self.dialect.is_disconnect(
+                    e,
+                    self._dbapi_connection if not self.invalidated else None,
+                    cursor,
+                )
+            ) or (is_exit_exception and not self.closed)
+
+        invalidate_pool_on_disconnect = not is_exit_exception
+
+        ismulti: bool = (
+            not is_sub_exec and context.executemany
+            if context is not None
+            else False
+        )
+        if self._reentrant_error:
+            raise exc.DBAPIError.instance(
+                statement,
+                parameters,
+                e,
+                self.dialect.loaded_dbapi.Error,
+                hide_parameters=self.engine.hide_parameters,
+                dialect=self.dialect,
+                ismulti=ismulti,
+            ).with_traceback(exc_info[2]) from e
+        self._reentrant_error = True
+        try:
+            # non-DBAPI error - if we already got a context,
+            # or there's no string statement, don't wrap it
+            should_wrap = isinstance(e, self.dialect.loaded_dbapi.Error) or (
+                statement is not None
+                and context is None
+                and not is_exit_exception
+            )
+
+            if should_wrap:
+                sqlalchemy_exception = exc.DBAPIError.instance(
+                    statement,
+                    parameters,
+                    cast(Exception, e),
+                    self.dialect.loaded_dbapi.Error,
+                    hide_parameters=self.engine.hide_parameters,
+                    connection_invalidated=self._is_disconnect,
+                    dialect=self.dialect,
+                    ismulti=ismulti,
+                )
+            else:
+                sqlalchemy_exception = None
+
+            newraise = None
+
+            if (self.dialect._has_events) and not self._execution_options.get(
+                "skip_user_error_events", False
+            ):
+                ctx = ExceptionContextImpl(
+                    e,
+                    sqlalchemy_exception,
+                    self.engine,
+                    self.dialect,
+                    self,
+                    cursor,
+                    statement,
+                    parameters,
+                    context,
+                    self._is_disconnect,
+                    invalidate_pool_on_disconnect,
+                    False,
+                )
+
+                for fn in self.dialect.dispatch.handle_error:
+                    try:
+                        # handler returns an exception;
+                        # call next handler in a chain
+                        per_fn = fn(ctx)
+                        if per_fn is not None:
+                            ctx.chained_exception = newraise = per_fn
+                    except Exception as _raised:
+                        # handler raises an exception - stop processing
+                        newraise = _raised
+                        break
+
+                if self._is_disconnect != ctx.is_disconnect:
+                    self._is_disconnect = ctx.is_disconnect
+                    if sqlalchemy_exception:
+                        sqlalchemy_exception.connection_invalidated = (
+                            ctx.is_disconnect
+                        )
+
+                # set up potentially user-defined value for
+                # invalidate pool.
+                invalidate_pool_on_disconnect = (
+                    ctx.invalidate_pool_on_disconnect
+                )
+
+            if should_wrap and context:
+                context.handle_dbapi_exception(e)
+
+            if not self._is_disconnect:
+                if cursor:
+                    self._safe_close_cursor(cursor)
+                # "autorollback" was mostly relevant in 1.x series.
+                # It's very unlikely to reach here, as the connection
+                # does autobegin so when we are here, we are usually
+                # in an explicit / semi-explicit transaction.
+                # however we have a test which manufactures this
+                # scenario in any case using an event handler.
+                # test/engine/test_execute.py-> test_actual_autorollback
+                if not self.in_transaction():
+                    self._rollback_impl()
+
+            if newraise:
+                raise newraise.with_traceback(exc_info[2]) from e
+            elif should_wrap:
+                assert sqlalchemy_exception is not None
+                raise sqlalchemy_exception.with_traceback(exc_info[2]) from e
+            else:
+                assert exc_info[1] is not None
+                raise exc_info[1].with_traceback(exc_info[2])
+        finally:
+            del self._reentrant_error
+            if self._is_disconnect:
+                del self._is_disconnect
+                if not self.invalidated:
+                    dbapi_conn_wrapper = self._dbapi_connection
+                    assert dbapi_conn_wrapper is not None
+                    if invalidate_pool_on_disconnect:
+                        self.engine.pool._invalidate(dbapi_conn_wrapper, e)
+                    self.invalidate(e)
+
+    @classmethod
+    def _handle_dbapi_exception_noconnection(
+        cls,
+        e: BaseException,
+        dialect: Dialect,
+        engine: Optional[Engine] = None,
+        is_disconnect: Optional[bool] = None,
+        invalidate_pool_on_disconnect: bool = True,
+        is_pre_ping: bool = False,
+    ) -> NoReturn:
+        exc_info = sys.exc_info()
+
+        if is_disconnect is None:
+            is_disconnect = isinstance(
+                e, dialect.loaded_dbapi.Error
+            ) and dialect.is_disconnect(e, None, None)
+
+        should_wrap = isinstance(e, dialect.loaded_dbapi.Error)
+
+        if should_wrap:
+            sqlalchemy_exception = exc.DBAPIError.instance(
+                None,
+                None,
+                cast(Exception, e),
+                dialect.loaded_dbapi.Error,
+                hide_parameters=(
+                    engine.hide_parameters if engine is not None else False
+                ),
+                connection_invalidated=is_disconnect,
+                dialect=dialect,
+            )
+        else:
+            sqlalchemy_exception = None
+
+        newraise = None
+
+        if dialect._has_events:
+            ctx = ExceptionContextImpl(
+                e,
+                sqlalchemy_exception,
+                engine,
+                dialect,
+                None,
+                None,
+                None,
+                None,
+                None,
+                is_disconnect,
+                invalidate_pool_on_disconnect,
+                is_pre_ping,
+            )
+            for fn in dialect.dispatch.handle_error:
+                try:
+                    # handler returns an exception;
+                    # call next handler in a chain
+                    per_fn = fn(ctx)
+                    if per_fn is not None:
+                        ctx.chained_exception = newraise = per_fn
+                except Exception as _raised:
+                    # handler raises an exception - stop processing
+                    newraise = _raised
+                    break
+
+            if sqlalchemy_exception and is_disconnect != ctx.is_disconnect:
+                sqlalchemy_exception.connection_invalidated = is_disconnect = (
+                    ctx.is_disconnect
+                )
+
+        if newraise:
+            raise newraise.with_traceback(exc_info[2]) from e
+        elif should_wrap:
+            assert sqlalchemy_exception is not None
+            raise sqlalchemy_exception.with_traceback(exc_info[2]) from e
+        else:
+            assert exc_info[1] is not None
+            raise exc_info[1].with_traceback(exc_info[2])
+
+    def _run_ddl_visitor(
+        self,
+        visitorcallable: Type[Union[SchemaGenerator, SchemaDropper]],
+        element: SchemaItem,
+        **kwargs: Any,
+    ) -> None:
+        """run a DDL visitor.
+
+        This method is only here so that the MockConnection can change the
+        options given to the visitor so that "checkfirst" is skipped.
+
+        """
+        visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
+
+
+class ExceptionContextImpl(ExceptionContext):
+    """Implement the :class:`.ExceptionContext` interface."""
+
+    __slots__ = (
+        "connection",
+        "engine",
+        "dialect",
+        "cursor",
+        "statement",
+        "parameters",
+        "original_exception",
+        "sqlalchemy_exception",
+        "chained_exception",
+        "execution_context",
+        "is_disconnect",
+        "invalidate_pool_on_disconnect",
+        "is_pre_ping",
+    )
+
+    def __init__(
+        self,
+        exception: BaseException,
+        sqlalchemy_exception: Optional[exc.StatementError],
+        engine: Optional[Engine],
+        dialect: Dialect,
+        connection: Optional[Connection],
+        cursor: Optional[DBAPICursor],
+        statement: Optional[str],
+        parameters: Optional[_DBAPIAnyExecuteParams],
+        context: Optional[ExecutionContext],
+        is_disconnect: bool,
+        invalidate_pool_on_disconnect: bool,
+        is_pre_ping: bool,
+    ):
+        self.engine = engine
+        self.dialect = dialect
+        self.connection = connection
+        self.sqlalchemy_exception = sqlalchemy_exception
+        self.original_exception = exception
+        self.execution_context = context
+        self.statement = statement
+        self.parameters = parameters
+        self.is_disconnect = is_disconnect
+        self.invalidate_pool_on_disconnect = invalidate_pool_on_disconnect
+        self.is_pre_ping = is_pre_ping
+
+
+class Transaction(TransactionalContext):
+    """Represent a database transaction in progress.
+
+    The :class:`.Transaction` object is procured by
+    calling the :meth:`_engine.Connection.begin` method of
+    :class:`_engine.Connection`::
+
+        from sqlalchemy import create_engine, text
+
+        engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")
+        connection = engine.connect()
+        trans = connection.begin()
+        connection.execute(text("insert into x (a, b) values (1, 2)"))
+        trans.commit()
+
+    The object provides :meth:`.rollback` and :meth:`.commit`
+    methods in order to control transaction boundaries.  It
+    also implements a context manager interface so that
+    the Python ``with`` statement can be used with the
+    :meth:`_engine.Connection.begin` method::
+
+        with connection.begin():
+            connection.execute(text("insert into x (a, b) values (1, 2)"))
+
+    The Transaction object is **not** threadsafe.
+
+    .. seealso::
+
+        :meth:`_engine.Connection.begin`
+
+        :meth:`_engine.Connection.begin_twophase`
+
+        :meth:`_engine.Connection.begin_nested`
+
+    .. index::
+      single: thread safety; Transaction
+    """  # noqa
+
+    __slots__ = ()
+
+    _is_root: bool = False
+    is_active: bool
+    connection: Connection
+
+    def __init__(self, connection: Connection):
+        raise NotImplementedError()
+
+    @property
+    def _deactivated_from_connection(self) -> bool:
+        """True if this transaction is totally deactivated from the connection
+        and therefore can no longer affect its state.
+
+        """
+        raise NotImplementedError()
+
+    def _do_close(self) -> None:
+        raise NotImplementedError()
+
+    def _do_rollback(self) -> None:
+        raise NotImplementedError()
+
+    def _do_commit(self) -> None:
+        raise NotImplementedError()
+
+    @property
+    def is_valid(self) -> bool:
+        return self.is_active and not self.connection.invalidated
+
+    def close(self) -> None:
+        """Close this :class:`.Transaction`.
+
+        If this transaction is the base transaction in a begin/commit
+        nesting, the transaction will rollback().  Otherwise, the
+        method returns.
+
+        This is used to cancel a Transaction without affecting the scope of
+        an enclosing transaction.
+
+        """
+        try:
+            self._do_close()
+        finally:
+            assert not self.is_active
+
+    def rollback(self) -> None:
+        """Roll back this :class:`.Transaction`.
+
+        The implementation of this may vary based on the type of transaction in
+        use:
+
+        * For a simple database transaction (e.g. :class:`.RootTransaction`),
+          it corresponds to a ROLLBACK.
+
+        * For a :class:`.NestedTransaction`, it corresponds to a
+          "ROLLBACK TO SAVEPOINT" operation.
+
+        * For a :class:`.TwoPhaseTransaction`, DBAPI-specific methods for two
+          phase transactions may be used.
+
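+        For example, a minimal sketch (assuming ``engine`` is bound to a
+        backend that supports savepoints) in which the SAVEPOINT variant
+        is exercised::
+
+            with engine.connect() as connection:
+                with connection.begin():
+                    nested = connection.begin_nested()
+                    connection.execute(text("insert into x (a, b) values (1, 2)"))
+                    nested.rollback()  # emits ROLLBACK TO SAVEPOINT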
+
+        """
+        try:
+            self._do_rollback()
+        finally:
+            assert not self.is_active
+
+    def commit(self) -> None:
+        """Commit this :class:`.Transaction`.
+
+        The implementation of this may vary based on the type of transaction in
+        use:
+
+        * For a simple database transaction (e.g. :class:`.RootTransaction`),
+          it corresponds to a COMMIT.
+
+        * For a :class:`.NestedTransaction`, it corresponds to a
+          "RELEASE SAVEPOINT" operation.
+
+        * For a :class:`.TwoPhaseTransaction`, DBAPI-specific methods for two
+          phase transactions may be used.
+
+        """
+        try:
+            self._do_commit()
+        finally:
+            assert not self.is_active
+
+    def _get_subject(self) -> Connection:
+        return self.connection
+
+    def _transaction_is_active(self) -> bool:
+        return self.is_active
+
+    def _transaction_is_closed(self) -> bool:
+        return not self._deactivated_from_connection
+
+    def _rollback_can_be_called(self) -> bool:
+        # for RootTransaction / NestedTransaction, it's safe to call
+        # rollback() even if the transaction is deactive and no warnings
+        # will be emitted.  tested in
+        # test_transaction.py -> test_no_rollback_in_deactive(?:_savepoint)?
+        return True
+
+
+class RootTransaction(Transaction):
+    """Represent the "root" transaction on a :class:`_engine.Connection`.
+
+    This corresponds to the current "BEGIN/COMMIT/ROLLBACK" that's occurring
+    for the :class:`_engine.Connection`. The :class:`_engine.RootTransaction`
+    is created by calling upon the :meth:`_engine.Connection.begin` method, and
+    remains associated with the :class:`_engine.Connection` throughout its
+    active span. The current :class:`_engine.RootTransaction` in use is
+    accessible via the :attr:`_engine.Connection.get_transaction` method of
+    :class:`_engine.Connection`.
+
+    In :term:`2.0 style` use, the :class:`_engine.Connection` also employs
+    "autobegin" behavior that will create a new
+    :class:`_engine.RootTransaction` whenever a connection in a
+    non-transactional state is used to emit commands on the DBAPI connection.
+    The scope of the :class:`_engine.RootTransaction` in 2.0 style
+    use can be controlled using the :meth:`_engine.Connection.commit` and
+    :meth:`_engine.Connection.rollback` methods.
+
+
+    """
+
+    _is_root = True
+
+    __slots__ = ("connection", "is_active")
+
+    def __init__(self, connection: Connection):
+        assert connection._transaction is None
+        if connection._trans_context_manager:
+            TransactionalContext._trans_ctx_check(connection)
+        self.connection = connection
+        self._connection_begin_impl()
+        connection._transaction = self
+
+        self.is_active = True
+
+    def _deactivate_from_connection(self) -> None:
+        if self.is_active:
+            assert self.connection._transaction is self
+            self.is_active = False
+
+        elif self.connection._transaction is not self:
+            util.warn("transaction already deassociated from connection")
+
+    @property
+    def _deactivated_from_connection(self) -> bool:
+        return self.connection._transaction is not self
+
+    def _connection_begin_impl(self) -> None:
+        self.connection._begin_impl(self)
+
+    def _connection_rollback_impl(self) -> None:
+        self.connection._rollback_impl()
+
+    def _connection_commit_impl(self) -> None:
+        self.connection._commit_impl()
+
+    def _close_impl(self, try_deactivate: bool = False) -> None:
+        try:
+            if self.is_active:
+                self._connection_rollback_impl()
+
+            if self.connection._nested_transaction:
+                self.connection._nested_transaction._cancel()
+        finally:
+            if self.is_active or try_deactivate:
+                self._deactivate_from_connection()
+            if self.connection._transaction is self:
+                self.connection._transaction = None
+
+        assert not self.is_active
+        assert self.connection._transaction is not self
+
+    def _do_close(self) -> None:
+        self._close_impl()
+
+    def _do_rollback(self) -> None:
+        self._close_impl(try_deactivate=True)
+
+    def _do_commit(self) -> None:
+        if self.is_active:
+            assert self.connection._transaction is self
+
+            try:
+                self._connection_commit_impl()
+            finally:
+                # whether or not commit succeeds, cancel any
+                # nested transactions, make this transaction "inactive"
+                # and remove it as a reset agent
+                if self.connection._nested_transaction:
+                    self.connection._nested_transaction._cancel()
+
+                self._deactivate_from_connection()
+
+            # ...however only remove as the connection's current transaction
+            # if commit succeeded.  otherwise it stays on so that a rollback
+            # needs to occur.
+            self.connection._transaction = None
+        else:
+            if self.connection._transaction is self:
+                self.connection._invalid_transaction()
+            else:
+                raise exc.InvalidRequestError("This transaction is inactive")
+
+        assert not self.is_active
+        assert self.connection._transaction is not self
+
+
+class NestedTransaction(Transaction):
+    """Represent a 'nested', or SAVEPOINT transaction.
+
+    The :class:`.NestedTransaction` object is created by calling the
+    :meth:`_engine.Connection.begin_nested` method of
+    :class:`_engine.Connection`.
+
+    When using :class:`.NestedTransaction`, the semantics of "begin" /
+    "commit" / "rollback" are as follows:
+
+    * the "begin" operation corresponds to the "BEGIN SAVEPOINT" command, where
+      the savepoint is given an explicit name that is part of the state
+      of this object.
+
+    * The :meth:`.NestedTransaction.commit` method corresponds to a
+      "RELEASE SAVEPOINT" operation, using the savepoint identifier associated
+      with this :class:`.NestedTransaction`.
+
+    * The :meth:`.NestedTransaction.rollback` method corresponds to a
+      "ROLLBACK TO SAVEPOINT" operation, using the savepoint identifier
+      associated with this :class:`.NestedTransaction`.
+
+    The rationale for mimicking the semantics of an outer transaction in
+    terms of savepoints is so that code may deal with a "savepoint"
+    transaction and an "outer" transaction in an agnostic way.
+
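+    For example, a minimal sketch (assuming ``engine`` is bound to a
+    backend that supports savepoints) of the full SAVEPOINT lifecycle::
+
+        with engine.connect() as connection:
+            with connection.begin():  # outer transaction: BEGIN
+                savepoint = connection.begin_nested()  # SAVEPOINT
+                connection.execute(text("insert into x (a, b) values (1, 2)"))
+                savepoint.commit()  # RELEASE SAVEPOINT
+            # exiting the outer block emits COMMIT
+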
+    .. seealso::
+
+        :ref:`session_begin_nested` - ORM version of the SAVEPOINT API.
+
+    """
+
+    __slots__ = ("connection", "is_active", "_savepoint", "_previous_nested")
+
+    _savepoint: str
+
+    def __init__(self, connection: Connection):
+        assert connection._transaction is not None
+        if connection._trans_context_manager:
+            TransactionalContext._trans_ctx_check(connection)
+        self.connection = connection
+        self._savepoint = self.connection._savepoint_impl()
+        self.is_active = True
+        self._previous_nested = connection._nested_transaction
+        connection._nested_transaction = self
+
+    def _deactivate_from_connection(self, warn: bool = True) -> None:
+        if self.connection._nested_transaction is self:
+            self.connection._nested_transaction = self._previous_nested
+        elif warn:
+            util.warn(
+                "nested transaction already deassociated from connection"
+            )
+
+    @property
+    def _deactivated_from_connection(self) -> bool:
+        return self.connection._nested_transaction is not self
+
+    def _cancel(self) -> None:
+        # called by RootTransaction when the outer transaction is
+        # committed, rolled back, or closed to cancel all savepoints
+        # without any action being taken
+        self.is_active = False
+        self._deactivate_from_connection()
+        if self._previous_nested:
+            self._previous_nested._cancel()
+
+    def _close_impl(
+        self, deactivate_from_connection: bool, warn_already_deactive: bool
+    ) -> None:
+        try:
+            if (
+                self.is_active
+                and self.connection._transaction
+                and self.connection._transaction.is_active
+            ):
+                self.connection._rollback_to_savepoint_impl(self._savepoint)
+        finally:
+            self.is_active = False
+
+            if deactivate_from_connection:
+                self._deactivate_from_connection(warn=warn_already_deactive)
+
+        assert not self.is_active
+        if deactivate_from_connection:
+            assert self.connection._nested_transaction is not self
+
+    def _do_close(self) -> None:
+        self._close_impl(True, False)
+
+    def _do_rollback(self) -> None:
+        self._close_impl(True, True)
+
+    def _do_commit(self) -> None:
+        if self.is_active:
+            try:
+                self.connection._release_savepoint_impl(self._savepoint)
+            finally:
+                # nested trans becomes inactive on failed release
+                # unconditionally.  this prevents it from trying to
+                # emit SQL when it rolls back.
+                self.is_active = False
+
+            # but only de-associate from connection if it succeeded
+            self._deactivate_from_connection()
+        else:
+            if self.connection._nested_transaction is self:
+                self.connection._invalid_transaction()
+            else:
+                raise exc.InvalidRequestError(
+                    "This nested transaction is inactive"
+                )
+
+
+class TwoPhaseTransaction(RootTransaction):
+    """Represent a two-phase transaction.
+
+    A new :class:`.TwoPhaseTransaction` object may be procured
+    using the :meth:`_engine.Connection.begin_twophase` method.
+
+    The interface is the same as that of :class:`.Transaction`
+    with the addition of the :meth:`prepare` method.
+
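+    For example, a minimal sketch, assuming a DBAPI / backend with
+    two-phase support::
+
+        with engine.connect() as connection:
+            trans = connection.begin_twophase()
+            connection.execute(text("insert into x (a, b) values (1, 2)"))
+            trans.prepare()  # phase one
+            trans.commit()  # phase two
+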
+    """
+
+    __slots__ = ("xid", "_is_prepared")
+
+    xid: Any
+
+    def __init__(self, connection: Connection, xid: Any):
+        self._is_prepared = False
+        self.xid = xid
+        super().__init__(connection)
+
+    def prepare(self) -> None:
+        """Prepare this :class:`.TwoPhaseTransaction`.
+
+        After a PREPARE, the transaction can be committed.
+
+        """
+        if not self.is_active:
+            raise exc.InvalidRequestError("This transaction is inactive")
+        self.connection._prepare_twophase_impl(self.xid)
+        self._is_prepared = True
+
+    def _connection_begin_impl(self) -> None:
+        self.connection._begin_twophase_impl(self)
+
+    def _connection_rollback_impl(self) -> None:
+        self.connection._rollback_twophase_impl(self.xid, self._is_prepared)
+
+    def _connection_commit_impl(self) -> None:
+        self.connection._commit_twophase_impl(self.xid, self._is_prepared)
+
+
+class Engine(
+    ConnectionEventsTarget, log.Identified, inspection.Inspectable["Inspector"]
+):
+    """
+    Connects a :class:`~sqlalchemy.pool.Pool` and
+    :class:`~sqlalchemy.engine.interfaces.Dialect` together to provide a
+    source of database connectivity and behavior.
+
+    An :class:`_engine.Engine` object is instantiated publicly using the
+    :func:`~sqlalchemy.create_engine` function.
+
+    .. seealso::
+
+        :doc:`/core/engines`
+
+        :ref:`connections_toplevel`
+
+    """
+
+    dispatch: dispatcher[ConnectionEventsTarget]
+
+    _compiled_cache: Optional[CompiledCacheType]
+
+    _execution_options: _ExecuteOptions = _EMPTY_EXECUTION_OPTS
+    _has_events: bool = False
+    _connection_cls: Type[Connection] = Connection
+    _sqla_logger_namespace: str = "sqlalchemy.engine.Engine"
+    _is_future: bool = False
+
+    _schema_translate_map: Optional[SchemaTranslateMapType] = None
+    _option_cls: Type[OptionEngine]
+
+    dialect: Dialect
+    pool: Pool
+    url: URL
+    hide_parameters: bool
+
+    def __init__(
+        self,
+        pool: Pool,
+        dialect: Dialect,
+        url: URL,
+        logging_name: Optional[str] = None,
+        echo: Optional[_EchoFlagType] = None,
+        query_cache_size: int = 500,
+        execution_options: Optional[Mapping[str, Any]] = None,
+        hide_parameters: bool = False,
+    ):
+        self.pool = pool
+        self.url = url
+        self.dialect = dialect
+        if logging_name:
+            self.logging_name = logging_name
+        self.echo = echo
+        self.hide_parameters = hide_parameters
+        if query_cache_size != 0:
+            self._compiled_cache = util.LRUCache(
+                query_cache_size, size_alert=self._lru_size_alert
+            )
+        else:
+            self._compiled_cache = None
+        log.instance_logger(self, echoflag=echo)
+        if execution_options:
+            self.update_execution_options(**execution_options)
+
+    def _lru_size_alert(self, cache: util.LRUCache[Any, Any]) -> None:
+        if self._should_log_info():
+            self.logger.info(
+                "Compiled cache size pruning from %d items to %d.  "
+                "Increase cache size to reduce the frequency of pruning.",
+                len(cache),
+                cache.capacity,
+            )
+
+    @property
+    def engine(self) -> Engine:
+        """Returns this :class:`.Engine`.
+
+        Used for legacy schemes that accept :class:`.Connection` /
+        :class:`.Engine` objects within the same variable.
+
+        """
+        return self
+
+    def clear_compiled_cache(self) -> None:
+        """Clear the compiled cache associated with the dialect.
+
+        This applies **only** to the built-in cache that is established
+        via the :paramref:`_engine.create_engine.query_cache_size` parameter.
+        It will not impact any dictionary caches that were passed via the
+        :paramref:`.Connection.execution_options.compiled_cache` parameter.
+
+        .. versionadded:: 1.4
+
+        """
+        if self._compiled_cache:
+            self._compiled_cache.clear()
+
+    def update_execution_options(self, **opt: Any) -> None:
+        r"""Update the default execution_options dictionary
+        of this :class:`_engine.Engine`.
+
+        The given keys/values in \**opt are added to the
+        default execution options that will be used for
+        all connections.  The initial contents of this dictionary
+        can be sent via the ``execution_options`` parameter
+        to :func:`_sa.create_engine`.
+
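+        E.g., a small sketch (assuming ``engine`` already exists)::
+
+            engine.update_execution_options(isolation_level="AUTOCOMMIT")
+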
+        .. seealso::
+
+            :meth:`_engine.Connection.execution_options`
+
+            :meth:`_engine.Engine.execution_options`
+
+        """
+        self.dispatch.set_engine_execution_options(self, opt)
+        self._execution_options = self._execution_options.union(opt)
+        self.dialect.set_engine_execution_options(self, opt)
+
+    @overload
+    def execution_options(
+        self,
+        *,
+        compiled_cache: Optional[CompiledCacheType] = ...,
+        logging_token: str = ...,
+        isolation_level: IsolationLevel = ...,
+        insertmanyvalues_page_size: int = ...,
+        schema_translate_map: Optional[SchemaTranslateMapType] = ...,
+        **opt: Any,
+    ) -> OptionEngine: ...
+
+    @overload
+    def execution_options(self, **opt: Any) -> OptionEngine: ...
+
+    def execution_options(self, **opt: Any) -> OptionEngine:
+        """Return a new :class:`_engine.Engine` that will provide
+        :class:`_engine.Connection` objects with the given execution options.
+
+        The returned :class:`_engine.Engine` remains related to the original
+        :class:`_engine.Engine` in that it shares the same connection pool and
+        other state:
+
+        * The :class:`_pool.Pool` used by the new :class:`_engine.Engine`
+          is the
+          same instance.  The :meth:`_engine.Engine.dispose`
+          method will replace
+          the connection pool instance for the parent engine as well
+          as this one.
+        * Event listeners are "cascaded" - meaning, the new
+          :class:`_engine.Engine`
+          inherits the events of the parent, and new events can be associated
+          with the new :class:`_engine.Engine` individually.
+        * The logging configuration and logging_name are copied from the parent
+          :class:`_engine.Engine`.
+
+        The intent of the :meth:`_engine.Engine.execution_options` method is
+        to implement schemes where multiple :class:`_engine.Engine`
+        objects refer to the same connection pool, but are differentiated
+        by options that affect some execution-level behavior for each
+        engine.    One such example is breaking into separate "reader" and
+        "writer" :class:`_engine.Engine` instances, where one
+        :class:`_engine.Engine`
+        has a lower :term:`isolation level` setting configured or is even
+        transaction-disabled using "autocommit".  An example of this
+        configuration is at :ref:`dbapi_autocommit_multiple`.
+
+        Another example is one that
+        uses a custom option ``shard_id`` which is consumed by an event
+        to change the current schema on a database connection::
+
+            from sqlalchemy import event
+            from sqlalchemy.engine import Engine
+
+            primary_engine = create_engine("mysql+mysqldb://")
+            shard1 = primary_engine.execution_options(shard_id="shard1")
+            shard2 = primary_engine.execution_options(shard_id="shard2")
+
+            shards = {"default": "base", "shard_1": "db1", "shard_2": "db2"}
+
+
+            @event.listens_for(Engine, "before_cursor_execute")
+            def _switch_shard(conn, cursor, stmt, params, context, executemany):
+                shard_id = conn.get_execution_options().get("shard_id", "default")
+                current_shard = conn.info.get("current_shard", None)
+
+                if current_shard != shard_id:
+                    cursor.execute("use %s" % shards[shard_id])
+                    conn.info["current_shard"] = shard_id
+
+        The above recipe illustrates two :class:`_engine.Engine` objects that
+        will each serve as factories for :class:`_engine.Connection` objects
+        that have pre-established "shard_id" execution options present. A
+        :meth:`_events.ConnectionEvents.before_cursor_execute` event handler
+        then interprets this execution option to emit a MySQL ``use`` statement
+        to switch databases before a statement execution, while at the same
+        time keeping track of which database we've established using the
+        :attr:`_engine.Connection.info` dictionary.
+
+        .. seealso::
+
+            :meth:`_engine.Connection.execution_options`
+            - update execution options
+            on a :class:`_engine.Connection` object.
+
+            :meth:`_engine.Engine.update_execution_options`
+            - update the execution
+            options for a given :class:`_engine.Engine` in place.
+
+            :meth:`_engine.Engine.get_execution_options`
+
+
+        """  # noqa: E501
+        return self._option_cls(self, opt)
+
+    def get_execution_options(self) -> _ExecuteOptions:
+        """Get the non-SQL options which will take effect during execution.
+
+        .. versionadded:: 1.3
+
+        .. seealso::
+
+            :meth:`_engine.Engine.execution_options`
+        """
+        return self._execution_options
+
+    @property
+    def name(self) -> str:
+        """String name of the :class:`~sqlalchemy.engine.interfaces.Dialect`
+        in use by this :class:`Engine`.
+
+        """
+
+        return self.dialect.name
+
+    @property
+    def driver(self) -> str:
+        """Driver name of the :class:`~sqlalchemy.engine.interfaces.Dialect`
+        in use by this :class:`Engine`.
+
+        """
+
+        return self.dialect.driver
+
+    echo = log.echo_property()
+
+    def __repr__(self) -> str:
+        return "Engine(%r)" % (self.url,)
+
+    def dispose(self, close: bool = True) -> None:
+        """Dispose of the connection pool used by this
+        :class:`_engine.Engine`.
+
+        A new connection pool is created immediately after the old one has been
+        disposed. The previous connection pool is disposed either actively, by
+        closing out all currently checked-in connections in that pool, or
+        passively, by losing references to it but otherwise not closing any
+        connections. The latter strategy is more appropriate for an initializer
+        in a forked Python process.
+
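+        E.g., a hedged sketch of the multiprocessing use, in which a child
+        process passively discards the pool it inherited so that the
+        parent's connections are left untouched::
+
+            import os
+
+            pid = os.fork()
+            if pid == 0:
+                # child process: replace the inherited pool without
+                # closing out the parent's connections
+                engine.dispose(close=False)
+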
+        :param close: if left at its default of ``True``, has the
+         effect of fully closing all **currently checked in**
+         database connections.  Connections that are still checked out
+         will **not** be closed, however they will no longer be associated
+         with this :class:`_engine.Engine`,
+         so when they are closed individually, eventually the
+         :class:`_pool.Pool` which they are associated with will
+         be garbage collected and they will be closed out fully, if
+         not already closed on checkin.
+
+         If set to ``False``, the previous connection pool is de-referenced,
+         and otherwise not touched in any way.
+
+        .. versionadded:: 1.4.33  Added the :paramref:`.Engine.dispose.close`
+            parameter to allow the replacement of a connection pool in a child
+            process without interfering with the connections used by the parent
+            process.
+
+
+        .. seealso::
+
+            :ref:`engine_disposal`
+
+            :ref:`pooling_multiprocessing`
+
+        """
+        if close:
+            self.pool.dispose()
+        self.pool = self.pool.recreate()
+        self.dispatch.engine_disposed(self)
+
+    @contextlib.contextmanager
+    def _optional_conn_ctx_manager(
+        self, connection: Optional[Connection] = None
+    ) -> Iterator[Connection]:
+        if connection is None:
+            with self.connect() as conn:
+                yield conn
+        else:
+            yield connection
+
+    @contextlib.contextmanager
+    def begin(self) -> Iterator[Connection]:
+        """Return a context manager delivering a :class:`_engine.Connection`
+        with a :class:`.Transaction` established.
+
+        E.g.::
+
+            with engine.begin() as conn:
+                conn.execute(text("insert into table (x, y, z) values (1, 2, 3)"))
+                conn.execute(text("my_special_procedure(5)"))
+
+        Upon successful operation, the :class:`.Transaction`
+        is committed.  If an error is raised, the :class:`.Transaction`
+        is rolled back.
+
+        .. seealso::
+
+            :meth:`_engine.Engine.connect` - procure a
+            :class:`_engine.Connection` from
+            an :class:`_engine.Engine`.
+
+            :meth:`_engine.Connection.begin` - start a :class:`.Transaction`
+            for a particular :class:`_engine.Connection`.
+
+        """  # noqa: E501
+        with self.connect() as conn:
+            with conn.begin():
+                yield conn
+
+    def _run_ddl_visitor(
+        self,
+        visitorcallable: Type[Union[SchemaGenerator, SchemaDropper]],
+        element: SchemaItem,
+        **kwargs: Any,
+    ) -> None:
+        with self.begin() as conn:
+            conn._run_ddl_visitor(visitorcallable, element, **kwargs)
+
+    def connect(self) -> Connection:
+        """Return a new :class:`_engine.Connection` object.
+
+        The :class:`_engine.Connection` acts as a Python context manager, so
+        the typical use of this method looks like::
+
+            with engine.connect() as connection:
+                connection.execute(text("insert into table values ('foo')"))
+                connection.commit()
+
+        Where above, after the block is completed, the connection is "closed"
+        and its underlying DBAPI resources are returned to the connection pool.
+        This also has the effect of rolling back any transaction that
+        was explicitly begun or was begun via autobegin, and will
+        emit the :meth:`_events.ConnectionEvents.rollback` event if one was
+        started and is still in progress.
+
+        .. seealso::
+
+            :meth:`_engine.Engine.begin`
+
+        """
+
+        return self._connection_cls(self)
+
+    def raw_connection(self) -> PoolProxiedConnection:
+        """Return a "raw" DBAPI connection from the connection pool.
+
+        The returned object is a proxied version of the DBAPI
+        connection object used by the underlying driver in use.
+        The object will have all the same behavior as the real DBAPI
+        connection, except that its ``close()`` method will result in the
+        connection being returned to the pool, rather than being closed
+        for real.
+
+        This method provides direct DBAPI connection access for
+        special situations when the API provided by
+        :class:`_engine.Connection`
+        is not needed.   When a :class:`_engine.Connection` object is already
+        present, the DBAPI connection is available using
+        the :attr:`_engine.Connection.connection` accessor.
+
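+        E.g., a minimal sketch; the ``close()`` call below returns the
+        connection to the pool rather than closing it for real::
+
+            dbapi_conn = engine.raw_connection()
+            try:
+                cursor = dbapi_conn.cursor()
+                cursor.execute("select 1")
+                cursor.close()
+            finally:
+                dbapi_conn.close()
+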
+        .. seealso::
+
+            :ref:`dbapi_connections`
+
+        """
+        return self.pool.connect()
+
+
+class OptionEngineMixin(log.Identified):
+    _sa_propagate_class_events = False
+
+    dispatch: dispatcher[ConnectionEventsTarget]
+    _compiled_cache: Optional[CompiledCacheType]
+    dialect: Dialect
+    pool: Pool
+    url: URL
+    hide_parameters: bool
+    echo: log.echo_property
+
+    def __init__(
+        self, proxied: Engine, execution_options: CoreExecuteOptionsParameter
+    ):
+        self._proxied = proxied
+        self.url = proxied.url
+        self.dialect = proxied.dialect
+        self.logging_name = proxied.logging_name
+        self.echo = proxied.echo
+        self._compiled_cache = proxied._compiled_cache
+        self.hide_parameters = proxied.hide_parameters
+        log.instance_logger(self, echoflag=self.echo)
+
+        # note: this will propagate events that are assigned to the parent
+        # engine after this OptionEngine is created.   Since we share
+        # the events of the parent we also disallow class-level events
+        # to apply to the OptionEngine class directly.
+        #
+        # the other way this can work would be to transfer existing
+        # events only, using:
+        # self.dispatch._update(proxied.dispatch)
+        #
+        # that might be more appropriate however it would be a behavioral
+        # change for logic that assigns events to the parent engine and
+        # would like it to take effect for the already-created sub-engine.
+        self.dispatch = self.dispatch._join(proxied.dispatch)
+
+        self._execution_options = proxied._execution_options
+        self.update_execution_options(**execution_options)
+
+    def update_execution_options(self, **opt: Any) -> None:
+        raise NotImplementedError()
+
+    if not typing.TYPE_CHECKING:
+        # https://github.com/python/typing/discussions/1095
+
+        @property
+        def pool(self) -> Pool:
+            return self._proxied.pool
+
+        @pool.setter
+        def pool(self, pool: Pool) -> None:
+            self._proxied.pool = pool
+
+        @property
+        def _has_events(self) -> bool:
+            return self._proxied._has_events or self.__dict__.get(
+                "_has_events", False
+            )
+
+        @_has_events.setter
+        def _has_events(self, value: bool) -> None:
+            self.__dict__["_has_events"] = value
+
+
+class OptionEngine(OptionEngineMixin, Engine):
+    def update_execution_options(self, **opt: Any) -> None:
+        Engine.update_execution_options(self, **opt)
+
+
+Engine._option_cls = OptionEngine
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/characteristics.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/characteristics.py
new file mode 100644
index 00000000..322c28b5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/characteristics.py
@@ -0,0 +1,155 @@
+# engine/characteristics.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+from __future__ import annotations
+
+import abc
+import typing
+from typing import Any
+from typing import ClassVar
+
+if typing.TYPE_CHECKING:
+    from .base import Connection
+    from .interfaces import DBAPIConnection
+    from .interfaces import Dialect
+
+
+class ConnectionCharacteristic(abc.ABC):
+    """An abstract base for an object that can set, get and reset a
+    per-connection characteristic, typically one that gets reset when the
+    connection is returned to the connection pool.
+
+    Transaction isolation is the canonical example, and the
+    ``IsolationLevelCharacteristic`` implementation provides this for the
+    ``DefaultDialect``.
+
+    The ``ConnectionCharacteristic`` class should call upon the ``Dialect`` for
+    the implementation of each method.   The object exists strictly to serve as
+    a dialect visitor that can be placed into the
+    ``DefaultDialect.connection_characteristics`` dictionary where it will take
+    effect for calls to :meth:`_engine.Connection.execution_options` and
+    related APIs.
+
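+    For example, a hedged sketch of a custom characteristic; the
+    ``set_readonly`` / ``get_readonly`` dialect methods shown here are
+    assumptions that a concrete dialect would need to provide::
+
+        class ReadOnlyCharacteristic(ConnectionCharacteristic):
+            transactional = True
+
+            def reset_characteristic(self, dialect, dbapi_conn):
+                dialect.set_readonly(dbapi_conn, False)
+
+            def set_characteristic(self, dialect, dbapi_conn, value):
+                dialect.set_readonly(dbapi_conn, value)
+
+            def get_characteristic(self, dialect, dbapi_conn):
+                return dialect.get_readonly(dbapi_conn)
+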
+    .. versionadded:: 1.4
+
+    """
+
+    __slots__ = ()
+
+    transactional: ClassVar[bool] = False
+
+    @abc.abstractmethod
+    def reset_characteristic(
+        self, dialect: Dialect, dbapi_conn: DBAPIConnection
+    ) -> None:
+        """Reset the characteristic on the DBAPI connection to its default
+        value."""
+
+    @abc.abstractmethod
+    def set_characteristic(
+        self, dialect: Dialect, dbapi_conn: DBAPIConnection, value: Any
+    ) -> None:
+        """set characteristic on the DBAPI connection to a given value."""
+
+    def set_connection_characteristic(
+        self,
+        dialect: Dialect,
+        conn: Connection,
+        dbapi_conn: DBAPIConnection,
+        value: Any,
+    ) -> None:
+        """set characteristic on the :class:`_engine.Connection` to a given
+        value.
+
+        .. versionadded:: 2.0.30 - added to support elements that are local
+           to the :class:`_engine.Connection` itself.
+
+        """
+        self.set_characteristic(dialect, dbapi_conn, value)
+
+    @abc.abstractmethod
+    def get_characteristic(
+        self, dialect: Dialect, dbapi_conn: DBAPIConnection
+    ) -> Any:
+        """Given a DBAPI connection, get the current value of the
+        characteristic.
+
+        """
+
+    def get_connection_characteristic(
+        self, dialect: Dialect, conn: Connection, dbapi_conn: DBAPIConnection
+    ) -> Any:
+        """Given a :class:`_engine.Connection`, get the current value of the
+        characteristic.
+
+        .. versionadded:: 2.0.30 - added to support elements that are local
+           to the :class:`_engine.Connection` itself.
+
+        """
+        return self.get_characteristic(dialect, dbapi_conn)
+
+
+class IsolationLevelCharacteristic(ConnectionCharacteristic):
+    """Manage the isolation level on a DBAPI connection"""
+
+    transactional: ClassVar[bool] = True
+
+    def reset_characteristic(
+        self, dialect: Dialect, dbapi_conn: DBAPIConnection
+    ) -> None:
+        dialect.reset_isolation_level(dbapi_conn)
+
+    def set_characteristic(
+        self, dialect: Dialect, dbapi_conn: DBAPIConnection, value: Any
+    ) -> None:
+        dialect._assert_and_set_isolation_level(dbapi_conn, value)
+
+    def get_characteristic(
+        self, dialect: Dialect, dbapi_conn: DBAPIConnection
+    ) -> Any:
+        return dialect.get_isolation_level(dbapi_conn)
+
+
+class LoggingTokenCharacteristic(ConnectionCharacteristic):
+    """Manage the 'logging_token' option of a :class:`_engine.Connection`.
+
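+    The token is consumed from the ``logging_token`` execution option,
+    e.g.::
+
+        with engine.connect().execution_options(logging_token="myapp") as conn:
+            conn.execute(text("select 1"))
+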
+    .. versionadded:: 2.0.30
+
+    """
+
+    transactional: ClassVar[bool] = False
+
+    def reset_characteristic(
+        self, dialect: Dialect, dbapi_conn: DBAPIConnection
+    ) -> None:
+        pass
+
+    def set_characteristic(
+        self, dialect: Dialect, dbapi_conn: DBAPIConnection, value: Any
+    ) -> None:
+        raise NotImplementedError()
+
+    def set_connection_characteristic(
+        self,
+        dialect: Dialect,
+        conn: Connection,
+        dbapi_conn: DBAPIConnection,
+        value: Any,
+    ) -> None:
+        if value:
+            conn._message_formatter = lambda msg: "[%s] %s" % (value, msg)
+        else:
+            del conn._message_formatter
+
+    def get_characteristic(
+        self, dialect: Dialect, dbapi_conn: DBAPIConnection
+    ) -> Any:
+        raise NotImplementedError()
+
+    def get_connection_characteristic(
+        self, dialect: Dialect, conn: Connection, dbapi_conn: DBAPIConnection
+    ) -> Any:
+        return conn._execution_options.get("logging_token", None)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/create.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/create.py
new file mode 100644
index 00000000..920f620b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/create.py
@@ -0,0 +1,878 @@
+# engine/create.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+import inspect
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import List
+from typing import Optional
+from typing import overload
+from typing import Type
+from typing import Union
+
+from . import base
+from . import url as _url
+from .interfaces import DBAPIConnection
+from .mock import create_mock_engine
+from .. import event
+from .. import exc
+from .. import util
+from ..pool import _AdhocProxiedConnection
+from ..pool import ConnectionPoolEntry
+from ..sql import compiler
+from ..util import immutabledict
+
+if typing.TYPE_CHECKING:
+    from .base import Engine
+    from .interfaces import _ExecuteOptions
+    from .interfaces import _ParamStyle
+    from .interfaces import IsolationLevel
+    from .url import URL
+    from ..log import _EchoFlagType
+    from ..pool import _CreatorFnType
+    from ..pool import _CreatorWRecFnType
+    from ..pool import _ResetStyleArgType
+    from ..pool import Pool
+    from ..util.typing import Literal
+
+
+@overload
+def create_engine(
+    url: Union[str, URL],
+    *,
+    connect_args: Dict[Any, Any] = ...,
+    convert_unicode: bool = ...,
+    creator: Union[_CreatorFnType, _CreatorWRecFnType] = ...,
+    echo: _EchoFlagType = ...,
+    echo_pool: _EchoFlagType = ...,
+    enable_from_linting: bool = ...,
+    execution_options: _ExecuteOptions = ...,
+    future: Literal[True],
+    hide_parameters: bool = ...,
+    implicit_returning: Literal[True] = ...,
+    insertmanyvalues_page_size: int = ...,
+    isolation_level: IsolationLevel = ...,
+    json_deserializer: Callable[..., Any] = ...,
+    json_serializer: Callable[..., Any] = ...,
+    label_length: Optional[int] = ...,
+    logging_name: str = ...,
+    max_identifier_length: Optional[int] = ...,
+    max_overflow: int = ...,
+    module: Optional[Any] = ...,
+    paramstyle: Optional[_ParamStyle] = ...,
+    pool: Optional[Pool] = ...,
+    poolclass: Optional[Type[Pool]] = ...,
+    pool_logging_name: str = ...,
+    pool_pre_ping: bool = ...,
+    pool_size: int = ...,
+    pool_recycle: int = ...,
+    pool_reset_on_return: Optional[_ResetStyleArgType] = ...,
+    pool_timeout: float = ...,
+    pool_use_lifo: bool = ...,
+    plugins: List[str] = ...,
+    query_cache_size: int = ...,
+    use_insertmanyvalues: bool = ...,
+    **kwargs: Any,
+) -> Engine: ...
+
+
+@overload
+def create_engine(url: Union[str, URL], **kwargs: Any) -> Engine: ...
+
+
+@util.deprecated_params(
+    strategy=(
+        "1.4",
+        "The :paramref:`_sa.create_engine.strategy` keyword is deprecated, "
+        "and the only argument accepted is 'mock'; please use "
+        ":func:`.create_mock_engine` going forward.  For general "
+        "customization of create_engine which may have been accomplished "
+        "using strategies, see :class:`.CreateEnginePlugin`.",
+    ),
+    empty_in_strategy=(
+        "1.4",
+        "The :paramref:`_sa.create_engine.empty_in_strategy` keyword is "
+        "deprecated, and no longer has any effect.  All IN expressions "
+        "are now rendered using "
+        'the "expanding parameter" strategy which renders a set of bound'
+        'expressions, or an "empty set" SELECT, at statement execution'
+        "time.",
+    ),
+    implicit_returning=(
+        "2.0",
+        "The :paramref:`_sa.create_engine.implicit_returning` parameter "
+        "is deprecated and will be removed in a future release. ",
+    ),
+)
+def create_engine(url: Union[str, _url.URL], **kwargs: Any) -> Engine:
+    """Create a new :class:`_engine.Engine` instance.
+
+    The standard calling form is to send the :ref:`URL <database_urls>` as the
+    first positional argument, usually a string
+    that indicates database dialect and connection arguments::
+
+        engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")
+
+    .. note::
+
+        Please review :ref:`database_urls` for general guidelines in composing
+        URL strings.  In particular, special characters, such as those often
+        part of passwords, must be URL encoded to be properly parsed.
+
+    Additional keyword arguments may then follow it which
+    establish various options on the resulting :class:`_engine.Engine`
+    and its underlying :class:`.Dialect` and :class:`_pool.Pool`
+    constructs::
+
+        engine = create_engine(
+            "mysql+mysqldb://scott:tiger@hostname/dbname",
+            pool_recycle=3600,
+            echo=True,
+        )
+
+    The string form of the URL is
+    ``dialect[+driver]://user:password@host/dbname[?key=value..]``, where
+    ``dialect`` is a database name such as ``mysql``, ``oracle``,
+    ``postgresql``, etc., and ``driver`` the name of a DBAPI, such as
+    ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc.  Alternatively,
+    the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`.
+
+    ``**kwargs`` takes a wide variety of options which are routed
+    towards their appropriate components.  Arguments may be specific to
+    the :class:`_engine.Engine`, the underlying :class:`.Dialect`,
+    as well as the
+    :class:`_pool.Pool`.  Specific dialects also accept keyword arguments that
+    are unique to that dialect.   Here, we describe the parameters
+    that are common to most :func:`_sa.create_engine()` usage.
+
+    Once established, the newly resulting :class:`_engine.Engine` will
+    request a connection from the underlying :class:`_pool.Pool` once
+    :meth:`_engine.Engine.connect` is called, or a method which depends on it
+    such as :meth:`_engine.Engine.begin` is invoked.   The
+    :class:`_pool.Pool` in turn
+    will establish the first actual DBAPI connection when this request
+    is received.   The :func:`_sa.create_engine` call itself does **not**
+    establish any actual DBAPI connections directly.
+
+    .. seealso::
+
+        :doc:`/core/engines`
+
+        :doc:`/dialects/index`
+
+        :ref:`connections_toplevel`
+
+    :param connect_args: a dictionary of options which will be
+        passed directly to the DBAPI's ``connect()`` method as
+        additional keyword arguments.  See the example
+        at :ref:`custom_dbapi_args`.
+
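+        E.g., a hedged sketch passing a driver-level timeout; the parameter
+        name varies per DBAPI (``connect_timeout`` is psycopg2's spelling)::
+
+            engine = create_engine(
+                "postgresql+psycopg2://scott:tiger@localhost/test",
+                connect_args={"connect_timeout": 10},
+            )
+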
+    :param creator: a callable which returns a DBAPI connection.
+        This creation function will be passed to the underlying
+        connection pool and will be used to create all new database
+        connections. Usage of this function causes connection
+        parameters specified in the URL argument to be bypassed.
+
+        This hook is not as flexible as the newer
+        :meth:`_events.DialectEvents.do_connect` hook which allows complete
+        control over how a connection is made to the database, given the full
+        set of URL arguments and state beforehand.
+
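+        E.g., a minimal sketch calling a DBAPI module directly (psycopg2
+        assumed to be installed)::
+
+            import psycopg2
+
+            def get_conn():
+                return psycopg2.connect(user="scott", host="localhost", dbname="test")
+
+            engine = create_engine("postgresql+psycopg2://", creator=get_conn)
+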
+        .. seealso::
+
+            :meth:`_events.DialectEvents.do_connect` - event hook that allows
+            full control over DBAPI connection mechanics.
+
+            :ref:`custom_dbapi_args`
+
+    :param echo=False: if True, the Engine will log all statements
+        as well as a ``repr()`` of their parameter lists to the default log
+        handler, which defaults to ``sys.stdout`` for output.   If set to the
+        string ``"debug"``, result rows will be printed to the standard output
+        as well. The ``echo`` attribute of ``Engine`` can be modified at any
+        time to turn logging on and off; direct control of logging is also
+        available using the standard Python ``logging`` module.
+
+        .. seealso::
+
+            :ref:`dbengine_logging` - further detail on how to configure
+            logging.
+
+
+    :param echo_pool=False: if True, the connection pool will log
+        informational output such as when connections are invalidated
+        as well as when connections are recycled to the default log handler,
+        which defaults to ``sys.stdout`` for output.   If set to the string
+        ``"debug"``, the logging will include pool checkouts and checkins.
+        Direct control of logging is also available using the standard Python
+        ``logging`` module.
+
+        .. seealso::
+
+            :ref:`dbengine_logging` - further detail on how to configure
+            logging.
+
+
+    :param empty_in_strategy:   No longer used; SQLAlchemy now uses
+        "empty set" behavior for IN in all cases.
+
+    :param enable_from_linting: defaults to True.  Will emit a warning
+        if a given SELECT statement is found to have un-linked FROM elements
+        which would cause a cartesian product.
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :ref:`change_4737`
+
+    :param execution_options: Dictionary execution options which will
+        be applied to all connections.  See
+        :meth:`~sqlalchemy.engine.Connection.execution_options`
+
+    :param future: Use the 2.0 style :class:`_engine.Engine` and
+        :class:`_engine.Connection` API.
+
+        As of SQLAlchemy 2.0, this parameter is present for backwards
+        compatibility only and must remain at its default value of ``True``.
+
+        The :paramref:`_sa.create_engine.future` parameter will be
+        deprecated in a subsequent 2.x release and eventually removed.
+
+        .. versionadded:: 1.4
+
+        .. versionchanged:: 2.0 All :class:`_engine.Engine` objects are
+           "future" style engines and there is no longer a ``future=False``
+           mode of operation.
+
+        .. seealso::
+
+            :ref:`migration_20_toplevel`
+
+    :param hide_parameters: Boolean, when set to True, SQL statement parameters
+        will not be displayed in INFO logging nor will they be formatted into
+        the string representation of :class:`.StatementError` objects.
+
+        .. versionadded:: 1.3.8
+
+        .. seealso::
+
+            :ref:`dbengine_logging` - further detail on how to configure
+            logging.
+
+    :param implicit_returning=True:  Legacy parameter that may only be set
+        to True. In SQLAlchemy 2.0, this parameter does nothing. In order to
+        disable "implicit returning" for statements invoked by the ORM,
+        configure this on a per-table basis using the
+        :paramref:`.Table.implicit_returning` parameter.
+
+
+    :param insertmanyvalues_page_size: number of rows to format into an
+     INSERT statement when the statement uses "insertmanyvalues" mode, which is
+     a paged form of bulk insert that is used for many backends when using
+     :term:`executemany` execution typically in conjunction with RETURNING.
+     Defaults to 1000, but may also be subject to dialect-specific limiting
+     factors which may override this value on a per-statement basis.
+
+     .. versionadded:: 2.0
+
+     .. seealso::
+
+        :ref:`engine_insertmanyvalues`
+
+        :ref:`engine_insertmanyvalues_page_size`
+
+        :paramref:`_engine.Connection.execution_options.insertmanyvalues_page_size`
+
+    :param isolation_level: optional string name of an isolation level
+        which will be set on all new connections unconditionally.
+        Isolation levels are typically some subset of the string names
+        ``"SERIALIZABLE"``, ``"REPEATABLE READ"``,
+        ``"READ COMMITTED"``, ``"READ UNCOMMITTED"`` and ``"AUTOCOMMIT"``
+        based on backend.
+
+        The :paramref:`_sa.create_engine.isolation_level` parameter is
+        in contrast to the
+        :paramref:`.Connection.execution_options.isolation_level`
+        execution option, which may be set on an individual
+        :class:`.Connection`, as well as the same parameter passed to
+        :meth:`.Engine.execution_options`, where it may be used to create
+        multiple engines with different isolation levels that share a common
+        connection pool and dialect.
+
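+        E.g.::
+
+            engine = create_engine(
+                "postgresql+psycopg2://scott:tiger@localhost/test",
+                isolation_level="REPEATABLE READ",
+            )
+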
+        .. versionchanged:: 2.0 The
+           :paramref:`_sa.create_engine.isolation_level`
+           parameter has been generalized to work on all dialects which support
+           the concept of isolation level, and is provided as a more succinct,
+           up front configuration switch in contrast to the execution option
+           which is more of an ad-hoc programmatic option.
+
+        .. seealso::
+
+            :ref:`dbapi_autocommit`
+
+    :param json_deserializer: for dialects that support the
+        :class:`_types.JSON`
+        datatype, this is a Python callable that will convert a JSON string
+        to a Python object.  By default, the Python ``json.loads`` function is
+        used.
+
+        .. versionchanged:: 1.3.7  The SQLite dialect renamed this from
+           ``_json_deserializer``.
+
+    :param json_serializer: for dialects that support the :class:`_types.JSON`
+        datatype, this is a Python callable that will render a given object
+        as JSON.   By default, the Python ``json.dumps`` function is used.
+
+        .. versionchanged:: 1.3.7  The SQLite dialect renamed this from
+           ``_json_serializer``.
+
+
+    :param label_length=None: optional integer value which limits
+        the size of dynamically generated column labels to that many
+        characters. If less than 6, labels are generated as
+        "_(counter)". If ``None``, the value of
+        ``dialect.max_identifier_length``, which may be affected via the
+        :paramref:`_sa.create_engine.max_identifier_length` parameter,
+        is used instead.   The value of
+        :paramref:`_sa.create_engine.label_length`
+        may not be larger than that of
+        :paramref:`_sa.create_engine.max_identifier_length`.
+
+        .. seealso::
+
+            :paramref:`_sa.create_engine.max_identifier_length`
+
+    :param logging_name:  String identifier which will be used within
+        the "name" field of logging records generated within the
+        "sqlalchemy.engine" logger. Defaults to a hexstring of the
+        object's id.
+
+        .. seealso::
+
+            :ref:`dbengine_logging` - further detail on how to configure
+            logging.
+
+            :paramref:`_engine.Connection.execution_options.logging_token`
+
+    :param max_identifier_length: integer; override the max_identifier_length
+        determined by the dialect.  If ``None`` or zero, has no effect.  This
+        is the database's configured maximum number of characters that may be
+        used in a SQL identifier such as a table name, column name, or label
+        name. All dialects determine this value automatically, however in the
+        case of a new database version for which this value has changed but
+        SQLAlchemy's dialect has not been adjusted, the value may be passed
+        here.
+
+        .. versionadded:: 1.3.9
+
+        .. seealso::
+
+            :paramref:`_sa.create_engine.label_length`
+
+    :param max_overflow=10: the number of connections to allow in
+        connection pool "overflow", that is connections that can be
+        opened above and beyond the pool_size setting, which defaults
+        to five. this is only used with :class:`~sqlalchemy.pool.QueuePool`.
+
+    :param module=None: reference to a Python module object (the module
+        itself, not its string name).  Specifies an alternate DBAPI module to
+        be used by the engine's dialect.  Each sub-dialect references a
+        specific DBAPI which will be imported before first connect.  This
+        parameter causes the import to be bypassed, and the given module to
+        be used instead. Can be used for testing of DBAPIs as well as to
+        inject "mock" DBAPI implementations into the :class:`_engine.Engine`.
+
+    :param paramstyle=None: The `paramstyle <https://legacy.python.org/dev/peps/pep-0249/#paramstyle>`_
+        to use when rendering bound parameters.  This style defaults to the
+        one recommended by the DBAPI itself, which is retrieved from the
+        ``.paramstyle`` attribute of the DBAPI.  However, most DBAPIs accept
+        more than one paramstyle, and in particular it may be desirable
+        to change a "named" paramstyle into a "positional" one, or vice versa.
+        When this attribute is passed, it should be one of the values
+        ``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or
+        ``"pyformat"``, and should correspond to a parameter style known
+        to be supported by the DBAPI in use.
+
+    :param pool=None: an already-constructed instance of
+        :class:`~sqlalchemy.pool.Pool`, such as a
+        :class:`~sqlalchemy.pool.QueuePool` instance. If non-None, this
+        pool will be used directly as the underlying connection pool
+        for the engine, bypassing whatever connection parameters are
+        present in the URL argument. For information on constructing
+        connection pools manually, see :ref:`pooling_toplevel`.
+
+    :param poolclass=None: a :class:`~sqlalchemy.pool.Pool`
+        subclass, which will be used to create a connection pool
+        instance using the connection parameters given in the URL. Note
+        this differs from ``pool`` in that you don't actually
+        instantiate the pool in this case, you just indicate what type
+        of pool to be used.
+
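+        E.g., a minimal sketch disabling pooling entirely::
+
+            from sqlalchemy.pool import NullPool
+
+            engine = create_engine(
+                "postgresql+psycopg2://scott:tiger@localhost/test",
+                poolclass=NullPool,
+            )
+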
+    :param pool_logging_name:  String identifier which will be used within
+       the "name" field of logging records generated within the
+       "sqlalchemy.pool" logger. Defaults to a hexstring of the object's
+       id.
+
+       .. seealso::
+
+            :ref:`dbengine_logging` - further detail on how to configure
+            logging.
+
+    :param pool_pre_ping: boolean, if True will enable the connection pool
+        "pre-ping" feature that tests connections for liveness upon
+        each checkout.
+
+        .. versionadded:: 1.2
+
+        .. seealso::
+
+            :ref:`pool_disconnects_pessimistic`
+
+    :param pool_size=5: the number of connections to keep open
+        inside the connection pool. This is used with
+        :class:`~sqlalchemy.pool.QueuePool` as
+        well as :class:`~sqlalchemy.pool.SingletonThreadPool`.  With
+        :class:`~sqlalchemy.pool.QueuePool`, a ``pool_size`` setting
+        of 0 indicates no limit; to disable pooling, set ``poolclass`` to
+        :class:`~sqlalchemy.pool.NullPool` instead.
+
+    :param pool_recycle=-1: this setting causes the pool to recycle
+        connections after the given number of seconds has passed. It
+        defaults to -1, or no timeout. For example, setting to 3600
+        means connections will be recycled after one hour. Note that
+        MySQL in particular will disconnect automatically if no
+        activity is detected on a connection for eight hours (although
+        this is configurable with the MySQLDB connection itself and the
+        server configuration as well).
+
+        .. seealso::
+
+            :ref:`pool_setting_recycle`
+
+    :param pool_reset_on_return='rollback': set the
+        :paramref:`_pool.Pool.reset_on_return` parameter of the underlying
+        :class:`_pool.Pool` object, which can be set to the values
+        ``"rollback"``, ``"commit"``, or ``None``.
+
+        .. seealso::
+
+            :ref:`pool_reset_on_return`
+
+    :param pool_timeout=30: number of seconds to wait before giving
+        up on getting a connection from the pool. This is only used
+        with :class:`~sqlalchemy.pool.QueuePool`. This can be a float but is
+        subject to the limitations of Python time functions which may not be
+        reliable in the tens of milliseconds.
+
+        .. note: don't use 30.0 above, it seems to break with the :param tag
+
+    :param pool_use_lifo=False: use LIFO (last-in-first-out) when retrieving
+        connections from :class:`.QueuePool` instead of FIFO
+        (first-in-first-out). Using LIFO, a server-side timeout scheme can
+        reduce the number of connections used during non-peak periods of
+        use.  When planning for server-side timeouts, ensure that a recycle
+        or pre-ping strategy is in use to gracefully handle stale
+        connections.
+
+        .. versionadded:: 1.3
+
+        .. seealso::
+
+            :ref:`pool_use_lifo`
+
+            :ref:`pool_disconnects`
+
+    :param plugins: string list of plugin names to load.  See
+        :class:`.CreateEnginePlugin` for background.
+
+        .. versionadded:: 1.2.3
+
+    :param query_cache_size: size of the cache used to cache the SQL string
+     form of queries.  Set to zero to disable caching.
+
+     The cache is pruned of its least recently used items when its size reaches
+     N * 1.5.  Defaults to 500, meaning the cache will always store at least
+     500 SQL statements when filled, and will grow up to 750 items at which
+     point it is pruned back down to 500 by removing the 250 least recently
+     used items.
+
+     Caching is accomplished on a per-statement basis by generating a
+     cache key that represents the statement's structure, then generating
+     string SQL for the current dialect only if that key is not present
+     in the cache.   All statements support caching, however some features
+     such as an INSERT with a large set of parameters will intentionally
+     bypass the cache.   SQL logging will indicate statistics for each
+     statement whether or not it was pulled from the cache.
+
+     .. note:: some ORM functions related to unit-of-work persistence as well
+        as some attribute loading strategies will make use of individual
+        per-mapper caches outside of the main cache.
+
+
+     .. seealso::
+
+        :ref:`sql_caching`
+
+     .. versionadded:: 1.4
+
+    :param use_insertmanyvalues: True by default, use the "insertmanyvalues"
+     execution style for INSERT..RETURNING statements by default.
+
+     .. versionadded:: 2.0
+
+     .. seealso::
+
+        :ref:`engine_insertmanyvalues`
+
+    """  # noqa
+
+    if "strategy" in kwargs:
+        strat = kwargs.pop("strategy")
+        if strat == "mock":
+            # this case is deprecated
+            return create_mock_engine(url, **kwargs)  # type: ignore
+        else:
+            raise exc.ArgumentError("unknown strategy: %r" % strat)
+
+    kwargs.pop("empty_in_strategy", None)
+
+    # create url.URL object
+    u = _url.make_url(url)
+
+    u, plugins, kwargs = u._instantiate_plugins(kwargs)
+
+    entrypoint = u._get_entrypoint()
+    _is_async = kwargs.pop("_is_async", False)
+    if _is_async:
+        dialect_cls = entrypoint.get_async_dialect_cls(u)
+    else:
+        dialect_cls = entrypoint.get_dialect_cls(u)
+
+    if kwargs.pop("_coerce_config", False):
+
+        def pop_kwarg(key: str, default: Optional[Any] = None) -> Any:
+            value = kwargs.pop(key, default)
+            if key in dialect_cls.engine_config_types:
+                value = dialect_cls.engine_config_types[key](value)
+            return value
+
+    else:
+        pop_kwarg = kwargs.pop  # type: ignore
+
+    dialect_args = {}
+    # consume dialect arguments from kwargs
+    for k in util.get_cls_kwargs(dialect_cls):
+        if k in kwargs:
+            dialect_args[k] = pop_kwarg(k)
+
+    dbapi = kwargs.pop("module", None)
+    if dbapi is None:
+        dbapi_args = {}
+
+        if "import_dbapi" in dialect_cls.__dict__:
+            dbapi_meth = dialect_cls.import_dbapi
+
+        elif hasattr(dialect_cls, "dbapi") and inspect.ismethod(
+            dialect_cls.dbapi
+        ):
+            util.warn_deprecated(
+                "The dbapi() classmethod on dialect classes has been "
+                "renamed to import_dbapi().  Implement an import_dbapi() "
+                f"classmethod directly on class {dialect_cls} to remove this "
+                "warning; the old .dbapi() classmethod may be maintained for "
+                "backwards compatibility.",
+                "2.0",
+            )
+            dbapi_meth = dialect_cls.dbapi
+        else:
+            dbapi_meth = dialect_cls.import_dbapi
+
+        for k in util.get_func_kwargs(dbapi_meth):
+            if k in kwargs:
+                dbapi_args[k] = pop_kwarg(k)
+        dbapi = dbapi_meth(**dbapi_args)
+
+    dialect_args["dbapi"] = dbapi
+
+    dialect_args.setdefault("compiler_linting", compiler.NO_LINTING)
+    enable_from_linting = kwargs.pop("enable_from_linting", True)
+    if enable_from_linting:
+        dialect_args["compiler_linting"] ^= compiler.COLLECT_CARTESIAN_PRODUCTS
+
+    for plugin in plugins:
+        plugin.handle_dialect_kwargs(dialect_cls, dialect_args)
+
+    # create dialect
+    dialect = dialect_cls(**dialect_args)
+
+    # assemble connection arguments
+    (cargs_tup, cparams) = dialect.create_connect_args(u)
+    cparams.update(pop_kwarg("connect_args", {}))
+
+    if "async_fallback" in cparams and util.asbool(cparams["async_fallback"]):
+        util.warn_deprecated(
+            "The async_fallback dialect argument is deprecated and will be "
+            "removed in SQLAlchemy 2.1.",
+            "2.0",
+        )
+
+    cargs = list(cargs_tup)  # allow mutability
+
+    # look for existing pool or create
+    pool = pop_kwarg("pool", None)
+    if pool is None:
+
+        def connect(
+            connection_record: Optional[ConnectionPoolEntry] = None,
+        ) -> DBAPIConnection:
+            if dialect._has_events:
+                for fn in dialect.dispatch.do_connect:
+                    connection = cast(
+                        DBAPIConnection,
+                        fn(dialect, connection_record, cargs, cparams),
+                    )
+                    if connection is not None:
+                        return connection
+
+            return dialect.connect(*cargs, **cparams)
+
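+        # illustrative sketch, not part of this module: user code can
+        # supply or mutate connection parameters through the do_connect
+        # event consulted by the inner connect() above, e.g.:
+        #
+        #     @event.listens_for(engine, "do_connect")
+        #     def provide_token(dialect, conn_rec, cargs, cparams):
+        #         cparams["token"] = get_token()  # hypothetical helper
+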
+        creator = pop_kwarg("creator", connect)
+
+        poolclass = pop_kwarg("poolclass", None)
+        if poolclass is None:
+            poolclass = dialect.get_dialect_pool_class(u)
+        pool_args = {"dialect": dialect}
+
+        # consume pool arguments from kwargs, translating a few of
+        # the arguments
+        for k in util.get_cls_kwargs(poolclass):
+            tk = _pool_translate_kwargs.get(k, k)
+            if tk in kwargs:
+                pool_args[k] = pop_kwarg(tk)
+
+        for plugin in plugins:
+            plugin.handle_pool_kwargs(poolclass, pool_args)
+
+        pool = poolclass(creator, **pool_args)
+    else:
+        pool._dialect = dialect
+
+    if (
+        hasattr(pool, "_is_asyncio")
+        and pool._is_asyncio is not dialect.is_async
+    ):
+        raise exc.ArgumentError(
+            f"Pool class {pool.__class__.__name__} cannot be "
+            f"used with {'non-' if not dialect.is_async else ''}"
+            "asyncio engine",
+            code="pcls",
+        )
+
+    # create engine.
+    if not pop_kwarg("future", True):
+        raise exc.ArgumentError(
+            "The 'future' parameter passed to "
+            "create_engine() may only be set to True."
+        )
+
+    engineclass = base.Engine
+
+    engine_args = {}
+    for k in util.get_cls_kwargs(engineclass):
+        if k in kwargs:
+            engine_args[k] = pop_kwarg(k)
+
+    # internal flags used by the test suite for instrumenting / proxying
+    # engines with mocks etc.
+    _initialize = kwargs.pop("_initialize", True)
+
+    # all kwargs should be consumed
+    if kwargs:
+        raise TypeError(
+            "Invalid argument(s) %s sent to create_engine(), "
+            "using configuration %s/%s/%s.  Please check that the "
+            "keyword arguments are appropriate for this combination "
+            "of components."
+            % (
+                ",".join("'%s'" % k for k in kwargs),
+                dialect.__class__.__name__,
+                pool.__class__.__name__,
+                engineclass.__name__,
+            )
+        )
+
+    engine = engineclass(pool, dialect, u, **engine_args)
+
+    if _initialize:
+        do_on_connect = dialect.on_connect_url(u)
+        if do_on_connect:
+
+            def on_connect(
+                dbapi_connection: DBAPIConnection,
+                connection_record: ConnectionPoolEntry,
+            ) -> None:
+                assert do_on_connect is not None
+                do_on_connect(dbapi_connection)
+
+            event.listen(pool, "connect", on_connect)
+
+        builtin_on_connect = dialect._builtin_onconnect()
+        if builtin_on_connect:
+            event.listen(pool, "connect", builtin_on_connect)
+
+        def first_connect(
+            dbapi_connection: DBAPIConnection,
+            connection_record: ConnectionPoolEntry,
+        ) -> None:
+            c = base.Connection(
+                engine,
+                connection=_AdhocProxiedConnection(
+                    dbapi_connection, connection_record
+                ),
+                _has_events=False,
+                # reconnecting will be a reentrant condition, so if the
+                # connection goes away, Connection is then closed
+                _allow_revalidate=False,
+                # don't trigger the autobegin sequence
+                # within the up-front dialect checks
+                _allow_autobegin=False,
+            )
+            c._execution_options = util.EMPTY_DICT
+
+            try:
+                dialect.initialize(c)
+            finally:
+                # note that "invalidated" and "closed" are mutually
+                # exclusive in 1.4 Connection.
+                if not c.invalidated and not c.closed:
+                    # transaction is rolled back otherwise, tested by
+                    # test/dialect/postgresql/test_dialect.py
+                    # ::MiscBackendTest::test_initial_transaction_state
+                    dialect.do_rollback(c.connection)
+
+        # previously, the "first_connect" event was used here, which was then
+        # scaled back if the "on_connect" handler was present.  now,
+        # since "on_connect" is virtually always present, just use
+        # "connect" event with once_unless_exception in all cases so that
+        # the connection event flow is consistent in all cases.
+        event.listen(
+            pool, "connect", first_connect, _once_unless_exception=True
+        )
+
+    dialect_cls.engine_created(engine)
+    if entrypoint is not dialect_cls:
+        entrypoint.engine_created(engine)
+
+    for plugin in plugins:
+        plugin.engine_created(engine)
+
+    return engine
+
+
+def engine_from_config(
+    configuration: Dict[str, Any], prefix: str = "sqlalchemy.", **kwargs: Any
+) -> Engine:
+    """Create a new Engine instance using a configuration dictionary.
+
+    The dictionary is typically produced from a config file.
+
+    The keys of interest to ``engine_from_config()`` should be prefixed, e.g.
+    ``sqlalchemy.url``, ``sqlalchemy.echo``, etc.  The 'prefix' argument
+    indicates the prefix to be searched for.  Each matching key (after the
+    prefix is stripped) is treated as though it were the corresponding keyword
+    argument to a :func:`_sa.create_engine` call.
+
+    The only required key is (assuming the default prefix) ``sqlalchemy.url``,
+    which provides the :ref:`database URL <database_urls>`.
+
+    A select set of keyword arguments will be "coerced" to their
+    expected type based on string values.  The set of arguments
+    is extensible per-dialect using the ``engine_config_types`` accessor.
+
+    :param configuration: A dictionary (typically produced from a config file,
+        but this is not a requirement).  Items whose keys start with the value
+        of 'prefix' will have that prefix stripped, and will then be passed to
+        :func:`_sa.create_engine`.
+
+    :param prefix: Prefix to match and then strip from keys
+        in 'configuration'.
+
+    :param kwargs: Each keyword argument to ``engine_from_config()`` itself
+        overrides the corresponding item taken from the 'configuration'
+        dictionary.  Keyword arguments should *not* be prefixed.
+
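+    E.g., a minimal sketch using an in-memory dictionary in place of a
+    parsed configuration file; all values shown are illustrative::
+
+        config = {
+            "sqlalchemy.url": "postgresql://scott:tiger@localhost/test",
+            "sqlalchemy.echo": "true",
+            "sqlalchemy.pool_recycle": "3600",
+        }
+        engine = engine_from_config(config)
+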
+    """
+
+    options = {
+        key[len(prefix) :]: configuration[key]
+        for key in configuration
+        if key.startswith(prefix)
+    }
+    options["_coerce_config"] = True
+    options.update(kwargs)
+    url = options.pop("url")
+    return create_engine(url, **options)
+
+
+@overload
+def create_pool_from_url(
+    url: Union[str, URL],
+    *,
+    poolclass: Optional[Type[Pool]] = ...,
+    logging_name: str = ...,
+    pre_ping: bool = ...,
+    size: int = ...,
+    recycle: int = ...,
+    reset_on_return: Optional[_ResetStyleArgType] = ...,
+    timeout: float = ...,
+    use_lifo: bool = ...,
+    **kwargs: Any,
+) -> Pool: ...
+
+
+@overload
+def create_pool_from_url(url: Union[str, URL], **kwargs: Any) -> Pool: ...
+
+
+def create_pool_from_url(url: Union[str, URL], **kwargs: Any) -> Pool:
+    """Create a pool instance from the given url.
+
+    If ``poolclass`` is not provided the pool class used
+    is selected using the dialect specified in the URL.
+
+    The arguments passed to :func:`_sa.create_pool_from_url` are
+    identical to the pool argument passed to the :func:`_sa.create_engine`
+    function.
+
+    .. versionadded:: 2.0.10
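+
+    E.g., a brief sketch (the URL is illustrative)::
+
+        pool = create_pool_from_url(
+            "postgresql://scott:tiger@localhost/test", pre_ping=True
+        )
+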
+    """
+
+    for key in _pool_translate_kwargs:
+        if key in kwargs:
+            kwargs[_pool_translate_kwargs[key]] = kwargs.pop(key)
+
+    engine = create_engine(url, **kwargs, _initialize=False)
+    return engine.pool
+
+
+_pool_translate_kwargs = immutabledict(
+    {
+        "logging_name": "pool_logging_name",
+        "echo": "echo_pool",
+        "timeout": "pool_timeout",
+        "recycle": "pool_recycle",
+        "events": "pool_events",  # deprecated
+        "reset_on_return": "pool_reset_on_return",
+        "pre_ping": "pool_pre_ping",
+        "use_lifo": "pool_use_lifo",
+    }
+)
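+
+# e.g., create_pool_from_url(url, recycle=3600) is translated via this map
+# into the create_engine() keyword pool_recycle=3600 before being consumed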
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/cursor.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/cursor.py
new file mode 100644
index 00000000..ff14ad8e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/cursor.py
@@ -0,0 +1,2176 @@
+# engine/cursor.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+"""Define cursor-specific result set constructs including
+:class:`.CursorResult`."""
+
+
+from __future__ import annotations
+
+import collections
+import functools
+import operator
+import typing
+from typing import Any
+from typing import cast
+from typing import ClassVar
+from typing import Dict
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import NoReturn
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from .result import IteratorResult
+from .result import MergedResult
+from .result import Result
+from .result import ResultMetaData
+from .result import SimpleResultMetaData
+from .result import tuplegetter
+from .row import Row
+from .. import exc
+from .. import util
+from ..sql import elements
+from ..sql import sqltypes
+from ..sql import util as sql_util
+from ..sql.base import _generative
+from ..sql.compiler import ResultColumnsEntry
+from ..sql.compiler import RM_NAME
+from ..sql.compiler import RM_OBJECTS
+from ..sql.compiler import RM_RENDERED_NAME
+from ..sql.compiler import RM_TYPE
+from ..sql.type_api import TypeEngine
+from ..util import compat
+from ..util.typing import Literal
+from ..util.typing import Self
+
+
+if typing.TYPE_CHECKING:
+    from .base import Connection
+    from .default import DefaultExecutionContext
+    from .interfaces import _DBAPICursorDescription
+    from .interfaces import DBAPICursor
+    from .interfaces import Dialect
+    from .interfaces import ExecutionContext
+    from .result import _KeyIndexType
+    from .result import _KeyMapRecType
+    from .result import _KeyMapType
+    from .result import _KeyType
+    from .result import _ProcessorsType
+    from .result import _TupleGetterType
+    from ..sql.type_api import _ResultProcessorType
+
+
+_T = TypeVar("_T", bound=Any)
+
+
+# metadata entry tuple indexes.
+# using raw tuple is faster than namedtuple.
+# these match up to the positions in
+# _CursorKeyMapRecType
+MD_INDEX: Literal[0] = 0
+"""integer index in cursor.description
+
+"""
+
+MD_RESULT_MAP_INDEX: Literal[1] = 1
+"""integer index in compiled._result_columns"""
+
+MD_OBJECTS: Literal[2] = 2
+"""other string keys and ColumnElement obj that can match.
+
+This comes from compiler.RM_OBJECTS / compiler.ResultColumnsEntry.objects
+
+"""
+
+MD_LOOKUP_KEY: Literal[3] = 3
+"""string key we usually expect for key-based lookup
+
+this comes from compiler.RM_NAME / compiler.ResultColumnsEntry.name
+"""
+
+
+MD_RENDERED_NAME: Literal[4] = 4
+"""name that is usually in cursor.description
+
+this comes from compiler.RENDERED_NAME / compiler.ResultColumnsEntry.keyname
+"""
+
+
+MD_PROCESSOR: Literal[5] = 5
+"""callable to process a result value into a row"""
+
+MD_UNTRANSLATED: Literal[6] = 6
+"""raw name from cursor.description"""
+
+
+_CursorKeyMapRecType = Tuple[
+    Optional[int],  # MD_INDEX, None means the record is ambiguously named
+    int,  # MD_RESULT_MAP_INDEX
+    List[Any],  # MD_OBJECTS
+    str,  # MD_LOOKUP_KEY
+    str,  # MD_RENDERED_NAME
+    Optional["_ResultProcessorType[Any]"],  # MD_PROCESSOR
+    Optional[str],  # MD_UNTRANSLATED
+]
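+# e.g., a hypothetical record for a column named "user_id" at cursor
+# position 0 might look like:
+# (0, 0, [<Column user_id>], "user_id", "user_id", <int processor>, None)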
+
+_CursorKeyMapType = Mapping["_KeyType", _CursorKeyMapRecType]
+
+# same as _CursorKeyMapRecType except the MD_INDEX value is definitely
+# not None
+_NonAmbigCursorKeyMapRecType = Tuple[
+    int,
+    int,
+    List[Any],
+    str,
+    str,
+    Optional["_ResultProcessorType[Any]"],
+    str,
+]
+
+
+class CursorResultMetaData(ResultMetaData):
+    """Result metadata for DBAPI cursors."""
+
+    __slots__ = (
+        "_keymap",
+        "_processors",
+        "_keys",
+        "_keymap_by_result_column_idx",
+        "_tuplefilter",
+        "_translated_indexes",
+        "_safe_for_cache",
+        "_unpickled",
+        "_key_to_index",
+        # don't need _unique_filters support here for now.  Can be added
+        # if a need arises.
+    )
+
+    _keymap: _CursorKeyMapType
+    _processors: _ProcessorsType
+    _keymap_by_result_column_idx: Optional[Dict[int, _KeyMapRecType]]
+    _unpickled: bool
+    _safe_for_cache: bool
+    _translated_indexes: Optional[List[int]]
+
+    returns_rows: ClassVar[bool] = True
+
+    def _has_key(self, key: Any) -> bool:
+        return key in self._keymap
+
+    def _for_freeze(self) -> ResultMetaData:
+        return SimpleResultMetaData(
+            self._keys,
+            extra=[self._keymap[key][MD_OBJECTS] for key in self._keys],
+        )
+
+    def _make_new_metadata(
+        self,
+        *,
+        unpickled: bool,
+        processors: _ProcessorsType,
+        keys: Sequence[str],
+        keymap: _KeyMapType,
+        tuplefilter: Optional[_TupleGetterType],
+        translated_indexes: Optional[List[int]],
+        safe_for_cache: bool,
+        keymap_by_result_column_idx: Any,
+    ) -> CursorResultMetaData:
+        new_obj = self.__class__.__new__(self.__class__)
+        new_obj._unpickled = unpickled
+        new_obj._processors = processors
+        new_obj._keys = keys
+        new_obj._keymap = keymap
+        new_obj._tuplefilter = tuplefilter
+        new_obj._translated_indexes = translated_indexes
+        new_obj._safe_for_cache = safe_for_cache
+        new_obj._keymap_by_result_column_idx = keymap_by_result_column_idx
+        new_obj._key_to_index = self._make_key_to_index(keymap, MD_INDEX)
+        return new_obj
+
+    def _remove_processors(self) -> CursorResultMetaData:
+        assert not self._tuplefilter
+        return self._make_new_metadata(
+            unpickled=self._unpickled,
+            processors=[None] * len(self._processors),
+            tuplefilter=None,
+            translated_indexes=None,
+            keymap={
+                key: value[0:5] + (None,) + value[6:]
+                for key, value in self._keymap.items()
+            },
+            keys=self._keys,
+            safe_for_cache=self._safe_for_cache,
+            keymap_by_result_column_idx=self._keymap_by_result_column_idx,
+        )
+
+    def _splice_horizontally(
+        self, other: CursorResultMetaData
+    ) -> CursorResultMetaData:
+        assert not self._tuplefilter
+
+        keymap = dict(self._keymap)
+        offset = len(self._keys)
+        keymap.update(
+            {
+                key: (
+                    # int index should be None for ambiguous key
+                    (
+                        value[0] + offset
+                        if value[0] is not None and key not in keymap
+                        else None
+                    ),
+                    value[1] + offset,
+                    *value[2:],
+                )
+                for key, value in other._keymap.items()
+            }
+        )
+        return self._make_new_metadata(
+            unpickled=self._unpickled,
+            processors=self._processors + other._processors,  # type: ignore
+            tuplefilter=None,
+            translated_indexes=None,
+            keys=self._keys + other._keys,  # type: ignore
+            keymap=keymap,
+            safe_for_cache=self._safe_for_cache,
+            keymap_by_result_column_idx={
+                metadata_entry[MD_RESULT_MAP_INDEX]: metadata_entry
+                for metadata_entry in keymap.values()
+            },
+        )
+
+    def _reduce(self, keys: Sequence[_KeyIndexType]) -> ResultMetaData:
+        recs = list(self._metadata_for_keys(keys))
+
+        indexes = [rec[MD_INDEX] for rec in recs]
+        new_keys: List[str] = [rec[MD_LOOKUP_KEY] for rec in recs]
+
+        if self._translated_indexes:
+            indexes = [self._translated_indexes[idx] for idx in indexes]
+        tup = tuplegetter(*indexes)
+        new_recs = [(index,) + rec[1:] for index, rec in enumerate(recs)]
+
+        keymap = {rec[MD_LOOKUP_KEY]: rec for rec in new_recs}
+        # TODO: need unit test for:
+        # result = connection.execute("raw sql, no columns").scalars()
+        # without the "or ()" it's failing because MD_OBJECTS is None
+        keymap.update(
+            (e, new_rec)
+            for new_rec in new_recs
+            for e in new_rec[MD_OBJECTS] or ()
+        )
+
+        return self._make_new_metadata(
+            unpickled=self._unpickled,
+            processors=self._processors,
+            keys=new_keys,
+            tuplefilter=tup,
+            translated_indexes=indexes,
+            keymap=keymap,  # type: ignore[arg-type]
+            safe_for_cache=self._safe_for_cache,
+            keymap_by_result_column_idx=self._keymap_by_result_column_idx,
+        )
+
+    def _adapt_to_context(self, context: ExecutionContext) -> ResultMetaData:
+        """When using a cached Compiled construct that has a _result_map,
+        for a new statement that used the cached Compiled, we need to ensure
+        the keymap has the Column objects from our new statement as keys.
+        So here we rewrite keymap with new entries for the new columns
+        as matched to those of the cached statement.
+
+        """
+
+        if not context.compiled or not context.compiled._result_columns:
+            return self
+
+        compiled_statement = context.compiled.statement
+        invoked_statement = context.invoked_statement
+
+        if TYPE_CHECKING:
+            assert isinstance(invoked_statement, elements.ClauseElement)
+
+        if compiled_statement is invoked_statement:
+            return self
+
+        assert invoked_statement is not None
+
+        # this is the most common path for Core statements when
+        # caching is used.  In ORM use, this codepath is not really used
+        # as the _result_disable_adapt_to_context execution option is
+        # set by the ORM.
+
+        # make a copy and add the columns from the invoked statement
+        # to the result map.
+
+        keymap_by_position = self._keymap_by_result_column_idx
+
+        if keymap_by_position is None:
+            # first retrieval from cache; this map will not be set up yet,
+            # initialize lazily
+            keymap_by_position = self._keymap_by_result_column_idx = {
+                metadata_entry[MD_RESULT_MAP_INDEX]: metadata_entry
+                for metadata_entry in self._keymap.values()
+            }
+
+        assert not self._tuplefilter
+        return self._make_new_metadata(
+            keymap=compat.dict_union(
+                self._keymap,
+                {
+                    new: keymap_by_position[idx]
+                    for idx, new in enumerate(
+                        invoked_statement._all_selected_columns
+                    )
+                    if idx in keymap_by_position
+                },
+            ),
+            unpickled=self._unpickled,
+            processors=self._processors,
+            tuplefilter=None,
+            translated_indexes=None,
+            keys=self._keys,
+            safe_for_cache=self._safe_for_cache,
+            keymap_by_result_column_idx=self._keymap_by_result_column_idx,
+        )
+
+    def __init__(
+        self,
+        parent: CursorResult[Any],
+        cursor_description: _DBAPICursorDescription,
+    ):
+        context = parent.context
+        self._tuplefilter = None
+        self._translated_indexes = None
+        self._safe_for_cache = self._unpickled = False
+
+        if context.result_column_struct:
+            (
+                result_columns,
+                cols_are_ordered,
+                textual_ordered,
+                ad_hoc_textual,
+                loose_column_name_matching,
+            ) = context.result_column_struct
+            num_ctx_cols = len(result_columns)
+        else:
+            result_columns = cols_are_ordered = (  # type: ignore
+                num_ctx_cols
+            ) = ad_hoc_textual = loose_column_name_matching = (
+                textual_ordered
+            ) = False
+
+        # merge cursor.description with the column info
+        # present in the compiled structure, if any
+        raw = self._merge_cursor_description(
+            context,
+            cursor_description,
+            result_columns,
+            num_ctx_cols,
+            cols_are_ordered,
+            textual_ordered,
+            ad_hoc_textual,
+            loose_column_name_matching,
+        )
+
+        # processors in key order which are used when building up
+        # a row
+        self._processors = [
+            metadata_entry[MD_PROCESSOR] for metadata_entry in raw
+        ]
+
+        # this is used when using this ResultMetaData in a Core-only cache
+        # retrieval context.  it's initialized on first cache retrieval
+        # when the _result_disable_adapt_to_context execution option
+        # (which the ORM generally sets) is not set.
+        self._keymap_by_result_column_idx = None
+
+        # for compiled SQL constructs, copy additional lookup keys into
+        # the key lookup map, such as Column objects, labels,
+        # column keys and other names
+        if num_ctx_cols:
+            # keymap by primary string...
+            by_key = {
+                metadata_entry[MD_LOOKUP_KEY]: metadata_entry
+                for metadata_entry in raw
+            }
+
+            if len(by_key) != num_ctx_cols:
+                # if by-primary-string dictionary smaller than
+                # number of columns, assume we have dupes; (this check
+                # is also in place if string dictionary is bigger, as
+                # can occur when '*' was used as one of the compiled columns,
+                # which may or may not be suggestive of dupes), rewrite
+                # dupe records with "None" for index which results in
+                # ambiguous column exception when accessed.
+                #
+                # this is considered to be the less common case as it is not
+                # common to have dupe column keys in a SELECT statement.
+                #
+                # new in 1.4: get the complete set of all possible keys,
+                # strings, objects, whatever, that are dupes across two
+                # different records, first.
+                index_by_key: Dict[Any, Any] = {}
+                dupes = set()
+                for metadata_entry in raw:
+                    for key in (metadata_entry[MD_RENDERED_NAME],) + (
+                        metadata_entry[MD_OBJECTS] or ()
+                    ):
+                        idx = metadata_entry[MD_INDEX]
+                        # if this key has been associated with more than one
+                        # positional index, it's a dupe
+                        if index_by_key.setdefault(key, idx) != idx:
+                            dupes.add(key)
+
+                # then put everything we have into the keymap excluding only
+                # those keys that are dupes.
+                self._keymap = {
+                    obj_elem: metadata_entry
+                    for metadata_entry in raw
+                    if metadata_entry[MD_OBJECTS]
+                    for obj_elem in metadata_entry[MD_OBJECTS]
+                    if obj_elem not in dupes
+                }
+
+                # then for the dupe keys, put the "ambiguous column"
+                # record into by_key.
+                by_key.update(
+                    {
+                        key: (None, None, [], key, key, None, None)
+                        for key in dupes
+                    }
+                )
+
+            else:
+                # no dupes - copy secondary elements from compiled
+                # columns into self._keymap.  this is the most common
+                # codepath for Core / ORM statement executions before the
+                # result metadata is cached
+                self._keymap = {
+                    obj_elem: metadata_entry
+                    for metadata_entry in raw
+                    if metadata_entry[MD_OBJECTS]
+                    for obj_elem in metadata_entry[MD_OBJECTS]
+                }
+            # update keymap with primary string names taking
+            # precedence
+            self._keymap.update(by_key)
+        else:
+            # no compiled objects to map, just create keymap by primary string
+            self._keymap = {
+                metadata_entry[MD_LOOKUP_KEY]: metadata_entry
+                for metadata_entry in raw
+            }
+
+        # update keymap with "translated" names.  In SQLAlchemy this is a
+        # SQLite-only thing, and in fact impacts only extremely old SQLite
+        # versions unlikely to be present in modern Python versions.
+        # however, the pyhive third party dialect is
+        # also using this hook, which means others still might use it as well.
+        # I dislike having this awkward hook here but as long as we need
+        # to use names in cursor.description in some cases we need to have
+        # some hook to accomplish this.
+        if not num_ctx_cols and context._translate_colname:
+            self._keymap.update(
+                {
+                    metadata_entry[MD_UNTRANSLATED]: self._keymap[
+                        metadata_entry[MD_LOOKUP_KEY]
+                    ]
+                    for metadata_entry in raw
+                    if metadata_entry[MD_UNTRANSLATED]
+                }
+            )
+
+        self._key_to_index = self._make_key_to_index(self._keymap, MD_INDEX)
+
+    def _merge_cursor_description(
+        self,
+        context,
+        cursor_description,
+        result_columns,
+        num_ctx_cols,
+        cols_are_ordered,
+        textual_ordered,
+        ad_hoc_textual,
+        loose_column_name_matching,
+    ):
+        """Merge a cursor.description with compiled result column information.
+
+        There are at least four separate strategies used here, selected
+        depending on the type of SQL construct used to start with.
+
+        The most common case is that of the compiled SQL expression construct,
+        which generated the column names present in the raw SQL string and
+        which has the identical number of columns as were reported by
+        cursor.description.  In this case, we assume a 1-1 positional mapping
+        between the entries in cursor.description and the compiled object.
+        This is also the most performant case as we disregard extracting /
+        decoding the column names present in cursor.description since we
+        already have the desired name we generated in the compiled SQL
+        construct.
+
+        The next common case is that of the completely raw string SQL,
+        such as passed to connection.execute().  In this case we have no
+        compiled construct to work with, so we extract and decode the
+        names from cursor.description and index those as the primary
+        result row target keys.
+
+        The remaining fairly common case is that of the textual SQL
+        that includes at least partial column information; this is when
+        we use a :class:`_expression.TextualSelect` construct.
+        This construct may have
+        unordered or ordered column information.  In the ordered case, we
+        merge the cursor.description and the compiled construct's information
+        positionally, and warn if there are additional description names
+        present, however we still decode the names in cursor.description
+        as we don't have a guarantee that the names in the columns match
+        on these.   In the unordered case, we match names in cursor.description
+        to that of the compiled construct based on name matching.
+        In both of these cases, the cursor.description names and the column
+        expression objects and names are indexed as result row target keys.
+
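+        E.g., a sketch of the kind of construct the textual cases cover,
+        using names that are illustrative only::
+
+            stmt = text("SELECT id, name FROM user_table").columns(
+                column("id", Integer), column("name", String)
+            )
+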
+        The final case is much less common, where we have a compiled
+        non-textual SQL expression construct, but the number of columns
+        in cursor.description doesn't match what's in the compiled
+        construct.  We make the guess here that there might be textual
+        column expressions in the compiled construct that themselves include
+        a comma in them causing them to split.  We do the same name-matching
+        as with textual non-ordered columns.
+
+        The name-matched system of merging is the same as that used by
+        SQLAlchemy for all cases up through the 0.9 series.   Positional
+        matching for compiled SQL expressions was introduced in 1.0 as a
+        major performance feature, and positional matching for textual
+        :class:`_expression.TextualSelect` objects in 1.1.
+        As name matching is no longer
+        a common case, it was acceptable to factor it into smaller generator-
+        oriented methods that are easier to understand, but incur slightly
+        more performance overhead.
+
+        """
+
+        if (
+            num_ctx_cols
+            and cols_are_ordered
+            and not textual_ordered
+            and num_ctx_cols == len(cursor_description)
+        ):
+            self._keys = [elem[0] for elem in result_columns]
+            # pure positional 1-1 case; doesn't need to read
+            # the names from cursor.description
+
+            # most common case for Core and ORM
+
+            # this metadata is safe to cache because we are guaranteed
+            # to have the columns in the same order for new executions
+            self._safe_for_cache = True
+            return [
+                (
+                    idx,
+                    idx,
+                    rmap_entry[RM_OBJECTS],
+                    rmap_entry[RM_NAME],
+                    rmap_entry[RM_RENDERED_NAME],
+                    context.get_result_processor(
+                        rmap_entry[RM_TYPE],
+                        rmap_entry[RM_RENDERED_NAME],
+                        cursor_description[idx][1],
+                    ),
+                    None,
+                )
+                for idx, rmap_entry in enumerate(result_columns)
+            ]
+        else:
+            # name-based or text-positional cases, where we need
+            # to read cursor.description names
+
+            if textual_ordered or (
+                ad_hoc_textual and len(cursor_description) == num_ctx_cols
+            ):
+                self._safe_for_cache = True
+                # textual positional case
+                raw_iterator = self._merge_textual_cols_by_position(
+                    context, cursor_description, result_columns
+                )
+            elif num_ctx_cols:
+                # compiled SQL with a mismatch of description cols
+                # vs. compiled cols, or textual w/ unordered columns
+                # the order of columns can change if the query is
+                # against a "select *", so not safe to cache
+                self._safe_for_cache = False
+                raw_iterator = self._merge_cols_by_name(
+                    context,
+                    cursor_description,
+                    result_columns,
+                    loose_column_name_matching,
+                )
+            else:
+                # no compiled SQL, just a raw string, order of columns
+                # can change for "select *"
+                self._safe_for_cache = False
+                raw_iterator = self._merge_cols_by_none(
+                    context, cursor_description
+                )
+
+            return [
+                (
+                    idx,
+                    ridx,
+                    obj,
+                    cursor_colname,
+                    cursor_colname,
+                    context.get_result_processor(
+                        mapped_type, cursor_colname, coltype
+                    ),
+                    untranslated,
+                )
+                for (
+                    idx,
+                    ridx,
+                    cursor_colname,
+                    mapped_type,
+                    coltype,
+                    obj,
+                    untranslated,
+                ) in raw_iterator
+            ]
+
+    def _colnames_from_description(self, context, cursor_description):
+        """Extract column names and data types from a cursor.description.
+
+        Applies unicode decoding, column translation, "normalization",
+        and case sensitivity rules to the names based on the dialect.
+
+        """
+
+        dialect = context.dialect
+        translate_colname = context._translate_colname
+        normalize_name = (
+            dialect.normalize_name if dialect.requires_name_normalize else None
+        )
+        untranslated = None
+
+        self._keys = []
+
+        for idx, rec in enumerate(cursor_description):
+            colname = rec[0]
+            coltype = rec[1]
+
+            if translate_colname:
+                colname, untranslated = translate_colname(colname)
+
+            if normalize_name:
+                colname = normalize_name(colname)
+
+            self._keys.append(colname)
+
+            yield idx, colname, untranslated, coltype
+
+    def _merge_textual_cols_by_position(
+        self, context, cursor_description, result_columns
+    ):
+        num_ctx_cols = len(result_columns)
+
+        if num_ctx_cols > len(cursor_description):
+            util.warn(
+                "Number of columns in textual SQL (%d) is "
+                "smaller than number of columns requested (%d)"
+                % (len(cursor_description), num_ctx_cols)
+            )
+        seen = set()
+
+        for (
+            idx,
+            colname,
+            untranslated,
+            coltype,
+        ) in self._colnames_from_description(context, cursor_description):
+            if idx < num_ctx_cols:
+                ctx_rec = result_columns[idx]
+                obj = ctx_rec[RM_OBJECTS]
+                ridx = idx
+                mapped_type = ctx_rec[RM_TYPE]
+                if obj[0] in seen:
+                    raise exc.InvalidRequestError(
+                        "Duplicate column expression requested "
+                        "in textual SQL: %r" % obj[0]
+                    )
+                seen.add(obj[0])
+            else:
+                mapped_type = sqltypes.NULLTYPE
+                obj = None
+                ridx = None
+            yield idx, ridx, colname, mapped_type, coltype, obj, untranslated
+
+    def _merge_cols_by_name(
+        self,
+        context,
+        cursor_description,
+        result_columns,
+        loose_column_name_matching,
+    ):
+        match_map = self._create_description_match_map(
+            result_columns, loose_column_name_matching
+        )
+        mapped_type: TypeEngine[Any]
+
+        for (
+            idx,
+            colname,
+            untranslated,
+            coltype,
+        ) in self._colnames_from_description(context, cursor_description):
+            try:
+                ctx_rec = match_map[colname]
+            except KeyError:
+                mapped_type = sqltypes.NULLTYPE
+                obj = None
+                result_columns_idx = None
+            else:
+                obj = ctx_rec[1]
+                mapped_type = ctx_rec[2]
+                result_columns_idx = ctx_rec[3]
+            yield (
+                idx,
+                result_columns_idx,
+                colname,
+                mapped_type,
+                coltype,
+                obj,
+                untranslated,
+            )
+
+    @classmethod
+    def _create_description_match_map(
+        cls,
+        result_columns: List[ResultColumnsEntry],
+        loose_column_name_matching: bool = False,
+    ) -> Dict[
+        Union[str, object], Tuple[str, Tuple[Any, ...], TypeEngine[Any], int]
+    ]:
+        """when matching cursor.description to a set of names that are present
+        in a Compiled object, as is the case with TextualSelect, get all the
+        names we expect might match those in cursor.description.
+        """
+
+        d: Dict[
+            Union[str, object],
+            Tuple[str, Tuple[Any, ...], TypeEngine[Any], int],
+        ] = {}
+        for ridx, elem in enumerate(result_columns):
+            key = elem[RM_RENDERED_NAME]
+            if key in d:
+                # conflicting keyname - just add the column-linked objects
+                # to the existing record.  if there is a duplicate column
+                # name in the cursor description, this will allow all of those
+                # objects to raise an ambiguous column error
+                e_name, e_obj, e_type, e_ridx = d[key]
+                d[key] = e_name, e_obj + elem[RM_OBJECTS], e_type, ridx
+            else:
+                d[key] = (elem[RM_NAME], elem[RM_OBJECTS], elem[RM_TYPE], ridx)
+
+            if loose_column_name_matching:
+                # when using a textual statement with an unordered set
+                # of columns that line up, we are expecting the user
+                # to be using label names in the SQL that match to the column
+                # expressions.  Enable more liberal matching for this case;
+                # duplicate keys that are ambiguous will be fixed later.
+                for r_key in elem[RM_OBJECTS]:
+                    d.setdefault(
+                        r_key,
+                        (elem[RM_NAME], elem[RM_OBJECTS], elem[RM_TYPE], ridx),
+                    )
+        return d
+
+    def _merge_cols_by_none(self, context, cursor_description):
+        for (
+            idx,
+            colname,
+            untranslated,
+            coltype,
+        ) in self._colnames_from_description(context, cursor_description):
+            yield (
+                idx,
+                None,
+                colname,
+                sqltypes.NULLTYPE,
+                coltype,
+                None,
+                untranslated,
+            )
+
+    if not TYPE_CHECKING:
+
+        def _key_fallback(
+            self, key: Any, err: Optional[Exception], raiseerr: bool = True
+        ) -> Optional[NoReturn]:
+            if raiseerr:
+                if self._unpickled and isinstance(key, elements.ColumnElement):
+                    raise exc.NoSuchColumnError(
+                        "Row was unpickled; lookup by ColumnElement "
+                        "is unsupported"
+                    ) from err
+                else:
+                    raise exc.NoSuchColumnError(
+                        "Could not locate column in row for column '%s'"
+                        % util.string_or_unprintable(key)
+                    ) from err
+            else:
+                return None
+
+    def _raise_for_ambiguous_column_name(self, rec):
+        raise exc.InvalidRequestError(
+            "Ambiguous column name '%s' in "
+            "result set column descriptions" % rec[MD_LOOKUP_KEY]
+        )
+
+    def _index_for_key(self, key: Any, raiseerr: bool = True) -> Optional[int]:
+        # TODO: can consider pre-loading ints and negative ints
+        # into _keymap - also no coverage here
+        if isinstance(key, int):
+            key = self._keys[key]
+
+        try:
+            rec = self._keymap[key]
+        except KeyError as ke:
+            x = self._key_fallback(key, ke, raiseerr)
+            assert x is None
+            return None
+
+        index = rec[0]
+
+        if index is None:
+            self._raise_for_ambiguous_column_name(rec)
+        return index
+
+    def _indexes_for_keys(self, keys):
+        try:
+            return [self._keymap[key][0] for key in keys]
+        except KeyError as ke:
+            # ensure it raises
+            CursorResultMetaData._key_fallback(self, ke.args[0], ke)
+
+    def _metadata_for_keys(
+        self, keys: Sequence[Any]
+    ) -> Iterator[_NonAmbigCursorKeyMapRecType]:
+        for key in keys:
+            if int in key.__class__.__mro__:
+                key = self._keys[key]
+
+            try:
+                rec = self._keymap[key]
+            except KeyError as ke:
+                # ensure it raises
+                CursorResultMetaData._key_fallback(self, ke.args[0], ke)
+
+            index = rec[MD_INDEX]
+
+            if index is None:
+                self._raise_for_ambiguous_column_name(rec)
+
+            yield cast(_NonAmbigCursorKeyMapRecType, rec)
+
+    def __getstate__(self):
+        # TODO: consider serializing this as SimpleResultMetaData
+        return {
+            "_keymap": {
+                key: (
+                    rec[MD_INDEX],
+                    rec[MD_RESULT_MAP_INDEX],
+                    [],
+                    key,
+                    rec[MD_RENDERED_NAME],
+                    None,
+                    None,
+                )
+                for key, rec in self._keymap.items()
+                if isinstance(key, (str, int))
+            },
+            "_keys": self._keys,
+            "_translated_indexes": self._translated_indexes,
+        }
+
+    def __setstate__(self, state):
+        self._processors = [None for _ in range(len(state["_keys"]))]
+        self._keymap = state["_keymap"]
+        self._keymap_by_result_column_idx = None
+        self._key_to_index = self._make_key_to_index(self._keymap, MD_INDEX)
+        self._keys = state["_keys"]
+        self._unpickled = True
+        if state["_translated_indexes"]:
+            self._translated_indexes = cast(
+                "List[int]", state["_translated_indexes"]
+            )
+            self._tuplefilter = tuplegetter(*self._translated_indexes)
+        else:
+            self._translated_indexes = self._tuplefilter = None
+
+
+class ResultFetchStrategy:
+    """Define a fetching strategy for a result object.
+
+
+    .. versionadded:: 1.4
+
+    """
+
+    __slots__ = ()
+
+    alternate_cursor_description: Optional[_DBAPICursorDescription] = None
+
+    def soft_close(
+        self, result: CursorResult[Any], dbapi_cursor: Optional[DBAPICursor]
+    ) -> None:
+        raise NotImplementedError()
+
+    def hard_close(
+        self, result: CursorResult[Any], dbapi_cursor: Optional[DBAPICursor]
+    ) -> None:
+        raise NotImplementedError()
+
+    def yield_per(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: Optional[DBAPICursor],
+        num: int,
+    ) -> None:
+        return
+
+    def fetchone(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: DBAPICursor,
+        hard_close: bool = False,
+    ) -> Any:
+        raise NotImplementedError()
+
+    def fetchmany(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: DBAPICursor,
+        size: Optional[int] = None,
+    ) -> Any:
+        raise NotImplementedError()
+
+    def fetchall(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: DBAPICursor,
+    ) -> Any:
+        raise NotImplementedError()
+
+    def handle_exception(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: Optional[DBAPICursor],
+        err: BaseException,
+    ) -> NoReturn:
+        raise err
+
+
+class NoCursorFetchStrategy(ResultFetchStrategy):
+    """Cursor strategy for a result that has no open cursor.
+
+    There are two varieties of this strategy, one for DQL and one for
+    DML (and also DDL), each of which represents a result that had a cursor
+    but no longer has one.
+
+    """
+
+    __slots__ = ()
+
+    def soft_close(self, result, dbapi_cursor):
+        pass
+
+    def hard_close(self, result, dbapi_cursor):
+        pass
+
+    def fetchone(self, result, dbapi_cursor, hard_close=False):
+        return self._non_result(result, None)
+
+    def fetchmany(self, result, dbapi_cursor, size=None):
+        return self._non_result(result, [])
+
+    def fetchall(self, result, dbapi_cursor):
+        return self._non_result(result, [])
+
+    def _non_result(self, result, default, err=None):
+        raise NotImplementedError()
+
+
+class NoCursorDQLFetchStrategy(NoCursorFetchStrategy):
+    """Cursor strategy for a DQL result that has no open cursor.
+
+    This is a result set that can return rows, i.e. for a SELECT, or for an
+    INSERT, UPDATE, DELETE that includes RETURNING.  However, it is in the
+    state where the cursor is closed and no rows remain available.  The owning
+    result object may or may not be "hard closed", which determines whether
+    the fetch methods return empty results or raise for a closed result.
+
+    """
+
+    __slots__ = ()
+
+    def _non_result(self, result, default, err=None):
+        if result.closed:
+            raise exc.ResourceClosedError(
+                "This result object is closed."
+            ) from err
+        else:
+            return default
+
+
+_NO_CURSOR_DQL = NoCursorDQLFetchStrategy()
+
+
+class NoCursorDMLFetchStrategy(NoCursorFetchStrategy):
+    """Cursor strategy for a DML result that has no open cursor.
+
+    This is a result set that does not return rows, i.e. for an INSERT,
+    UPDATE, DELETE that does not include RETURNING.
+
+    """
+
+    __slots__ = ()
+
+    def _non_result(self, result, default, err=None):
+        # we only expect to have a _NoResultMetaData() here right now.
+        assert not result._metadata.returns_rows
+        result._metadata._we_dont_return_rows(err)
+
+
+_NO_CURSOR_DML = NoCursorDMLFetchStrategy()
+
+
+class CursorFetchStrategy(ResultFetchStrategy):
+    """Call fetch methods from a DBAPI cursor.
+
+    Alternate versions of this class may instead buffer the rows from
+    cursors or not use cursors at all.
+
+    """
+
+    __slots__ = ()
+
+    def soft_close(
+        self, result: CursorResult[Any], dbapi_cursor: Optional[DBAPICursor]
+    ) -> None:
+        result.cursor_strategy = _NO_CURSOR_DQL
+
+    def hard_close(
+        self, result: CursorResult[Any], dbapi_cursor: Optional[DBAPICursor]
+    ) -> None:
+        result.cursor_strategy = _NO_CURSOR_DQL
+
+    def handle_exception(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: Optional[DBAPICursor],
+        err: BaseException,
+    ) -> NoReturn:
+        result.connection._handle_dbapi_exception(
+            err, None, None, dbapi_cursor, result.context
+        )
+
+    def yield_per(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: Optional[DBAPICursor],
+        num: int,
+    ) -> None:
+        result.cursor_strategy = BufferedRowCursorFetchStrategy(
+            dbapi_cursor,
+            {"max_row_buffer": num},
+            initial_buffer=collections.deque(),
+            growth_factor=0,
+        )
+
+    def fetchone(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: DBAPICursor,
+        hard_close: bool = False,
+    ) -> Any:
+        try:
+            row = dbapi_cursor.fetchone()
+            if row is None:
+                result._soft_close(hard=hard_close)
+            return row
+        except BaseException as e:
+            self.handle_exception(result, dbapi_cursor, e)
+
+    def fetchmany(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: DBAPICursor,
+        size: Optional[int] = None,
+    ) -> Any:
+        try:
+            if size is None:
+                rows = dbapi_cursor.fetchmany()
+            else:
+                rows = dbapi_cursor.fetchmany(size)
+
+            if not rows:
+                result._soft_close()
+            return rows
+        except BaseException as e:
+            self.handle_exception(result, dbapi_cursor, e)
+
+    def fetchall(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: DBAPICursor,
+    ) -> Any:
+        try:
+            rows = dbapi_cursor.fetchall()
+            result._soft_close()
+            return rows
+        except BaseException as e:
+            self.handle_exception(result, dbapi_cursor, e)
+
+
+_DEFAULT_FETCH = CursorFetchStrategy()
+
+
+class BufferedRowCursorFetchStrategy(CursorFetchStrategy):
+    """A cursor fetch strategy with row buffering behavior.
+
+    This strategy buffers the contents of a selection of rows
+    before ``fetchone()`` is called.  This is to allow the results of
+    ``cursor.description`` to be available immediately, when
+    interfacing with a DB-API that requires rows to be consumed before
+    this information is available (currently psycopg2, when used with
+    server-side cursors).
+
+    The pre-fetching behavior fetches only one row initially, and then
+    grows its buffer size multiplicatively by a fixed growth factor with
+    each successive need for additional rows, up to the ``max_row_buffer``
+    size, which defaults to 1000::
+
+        with psycopg2_engine.connect() as conn:
+
+            result = conn.execution_options(
+                stream_results=True, max_row_buffer=50
+            ).execute(text("select * from table"))
+
+    .. versionchanged:: 1.4 ``max_row_buffer`` may now exceed 1000 rows.
+
+    .. seealso::
+
+        :ref:`psycopg2_execution_options`
+    """
+
+    __slots__ = ("_max_row_buffer", "_rowbuffer", "_bufsize", "_growth_factor")
+
+    def __init__(
+        self,
+        dbapi_cursor,
+        execution_options,
+        growth_factor=5,
+        initial_buffer=None,
+    ):
+        self._max_row_buffer = execution_options.get("max_row_buffer", 1000)
+
+        if initial_buffer is not None:
+            self._rowbuffer = initial_buffer
+        else:
+            self._rowbuffer = collections.deque(dbapi_cursor.fetchmany(1))
+        self._growth_factor = growth_factor
+
+        if growth_factor:
+            self._bufsize = min(self._max_row_buffer, self._growth_factor)
+        else:
+            self._bufsize = self._max_row_buffer
+
+    @classmethod
+    def create(cls, result):
+        return BufferedRowCursorFetchStrategy(
+            result.cursor,
+            result.context.execution_options,
+        )
+
+    def _buffer_rows(self, result, dbapi_cursor):
+        """this is currently used only by fetchone()."""
+
+        size = self._bufsize
+        try:
+            if size < 1:
+                new_rows = dbapi_cursor.fetchall()
+            else:
+                new_rows = dbapi_cursor.fetchmany(size)
+        except BaseException as e:
+            self.handle_exception(result, dbapi_cursor, e)
+
+        if not new_rows:
+            return
+        self._rowbuffer = collections.deque(new_rows)
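+        # grow multiplicatively: with the default growth_factor of 5,
+        # successive fetch sizes run 5, 25, 125, ... capped at
+        # max_row_buffer (the initial pre-fetch was a single row)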
+        if self._growth_factor and size < self._max_row_buffer:
+            self._bufsize = min(
+                self._max_row_buffer, size * self._growth_factor
+            )
+
+    def yield_per(self, result, dbapi_cursor, num):
+        self._growth_factor = 0
+        self._max_row_buffer = self._bufsize = num
+
+    def soft_close(self, result, dbapi_cursor):
+        self._rowbuffer.clear()
+        super().soft_close(result, dbapi_cursor)
+
+    def hard_close(self, result, dbapi_cursor):
+        self._rowbuffer.clear()
+        super().hard_close(result, dbapi_cursor)
+
+    def fetchone(self, result, dbapi_cursor, hard_close=False):
+        if not self._rowbuffer:
+            self._buffer_rows(result, dbapi_cursor)
+            if not self._rowbuffer:
+                try:
+                    result._soft_close(hard=hard_close)
+                except BaseException as e:
+                    self.handle_exception(result, dbapi_cursor, e)
+                return None
+        return self._rowbuffer.popleft()
+
+    def fetchmany(self, result, dbapi_cursor, size=None):
+        if size is None:
+            return self.fetchall(result, dbapi_cursor)
+
+        rb = self._rowbuffer
+        lb = len(rb)
+        close = False
+        if size > lb:
+            try:
+                new = dbapi_cursor.fetchmany(size - lb)
+            except BaseException as e:
+                self.handle_exception(result, dbapi_cursor, e)
+            else:
+                if not new:
+                    # defer closing since it may clear the row buffer
+                    close = True
+                else:
+                    rb.extend(new)
+
+        res = [rb.popleft() for _ in range(min(size, len(rb)))]
+        if close:
+            result._soft_close()
+        return res
+
+    def fetchall(self, result, dbapi_cursor):
+        try:
+            ret = list(self._rowbuffer) + list(dbapi_cursor.fetchall())
+            self._rowbuffer.clear()
+            result._soft_close()
+            return ret
+        except BaseException as e:
+            self.handle_exception(result, dbapi_cursor, e)
+
+
+class FullyBufferedCursorFetchStrategy(CursorFetchStrategy):
+    """A cursor strategy that buffers rows fully upon creation.
+
+    Used for operations where a result is to be delivered
+    after the database conversation can not be continued,
+    such as MSSQL INSERT...OUTPUT after an autocommit.
+
+    """
+
+    __slots__ = ("_rowbuffer", "alternate_cursor_description")
+
+    def __init__(
+        self, dbapi_cursor, alternate_description=None, initial_buffer=None
+    ):
+        self.alternate_cursor_description = alternate_description
+        if initial_buffer is not None:
+            self._rowbuffer = collections.deque(initial_buffer)
+        else:
+            self._rowbuffer = collections.deque(dbapi_cursor.fetchall())
+
+    def yield_per(self, result, dbapi_cursor, num):
+        pass
+
+    def soft_close(self, result, dbapi_cursor):
+        self._rowbuffer.clear()
+        super().soft_close(result, dbapi_cursor)
+
+    def hard_close(self, result, dbapi_cursor):
+        self._rowbuffer.clear()
+        super().hard_close(result, dbapi_cursor)
+
+    def fetchone(self, result, dbapi_cursor, hard_close=False):
+        if self._rowbuffer:
+            return self._rowbuffer.popleft()
+        else:
+            result._soft_close(hard=hard_close)
+            return None
+
+    def fetchmany(self, result, dbapi_cursor, size=None):
+        if size is None:
+            return self.fetchall(result, dbapi_cursor)
+
+        rb = self._rowbuffer
+        rows = [rb.popleft() for _ in range(min(size, len(rb)))]
+        if not rows:
+            result._soft_close()
+        return rows
+
+    def fetchall(self, result, dbapi_cursor):
+        ret = self._rowbuffer
+        self._rowbuffer = collections.deque()
+        result._soft_close()
+        return ret
+
+
+class _NoResultMetaData(ResultMetaData):
+    __slots__ = ()
+
+    returns_rows = False
+
+    def _we_dont_return_rows(self, err=None):
+        raise exc.ResourceClosedError(
+            "This result object does not return rows. "
+            "It has been closed automatically."
+        ) from err
+
+    def _index_for_key(self, keys, raiseerr):
+        self._we_dont_return_rows()
+
+    def _metadata_for_keys(self, key):
+        self._we_dont_return_rows()
+
+    def _reduce(self, keys):
+        self._we_dont_return_rows()
+
+    @property
+    def _keymap(self):
+        self._we_dont_return_rows()
+
+    @property
+    def _key_to_index(self):
+        self._we_dont_return_rows()
+
+    @property
+    def _processors(self):
+        self._we_dont_return_rows()
+
+    @property
+    def keys(self):
+        self._we_dont_return_rows()
+
+
+_NO_RESULT_METADATA = _NoResultMetaData()
+
+
+def null_dml_result() -> IteratorResult[Any]:
+    it: IteratorResult[Any] = IteratorResult(_NoResultMetaData(), iter([]))
+    it._soft_close()
+    return it
+
+
+class CursorResult(Result[_T]):
+    """A Result that is representing state from a DBAPI cursor.
+
+    .. versionchanged:: 1.4  The :class:`.CursorResult`
+       class replaces the previous :class:`.ResultProxy` interface.
+       These classes are based on the :class:`.Result` calling API
+       which provides an updated usage model and calling facade for
+       SQLAlchemy Core and SQLAlchemy ORM.
+
+    Returns database rows via the :class:`.Row` class, which provides
+    additional API features and behaviors on top of the raw data returned by
+    the DBAPI.   Through the use of filters such as the :meth:`.Result.scalars`
+    method, other kinds of objects may also be returned.
+
+    .. seealso::
+
+        :ref:`tutorial_selecting_data` - introductory material for accessing
+        :class:`_engine.CursorResult` and :class:`.Row` objects.
+
+    """
+
+    __slots__ = (
+        "context",
+        "dialect",
+        "cursor",
+        "cursor_strategy",
+        "_echo",
+        "connection",
+    )
+
+    _metadata: Union[CursorResultMetaData, _NoResultMetaData]
+    _no_result_metadata = _NO_RESULT_METADATA
+    _soft_closed: bool = False
+    closed: bool = False
+    _is_cursor = True
+
+    context: DefaultExecutionContext
+    dialect: Dialect
+    cursor_strategy: ResultFetchStrategy
+    connection: Connection
+
+    def __init__(
+        self,
+        context: DefaultExecutionContext,
+        cursor_strategy: ResultFetchStrategy,
+        cursor_description: Optional[_DBAPICursorDescription],
+    ):
+        self.context = context
+        self.dialect = context.dialect
+        self.cursor = context.cursor
+        self.cursor_strategy = cursor_strategy
+        self.connection = context.root_connection
+        self._echo = echo = (
+            self.connection._echo and context.engine._should_log_debug()
+        )
+
+        if cursor_description is not None:
+            # inline of Result._row_getter(), set up an initial row
+            # getter assuming no transformations will be called as this
+            # is the most common case
+
+            metadata = self._init_metadata(context, cursor_description)
+
+            _make_row: Any
+            _make_row = functools.partial(
+                Row,
+                metadata,
+                metadata._effective_processors,
+                metadata._key_to_index,
+            )
+
+            if context._num_sentinel_cols:
+                sentinel_filter = operator.itemgetter(
+                    slice(-context._num_sentinel_cols)
+                )
+
+                def _sliced_row(raw_data):
+                    return _make_row(sentinel_filter(raw_data))
+
+                sliced_row = _sliced_row
+            else:
+                sliced_row = _make_row
+
+            if echo:
+                log = self.context.connection._log_debug
+
+                def _log_row(row):
+                    log("Row %r", sql_util._repr_row(row))
+                    return row
+
+                self._row_logging_fn = _log_row
+
+                def _make_row_2(row):
+                    return _log_row(sliced_row(row))
+
+                make_row = _make_row_2
+            else:
+                make_row = sliced_row
+            self._set_memoized_attribute("_row_getter", make_row)
+
+        else:
+            assert context._num_sentinel_cols == 0
+            self._metadata = self._no_result_metadata
+
+    def _init_metadata(self, context, cursor_description):
+        if context.compiled:
+            compiled = context.compiled
+
+            if compiled._cached_metadata:
+                metadata = compiled._cached_metadata
+            else:
+                metadata = CursorResultMetaData(self, cursor_description)
+                if metadata._safe_for_cache:
+                    compiled._cached_metadata = metadata
+
+            # result rewrite/ adapt step.  this is to suit the case
+            # when we are invoked against a cached Compiled object, we want
+            # to rewrite the ResultMetaData to reflect the Column objects
+            # that are in our current SQL statement object, not the one
+            # that is associated with the cached Compiled object.
+            # the Compiled object may also tell us to not
+            # actually do this step; this is to support the ORM where
+            # it is to produce a new Result object in any case, and will
+            # be using the cached Column objects against this database result
+            # so we don't want to rewrite them.
+            #
+            # Basically this step suits the use case where the end user
+            # is using Core SQL expressions and is accessing columns in the
+            # result row using row._mapping[table.c.column].
+            if (
+                not context.execution_options.get(
+                    "_result_disable_adapt_to_context", False
+                )
+                and compiled._result_columns
+                and context.cache_hit is context.dialect.CACHE_HIT
+                and compiled.statement is not context.invoked_statement
+            ):
+                metadata = metadata._adapt_to_context(context)
+
+            self._metadata = metadata
+
+        else:
+            self._metadata = metadata = CursorResultMetaData(
+                self, cursor_description
+            )
+        if self._echo:
+            context.connection._log_debug(
+                "Col %r", tuple(x[0] for x in cursor_description)
+            )
+        return metadata
+
+    def _soft_close(self, hard=False):
+        """Soft close this :class:`_engine.CursorResult`.
+
+        This releases all DBAPI cursor resources, but leaves the
+        CursorResult "open" from a semantic perspective, meaning the
+        fetchXXX() methods will continue to return empty results.
+
+        This method is called automatically when:
+
+        * all result rows are exhausted using the fetchXXX() methods.
+        * cursor.description is None.
+
+        This method is **not public**, but is documented in order to clarify
+        the "autoclose" process used.
+
+        .. seealso::
+
+            :meth:`_engine.CursorResult.close`
+
+
+        """
+
+        if (not hard and self._soft_closed) or (hard and self.closed):
+            return
+
+        if hard:
+            self.closed = True
+            self.cursor_strategy.hard_close(self, self.cursor)
+        else:
+            self.cursor_strategy.soft_close(self, self.cursor)
+
+        if not self._soft_closed:
+            cursor = self.cursor
+            self.cursor = None  # type: ignore
+            self.connection._safe_close_cursor(cursor)
+            self._soft_closed = True
+
+    @property
+    def inserted_primary_key_rows(self):
+        """Return the value of
+        :attr:`_engine.CursorResult.inserted_primary_key`
+        as a row contained within a list; some dialects may support a
+        multiple row form as well.
+
+        .. note:: As indicated below, in current SQLAlchemy versions this
+           accessor is only useful beyond what's already supplied by
+           :attr:`_engine.CursorResult.inserted_primary_key` when using the
+           :ref:`postgresql_psycopg2` dialect.   Future versions hope to
+           generalize this feature to more dialects.
+
+        This accessor is added to support dialects that offer the feature
+        that is currently implemented by the :ref:`psycopg2_executemany_mode`
+        feature, currently **only the psycopg2 dialect**, which provides
+        for many rows to be INSERTed at once while still retaining the
+        behavior of being able to return server-generated primary key values.
+
+        * **When using the psycopg2 dialect, or other dialects that may support
+          "fast executemany" style inserts in upcoming releases** : When
+          invoking an INSERT statement while passing a list of rows as the
+          second argument to :meth:`_engine.Connection.execute`, this accessor
+          will then provide a list of rows, where each row contains the primary
+          key value for each row that was INSERTed.
+
+        * **When using all other dialects / backends that don't yet support
+          this feature**: This accessor is only useful for **single row INSERT
+          statements**, and returns the same information as that of the
+          :attr:`_engine.CursorResult.inserted_primary_key` within a
+          single-element list. When an INSERT statement is executed in
+          conjunction with a list of rows to be INSERTed, the list will contain
+          one row per row inserted in the statement, however it will contain
+          ``None`` for any server-generated values.
+
+        Future releases of SQLAlchemy will further generalize the
+        "fast execution helper" feature of psycopg2 to suit other dialects,
+        thus allowing this accessor to be of more general use.
+
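+        E.g., a minimal sketch, where ``users`` is an illustrative
+        :class:`_schema.Table`::
+
+            result = connection.execute(
+                users.insert(), [{"name": "u1"}, {"name": "u2"}]
+            )
+            pk_rows = result.inserted_primary_key_rows
+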
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :attr:`_engine.CursorResult.inserted_primary_key`
+
+        """
+        if not self.context.compiled:
+            raise exc.InvalidRequestError(
+                "Statement is not a compiled expression construct."
+            )
+        elif not self.context.isinsert:
+            raise exc.InvalidRequestError(
+                "Statement is not an insert() expression construct."
+            )
+        elif self.context._is_explicit_returning:
+            raise exc.InvalidRequestError(
+                "Can't call inserted_primary_key "
+                "when returning() "
+                "is used."
+            )
+        return self.context.inserted_primary_key_rows
+
+    @property
+    def inserted_primary_key(self):
+        """Return the primary key for the row just inserted.
+
+        The return value is a :class:`_result.Row` object representing
+        a named tuple of primary key values in the order in which the
+        primary key columns are configured in the source
+        :class:`_schema.Table`.
+
+        .. versionchanged:: 1.4.8 - the
+           :attr:`_engine.CursorResult.inserted_primary_key`
+           value is now a named tuple via the :class:`_result.Row` class,
+           rather than a plain tuple.
+
+        This accessor only applies to single row :func:`_expression.insert`
+        constructs which did not explicitly specify
+        :meth:`_expression.Insert.returning`.    Support for multirow inserts,
+        while not yet available for most backends, would be accessed using
+        the :attr:`_engine.CursorResult.inserted_primary_key_rows` accessor.
+
+        Note that primary key columns which specify a server_default clause, or
+        otherwise do not qualify as "autoincrement" columns (see the notes at
+        :class:`_schema.Column`), and were generated using the database-side
+        default, will appear in this list as ``None`` unless the backend
+        supports "returning" and the insert statement executed with the
+        "implicit returning" enabled.
+
+        Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
+        statement is not a compiled expression construct
+        or is not an insert() construct.
+
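+        E.g., a minimal sketch, where ``users`` is an illustrative
+        :class:`_schema.Table`::
+
+            result = connection.execute(users.insert().values(name="some name"))
+            pk_row = result.inserted_primary_key
+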
+        """
+
+        if self.context.executemany:
+            raise exc.InvalidRequestError(
+                "This statement was an executemany call; if primary key "
+                "returning is supported, please "
+                "use .inserted_primary_key_rows."
+            )
+
+        ikp = self.inserted_primary_key_rows
+        if ikp:
+            return ikp[0]
+        else:
+            return None
+
+    def last_updated_params(self):
+        """Return the collection of updated parameters from this
+        execution.
+
+        Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
+        statement is not a compiled expression construct
+        or is not an update() construct.
+
+        """
+        if not self.context.compiled:
+            raise exc.InvalidRequestError(
+                "Statement is not a compiled expression construct."
+            )
+        elif not self.context.isupdate:
+            raise exc.InvalidRequestError(
+                "Statement is not an update() expression construct."
+            )
+        elif self.context.executemany:
+            return self.context.compiled_parameters
+        else:
+            return self.context.compiled_parameters[0]
+
+    def last_inserted_params(self):
+        """Return the collection of inserted parameters from this
+        execution.
+
+        Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
+        statement is not a compiled expression construct
+        or is not an insert() construct.
+
+        """
+        if not self.context.compiled:
+            raise exc.InvalidRequestError(
+                "Statement is not a compiled expression construct."
+            )
+        elif not self.context.isinsert:
+            raise exc.InvalidRequestError(
+                "Statement is not an insert() expression construct."
+            )
+        elif self.context.executemany:
+            return self.context.compiled_parameters
+        else:
+            return self.context.compiled_parameters[0]
+
+    @property
+    def returned_defaults_rows(self):
+        """Return a list of rows each containing the values of default
+        columns that were fetched using
+        the :meth:`.ValuesBase.return_defaults` feature.
+
+        The return value is a list of :class:`.Row` objects.
+
+        .. versionadded:: 1.4
+
+        """
+        return self.context.returned_default_rows
+
+    def splice_horizontally(self, other):
+        """Return a new :class:`.CursorResult` that "horizontally splices"
+        together the rows of this :class:`.CursorResult` with that of another
+        :class:`.CursorResult`.
+
+        .. tip::  This method is for the benefit of the SQLAlchemy ORM and is
+           not intended for general use.
+
+        "horizontally splices" means that for each row in the first and second
+        result sets, a new row that concatenates the two rows together is
+        produced, which then becomes the new row.  The incoming
+        :class:`.CursorResult` must have the identical number of rows.  It is
+        typically expected that the two result sets are in the same sort
+        order as well, as the result rows are spliced together based on their
+        position in the result.
+
+        The expected use case here is so that multiple INSERT..RETURNING
+        statements (which definitely need to be sorted) against different
+        tables can produce a single result that looks like a JOIN of those two
+        tables.
+
+        E.g.::
+
+            r1 = connection.execute(
+                users.insert().returning(
+                    users.c.user_name, users.c.user_id, sort_by_parameter_order=True
+                ),
+                user_values,
+            )
+
+            r2 = connection.execute(
+                addresses.insert().returning(
+                    addresses.c.address_id,
+                    addresses.c.address,
+                    addresses.c.user_id,
+                    sort_by_parameter_order=True,
+                ),
+                address_values,
+            )
+
+            rows = r1.splice_horizontally(r2).all()
+            assert rows == [
+                ("john", 1, 1, "foo@bar.com", 1),
+                ("jack", 2, 2, "bar@bat.com", 2),
+            ]
+
+        .. versionadded:: 2.0
+
+        .. seealso::
+
+            :meth:`.CursorResult.splice_vertically`
+
+
+        """  # noqa: E501
+
+        clone = self._generate()
+        total_rows = [
+            tuple(r1) + tuple(r2)
+            for r1, r2 in zip(
+                list(self._raw_row_iterator()),
+                list(other._raw_row_iterator()),
+            )
+        ]
+
+        clone._metadata = clone._metadata._splice_horizontally(other._metadata)
+
+        clone.cursor_strategy = FullyBufferedCursorFetchStrategy(
+            None,
+            initial_buffer=total_rows,
+        )
+        clone._reset_memoizations()
+        return clone
+
+    def splice_vertically(self, other):
+        """Return a new :class:`.CursorResult` that "vertically splices",
+        i.e. "extends", the rows of this :class:`.CursorResult` with that of
+        another :class:`.CursorResult`.
+
+        .. tip::  This method is for the benefit of the SQLAlchemy ORM and is
+           not intended for general use.
+
+        "vertically splices" means the rows of the given result are appended to
+        the rows of this cursor result. The incoming :class:`.CursorResult`
+        must have rows that represent the identical list of columns in the
+        identical order as they are in this :class:`.CursorResult`.
+
+        .. versionadded:: 2.0
+
+        .. seealso::
+
+            :meth:`.CursorResult.splice_horizontally`
+
+        """
+        clone = self._generate()
+        total_rows = list(self._raw_row_iterator()) + list(
+            other._raw_row_iterator()
+        )
+
+        clone.cursor_strategy = FullyBufferedCursorFetchStrategy(
+            None,
+            initial_buffer=total_rows,
+        )
+        clone._reset_memoizations()
+        return clone
+
+    def _rewind(self, rows):
+        """rewind this result back to the given rowset.
+
+        this is used internally for the case where an :class:`.Insert`
+        construct combines the use of
+        :meth:`.Insert.return_defaults` along with the
+        "supplemental columns" feature.
+
+        """
+
+        if self._echo:
+            self.context.connection._log_debug(
+                "CursorResult rewound %d row(s)", len(rows)
+            )
+
+        # the rows given are expected to be Row objects, so we
+        # have to clear out processors which have already run on these
+        # rows
+        self._metadata = cast(
+            CursorResultMetaData, self._metadata
+        )._remove_processors()
+
+        self.cursor_strategy = FullyBufferedCursorFetchStrategy(
+            None,
+            # TODO: if these are Row objects, can we save on not having to
+            # re-make new Row objects out of them a second time?  is that
+            # what's actually happening right now?  maybe look into this
+            initial_buffer=rows,
+        )
+        self._reset_memoizations()
+        return self
+
+    @property
+    def returned_defaults(self):
+        """Return the values of default columns that were fetched using
+        the :meth:`.ValuesBase.return_defaults` feature.
+
+        The value is an instance of :class:`.Row`, or ``None``
+        if :meth:`.ValuesBase.return_defaults` was not used or if the
+        backend does not support RETURNING.
+
+        .. seealso::
+
+            :meth:`.ValuesBase.return_defaults`
+
+        """
+
+        if self.context.executemany:
+            raise exc.InvalidRequestError(
+                "This statement was an executemany call; if return defaults "
+                "is supported, please use .returned_defaults_rows."
+            )
+
+        rows = self.context.returned_default_rows
+        if rows:
+            return rows[0]
+        else:
+            return None
+
+    def lastrow_has_defaults(self):
+        """Return ``lastrow_has_defaults()`` from the underlying
+        :class:`.ExecutionContext`.
+
+        See :class:`.ExecutionContext` for details.
+
+        """
+
+        return self.context.lastrow_has_defaults()
+
+    def postfetch_cols(self):
+        """Return ``postfetch_cols()`` from the underlying
+        :class:`.ExecutionContext`.
+
+        See :class:`.ExecutionContext` for details.
+
+        Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
+        statement is not a compiled expression construct
+        or is not an insert() or update() construct.
+
+        """
+
+        if not self.context.compiled:
+            raise exc.InvalidRequestError(
+                "Statement is not a compiled expression construct."
+            )
+        elif not self.context.isinsert and not self.context.isupdate:
+            raise exc.InvalidRequestError(
+                "Statement is not an insert() or update() "
+                "expression construct."
+            )
+        return self.context.postfetch_cols
+
+    def prefetch_cols(self):
+        """Return ``prefetch_cols()`` from the underlying
+        :class:`.ExecutionContext`.
+
+        See :class:`.ExecutionContext` for details.
+
+        Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
+        statement is not a compiled expression construct
+        or is not an insert() or update() construct.
+
+        """
+
+        if not self.context.compiled:
+            raise exc.InvalidRequestError(
+                "Statement is not a compiled expression construct."
+            )
+        elif not self.context.isinsert and not self.context.isupdate:
+            raise exc.InvalidRequestError(
+                "Statement is not an insert() or update() "
+                "expression construct."
+            )
+        return self.context.prefetch_cols
+
+    def supports_sane_rowcount(self):
+        """Return ``supports_sane_rowcount`` from the dialect.
+
+        See :attr:`_engine.CursorResult.rowcount` for background.
+
+        """
+
+        return self.dialect.supports_sane_rowcount
+
+    def supports_sane_multi_rowcount(self):
+        """Return ``supports_sane_multi_rowcount`` from the dialect.
+
+        See :attr:`_engine.CursorResult.rowcount` for background.
+
+        """
+
+        return self.dialect.supports_sane_multi_rowcount
+
+    @util.memoized_property
+    def rowcount(self) -> int:
+        """Return the 'rowcount' for this result.
+
+        The primary purpose of 'rowcount' is to report the number of rows
+        matched by the WHERE criterion of an UPDATE or DELETE statement
+        executed once (i.e. for a single parameter set), which may then be
+        compared to the number of rows expected to be updated or deleted as a
+        means of asserting data integrity.
+
+        This attribute is transferred from the ``cursor.rowcount`` attribute
+        of the DBAPI before the cursor is closed, to support DBAPIs that
+        don't make this value available after cursor close.   Some DBAPIs may
+        offer meaningful values for other kinds of statements, such as INSERT
+        and SELECT statements as well.  In order to retrieve ``cursor.rowcount``
+        for these statements, set the
+        :paramref:`.Connection.execution_options.preserve_rowcount`
+        execution option to True, which will cause the ``cursor.rowcount``
+        value to be unconditionally memoized before any results are returned
+        or the cursor is closed, regardless of statement type.
+
+        For cases where the DBAPI does not support rowcount for a particular
+        kind of statement and/or execution, the returned value will be ``-1``,
+        which is delivered directly from the DBAPI and is part of :pep:`249`.
+        All DBAPIs should support rowcount for single-parameter-set
+        UPDATE and DELETE statements, however.
+
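+        E.g., a minimal sketch comparing the matched-row count to an
+        expected value, where ``users`` is an illustrative
+        :class:`_schema.Table`::
+
+            result = connection.execute(
+                users.update().where(users.c.id == 5).values(name="new name")
+            )
+            assert result.rowcount == 1
+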
+        .. note::
+
+           Notes regarding :attr:`_engine.CursorResult.rowcount`:
+
+
+           * This attribute returns the number of rows *matched*,
+             which is not necessarily the same as the number of rows
+             that were actually *modified*. For example, an UPDATE statement
+             may have no net change on a given row if the SET values
+             given are the same as those present in the row already.
+             Such a row would be matched but not modified.
+             On backends that feature both styles, such as MySQL,
+             rowcount is configured to return the match
+             count in all cases.
+
+           * :attr:`_engine.CursorResult.rowcount` in the default case is
+             *only* useful in conjunction with an UPDATE or DELETE statement,
+             and only with a single set of parameters. For other kinds of
+             statements, SQLAlchemy will not attempt to pre-memoize the value
+             unless the
+             :paramref:`.Connection.execution_options.preserve_rowcount`
+             execution option is used.  Note that contrary to :pep:`249`, many
+             DBAPIs do not support rowcount values for statements that are not
+             UPDATE or DELETE, particularly when rows are being returned which
+             are not fully pre-buffered.   DBAPIs that don't support rowcount
+             for a particular kind of statement should return the value ``-1``
+             for such statements.
+
+           * :attr:`_engine.CursorResult.rowcount` may not be meaningful
+             when executing a single statement with multiple parameter sets
+             (i.e. an :term:`executemany`). Most DBAPIs do not sum "rowcount"
+             values across multiple parameter sets and will return ``-1``
+             when accessed.
+
+           * SQLAlchemy's :ref:`engine_insertmanyvalues` feature does support
+             a correct population of :attr:`_engine.CursorResult.rowcount`
+             when the :paramref:`.Connection.execution_options.preserve_rowcount`
+             execution option is set to True.
+
+           * Statements that use RETURNING may not support rowcount, returning
+             a ``-1`` value instead.
+
+        .. seealso::
+
+            :ref:`tutorial_update_delete_rowcount` - in the :ref:`unified_tutorial`
+
+            :paramref:`.Connection.execution_options.preserve_rowcount`
+
+        """  # noqa: E501
+        try:
+            return self.context.rowcount
+        except BaseException as e:
+            self.cursor_strategy.handle_exception(self, self.cursor, e)
+            raise  # not called
+
+    @property
+    def lastrowid(self):
+        """Return the 'lastrowid' accessor on the DBAPI cursor.
+
+        This is a DBAPI specific method and is only functional
+        for those backends which support it, for statements
+        where it is appropriate.  Its behavior is not
+        consistent across backends.
+
+        Usage of this method is normally unnecessary when
+        using insert() expression constructs; the
+        :attr:`~CursorResult.inserted_primary_key` attribute provides a
+        tuple of primary key values for a newly inserted row,
+        regardless of database backend.
+
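+        E.g., a minimal sketch for a backend whose DBAPI implements the
+        accessor (names are illustrative)::
+
+            result = connection.execute(users.insert().values(name="some name"))
+            new_id = result.lastrowid
+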
+        """
+        try:
+            return self.context.get_lastrowid()
+        except BaseException as e:
+            self.cursor_strategy.handle_exception(self, self.cursor, e)
+
+    @property
+    def returns_rows(self):
+        """True if this :class:`_engine.CursorResult` returns zero or more
+        rows.
+
+        I.e. if it is legal to call the methods
+        :meth:`_engine.CursorResult.fetchone`,
+        :meth:`_engine.CursorResult.fetchmany`, and
+        :meth:`_engine.CursorResult.fetchall`.
+
+        Overall, the value of :attr:`_engine.CursorResult.returns_rows` should
+        always be synonymous with whether or not the DBAPI cursor had a
+        ``.description`` attribute, indicating the presence of result columns,
+        noting that a cursor that returns zero rows still has a
+        ``.description`` if a row-returning statement was emitted.
+
+        This attribute should be True for all results that are against
+        SELECT statements, as well as for DML statements INSERT/UPDATE/DELETE
+        that use RETURNING.   For INSERT/UPDATE/DELETE statements that were
+        not using RETURNING, the value will usually be False, however
+        there are some dialect-specific exceptions to this, such as the
+        MSSQL / pyodbc dialect, where a SELECT is emitted inline in
+        order to retrieve an inserted primary key value.
+
+
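+        E.g., a minimal sketch::
+
+            result = connection.execute(text("select 1"))
+            assert result.returns_rows
+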
+        """
+        return self._metadata.returns_rows
+
+    @property
+    def is_insert(self):
+        """True if this :class:`_engine.CursorResult` is the result
+        of executing an expression language compiled
+        :func:`_expression.insert` construct.
+
+        When True, this implies that the
+        :attr:`inserted_primary_key` attribute is accessible,
+        assuming the statement did not include
+        a user defined "returning" construct.
+
+        """
+        return self.context.isinsert
+
+    def _fetchiter_impl(self):
+        fetchone = self.cursor_strategy.fetchone
+
+        while True:
+            row = fetchone(self, self.cursor)
+            if row is None:
+                break
+            yield row
+
+    def _fetchone_impl(self, hard_close=False):
+        return self.cursor_strategy.fetchone(self, self.cursor, hard_close)
+
+    def _fetchall_impl(self):
+        return self.cursor_strategy.fetchall(self, self.cursor)
+
+    def _fetchmany_impl(self, size=None):
+        return self.cursor_strategy.fetchmany(self, self.cursor, size)
+
+    def _raw_row_iterator(self):
+        return self._fetchiter_impl()
+
+    def merge(self, *others: Result[Any]) -> MergedResult[Any]:
+        merged_result = super().merge(*others)
+        if self.context._has_rowcount:
+            merged_result.rowcount = sum(
+                cast("CursorResult[Any]", result).rowcount
+                for result in (self,) + others
+            )
+        return merged_result
+
+    def close(self) -> Any:
+        """Close this :class:`_engine.CursorResult`.
+
+        This closes out the underlying DBAPI cursor corresponding to the
+        statement execution, if one is still present.  Note that the DBAPI
+        cursor is automatically released when the :class:`_engine.CursorResult`
+        exhausts all available rows.  :meth:`_engine.CursorResult.close` is
+        generally an optional method except in the case when discarding a
+        :class:`_engine.CursorResult` that still has additional rows pending
+        for fetch.
+
+        After this method is called, it is no longer valid to call upon
+        the fetch methods, which will raise a :class:`.ResourceClosedError`
+        on subsequent use.
+
+        .. seealso::
+
+            :ref:`connections_toplevel`
+
+        """
+        self._soft_close(hard=True)
+
+    @_generative
+    def yield_per(self, num: int) -> Self:
+        self._yield_per = num
+        self.cursor_strategy.yield_per(self, self.cursor, num)
+        return self
+
+
+ResultProxy = CursorResult
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/default.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/default.py
new file mode 100644
index 00000000..dd4250ff
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/default.py
@@ -0,0 +1,2367 @@
+# engine/default.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+"""Default implementations of per-dialect sqlalchemy.engine classes.
+
+These are semi-private implementation classes which are only of importance
+to database dialect authors; dialects will usually use the classes here
+as the base class for their own corresponding classes.
+
+"""
+
+from __future__ import annotations
+
+import functools
+import operator
+import random
+import re
+from time import perf_counter
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import List
+from typing import Mapping
+from typing import MutableMapping
+from typing import MutableSequence
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import Union
+import weakref
+
+from . import characteristics
+from . import cursor as _cursor
+from . import interfaces
+from .base import Connection
+from .interfaces import CacheStats
+from .interfaces import DBAPICursor
+from .interfaces import Dialect
+from .interfaces import ExecuteStyle
+from .interfaces import ExecutionContext
+from .reflection import ObjectKind
+from .reflection import ObjectScope
+from .. import event
+from .. import exc
+from .. import pool
+from .. import util
+from ..sql import compiler
+from ..sql import dml
+from ..sql import expression
+from ..sql import type_api
+from ..sql import util as sql_util
+from ..sql._typing import is_tuple_type
+from ..sql.base import _NoArg
+from ..sql.compiler import DDLCompiler
+from ..sql.compiler import InsertmanyvaluesSentinelOpts
+from ..sql.compiler import SQLCompiler
+from ..sql.elements import quoted_name
+from ..util.typing import Final
+from ..util.typing import Literal
+
+if typing.TYPE_CHECKING:
+    from types import ModuleType
+
+    from .base import Engine
+    from .cursor import ResultFetchStrategy
+    from .interfaces import _CoreMultiExecuteParams
+    from .interfaces import _CoreSingleExecuteParams
+    from .interfaces import _DBAPICursorDescription
+    from .interfaces import _DBAPIMultiExecuteParams
+    from .interfaces import _ExecuteOptions
+    from .interfaces import _MutableCoreSingleExecuteParams
+    from .interfaces import _ParamStyle
+    from .interfaces import DBAPIConnection
+    from .interfaces import IsolationLevel
+    from .row import Row
+    from .url import URL
+    from ..event import _ListenerFnType
+    from ..pool import Pool
+    from ..pool import PoolProxiedConnection
+    from ..sql import Executable
+    from ..sql.compiler import Compiled
+    from ..sql.compiler import Linting
+    from ..sql.compiler import ResultColumnsEntry
+    from ..sql.dml import DMLState
+    from ..sql.dml import UpdateBase
+    from ..sql.elements import BindParameter
+    from ..sql.schema import Column
+    from ..sql.type_api import _BindProcessorType
+    from ..sql.type_api import _ResultProcessorType
+    from ..sql.type_api import TypeEngine
+
+# When we're handed literal SQL, ensure it's a SELECT query
+SERVER_SIDE_CURSOR_RE = re.compile(r"\s*SELECT", re.I | re.UNICODE)
+
+
+(
+    CACHE_HIT,
+    CACHE_MISS,
+    CACHING_DISABLED,
+    NO_CACHE_KEY,
+    NO_DIALECT_SUPPORT,
+) = list(CacheStats)
+
+
+class DefaultDialect(Dialect):
+    """Default implementation of Dialect"""
+
+    statement_compiler = compiler.SQLCompiler
+    ddl_compiler = compiler.DDLCompiler
+    type_compiler_cls = compiler.GenericTypeCompiler
+
+    preparer = compiler.IdentifierPreparer
+    supports_alter = True
+    supports_comments = False
+    supports_constraint_comments = False
+    inline_comments = False
+    supports_statement_cache = True
+
+    div_is_floordiv = True
+
+    bind_typing = interfaces.BindTyping.NONE
+
+    include_set_input_sizes: Optional[Set[Any]] = None
+    exclude_set_input_sizes: Optional[Set[Any]] = None
+
+    # the first value we'd get for an autoincrement column.
+    default_sequence_base = 1
+
+    # most DBAPIs happy with this for execute().
+    # not cx_oracle.
+    execute_sequence_format = tuple
+
+    supports_schemas = True
+    supports_views = True
+    supports_sequences = False
+    sequences_optional = False
+    preexecute_autoincrement_sequences = False
+    supports_identity_columns = False
+    postfetch_lastrowid = True
+    favor_returning_over_lastrowid = False
+    insert_null_pk_still_autoincrements = False
+    update_returning = False
+    delete_returning = False
+    update_returning_multifrom = False
+    delete_returning_multifrom = False
+    insert_returning = False
+
+    cte_follows_insert = False
+
+    supports_native_enum = False
+    supports_native_boolean = False
+    supports_native_uuid = False
+    returns_native_bytes = False
+
+    non_native_boolean_check_constraint = True
+
+    supports_simple_order_by_label = True
+
+    tuple_in_values = False
+
+    connection_characteristics = util.immutabledict(
+        {
+            "isolation_level": characteristics.IsolationLevelCharacteristic(),
+            "logging_token": characteristics.LoggingTokenCharacteristic(),
+        }
+    )
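+    # a hedged usage sketch: these characteristics are consumed from
+    # execution options of the same name, e.g.
+    #
+    #   conn = engine.connect().execution_options(
+    #       isolation_level="READ COMMITTED"
+    #   )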
+
+    engine_config_types: Mapping[str, Any] = util.immutabledict(
+        {
+            "pool_timeout": util.asint,
+            "echo": util.bool_or_str("debug"),
+            "echo_pool": util.bool_or_str("debug"),
+            "pool_recycle": util.asint,
+            "pool_size": util.asint,
+            "max_overflow": util.asint,
+            "future": util.asbool,
+        }
+    )
+
+    # whether the NUMERIC type returns decimal.Decimal;
+    # *not* the FLOAT type, however.
+    supports_native_decimal = False
+
+    name = "default"
+
+    # length at which to truncate
+    # any identifier.
+    max_identifier_length = 9999
+    _user_defined_max_identifier_length: Optional[int] = None
+
+    isolation_level: Optional[str] = None
+
+    # sub-categories of max_identifier_length.
+    # currently these accommodate MySQL, which allows alias names
+    # of 255 but DDL names only of 64.
+    max_index_name_length: Optional[int] = None
+    max_constraint_name_length: Optional[int] = None
+
+    supports_sane_rowcount = True
+    supports_sane_multi_rowcount = True
+    colspecs: MutableMapping[Type[TypeEngine[Any]], Type[TypeEngine[Any]]] = {}
+    default_paramstyle = "named"
+
+    supports_default_values = False
+    """dialect supports INSERT... DEFAULT VALUES syntax"""
+
+    supports_default_metavalue = False
+    """dialect supports INSERT... VALUES (DEFAULT) syntax"""
+
+    default_metavalue_token = "DEFAULT"
+    """for INSERT... VALUES (DEFAULT) syntax, the token to put in the
+    parenthesis."""
+
+    # not sure if this is a real thing but the compiler will deliver it
+    # if this is the only flag enabled.
+    supports_empty_insert = True
+    """dialect supports INSERT () VALUES ()"""
+
+    supports_multivalues_insert = False
+
+    use_insertmanyvalues: bool = False
+
+    use_insertmanyvalues_wo_returning: bool = False
+
+    insertmanyvalues_implicit_sentinel: InsertmanyvaluesSentinelOpts = (
+        InsertmanyvaluesSentinelOpts.NOT_SUPPORTED
+    )
+
+    insertmanyvalues_page_size: int = 1000
+    insertmanyvalues_max_parameters = 32700
+
+    supports_is_distinct_from = True
+
+    supports_server_side_cursors = False
+
+    server_side_cursors = False
+
+    # extra record-level locking features (#4860)
+    supports_for_update_of = False
+
+    server_version_info = None
+
+    default_schema_name: Optional[str] = None
+
+    # indicates symbol names are
+    # UPPERCASED if they are case insensitive
+    # within the database.
+    # if this is True, the methods normalize_name()
+    # and denormalize_name() must be provided.
+    requires_name_normalize = False
+
+    is_async = False
+
+    has_terminate = False
+
+    # TODO: this is not to be part of 2.0.  implement rudimentary binary
+    # literals for SQLite, PostgreSQL, MySQL only within
+    # _Binary.literal_processor
+    _legacy_binary_type_literal_encoding = "utf-8"
+
+    @util.deprecated_params(
+        empty_in_strategy=(
+            "1.4",
+            "The :paramref:`_sa.create_engine.empty_in_strategy` keyword is "
+            "deprecated, and no longer has any effect.  All IN expressions "
+            "are now rendered using "
+            'the "expanding parameter" strategy which renders a set of bound'
+            'expressions, or an "empty set" SELECT, at statement execution'
+            "time.",
+        ),
+        server_side_cursors=(
+            "1.4",
+            "The :paramref:`_sa.create_engine.server_side_cursors` parameter "
+            "is deprecated and will be removed in a future release.  Please "
+            "use the "
+            ":paramref:`_engine.Connection.execution_options.stream_results` "
+            "parameter.",
+        ),
+    )
+    def __init__(
+        self,
+        paramstyle: Optional[_ParamStyle] = None,
+        isolation_level: Optional[IsolationLevel] = None,
+        dbapi: Optional[ModuleType] = None,
+        implicit_returning: Literal[True] = True,
+        supports_native_boolean: Optional[bool] = None,
+        max_identifier_length: Optional[int] = None,
+        label_length: Optional[int] = None,
+        insertmanyvalues_page_size: Union[_NoArg, int] = _NoArg.NO_ARG,
+        use_insertmanyvalues: Optional[bool] = None,
+        # util.deprecated_params decorator cannot render the
+        # Linting.NO_LINTING constant
+        compiler_linting: Linting = int(compiler.NO_LINTING),  # type: ignore
+        server_side_cursors: bool = False,
+        **kwargs: Any,
+    ):
+        if server_side_cursors:
+            if not self.supports_server_side_cursors:
+                raise exc.ArgumentError(
+                    "Dialect %s does not support server side cursors" % self
+                )
+            else:
+                self.server_side_cursors = True
+
+        if getattr(self, "use_setinputsizes", False):
+            util.warn_deprecated(
+                "The dialect-level use_setinputsizes attribute is "
+                "deprecated.  Please use "
+                "bind_typing = BindTyping.SETINPUTSIZES",
+                "2.0",
+            )
+            self.bind_typing = interfaces.BindTyping.SETINPUTSIZES
+
+        self.positional = False
+        self._ischema = None
+
+        self.dbapi = dbapi
+
+        if paramstyle is not None:
+            self.paramstyle = paramstyle
+        elif self.dbapi is not None:
+            self.paramstyle = self.dbapi.paramstyle
+        else:
+            self.paramstyle = self.default_paramstyle
+        self.positional = self.paramstyle in (
+            "qmark",
+            "format",
+            "numeric",
+            "numeric_dollar",
+        )
+        self.identifier_preparer = self.preparer(self)
+        self._on_connect_isolation_level = isolation_level
+
+        legacy_tt_callable = getattr(self, "type_compiler", None)
+        if legacy_tt_callable is not None:
+            tt_callable = cast(
+                Type[compiler.GenericTypeCompiler],
+                self.type_compiler,
+            )
+        else:
+            tt_callable = self.type_compiler_cls
+
+        self.type_compiler_instance = self.type_compiler = tt_callable(self)
+
+        if supports_native_boolean is not None:
+            self.supports_native_boolean = supports_native_boolean
+
+        self._user_defined_max_identifier_length = max_identifier_length
+        if self._user_defined_max_identifier_length:
+            self.max_identifier_length = (
+                self._user_defined_max_identifier_length
+            )
+        self.label_length = label_length
+        self.compiler_linting = compiler_linting
+
+        if use_insertmanyvalues is not None:
+            self.use_insertmanyvalues = use_insertmanyvalues
+
+        if insertmanyvalues_page_size is not _NoArg.NO_ARG:
+            self.insertmanyvalues_page_size = insertmanyvalues_page_size
+
+    @property
+    @util.deprecated(
+        "2.0",
+        "full_returning is deprecated, please use insert_returning, "
+        "update_returning, delete_returning",
+    )
+    def full_returning(self):
+        return (
+            self.insert_returning
+            and self.update_returning
+            and self.delete_returning
+        )
+
+    @util.memoized_property
+    def insert_executemany_returning(self):
+        """Default implementation for insert_executemany_returning, if not
+        otherwise overridden by the specific dialect.
+
+        The default dialect determines "insert_executemany_returning" is
+        available if the dialect in use has opted into using the
+        "use_insertmanyvalues" feature. If they haven't opted into that, then
+        this attribute is False, unless the dialect in question overrides this
+        and provides some other implementation (such as the Oracle Database
+        dialects).
+
+        """
+        return self.insert_returning and self.use_insertmanyvalues
+
+    @util.memoized_property
+    def insert_executemany_returning_sort_by_parameter_order(self):
+        """Default implementation for
+        insert_executemany_returning_deterministic_order, if not otherwise
+        overridden by the specific dialect.
+
+        The default dialect determines "insert_executemany_returning" can have
+        deterministic order only if the dialect in use has opted into using the
+        "use_insertmanyvalues" feature, which implements deterministic ordering
+        using client-side sentinel columns only by default.  The
+        "insertmanyvalues" feature also offers alternate forms that can
+        use server-generated PK values as "sentinels", but those are only
+        used if the :attr:`.Dialect.insertmanyvalues_implicit_sentinel`
+        bitflag enables those alternate SQL forms, which are disabled
+        by default.
+
+        If the dialect in use hasn't opted into that, then this attribute is
+        False, unless the dialect in question overrides this and provides some
+        other implementation (such as the Oracle Database dialects).
+
+        """
+        return self.insert_returning and self.use_insertmanyvalues
+
+    update_executemany_returning = False
+    delete_executemany_returning = False
+
+    @util.memoized_property
+    def loaded_dbapi(self) -> ModuleType:
+        if self.dbapi is None:
+            raise exc.InvalidRequestError(
+                f"Dialect {self} does not have a Python DBAPI established "
+                "and cannot be used for actual database interaction"
+            )
+        return self.dbapi
+
+    @util.memoized_property
+    def _bind_typing_render_casts(self):
+        return self.bind_typing is interfaces.BindTyping.RENDER_CASTS
+
+    def _ensure_has_table_connection(self, arg):
+        if not isinstance(arg, Connection):
+            raise exc.ArgumentError(
+                "The argument passed to Dialect.has_table() should be a "
+                "%s, got %s. "
+                "Additionally, the Dialect.has_table() method is for "
+                "internal dialect "
+                "use only; please use "
+                "``inspect(some_engine).has_table(<tablename>>)`` "
+                "for public API use." % (Connection, type(arg))
+            )
+
+    @util.memoized_property
+    def _supports_statement_cache(self):
+        ssc = self.__class__.__dict__.get("supports_statement_cache", None)
+        if ssc is None:
+            util.warn(
+                "Dialect %s:%s will not make use of SQL compilation caching "
+                "as it does not set the 'supports_statement_cache' attribute "
+                "to ``True``.  This can have "
+                "significant performance implications including some "
+                "performance degradations in comparison to prior SQLAlchemy "
+                "versions.  Dialect maintainers should seek to set this "
+                "attribute to True after appropriate development and testing "
+                "for SQLAlchemy 1.4 caching support.   Alternatively, this "
+                "attribute may be set to False which will disable this "
+                "warning." % (self.name, self.driver),
+                code="cprf",
+            )
+
+        return bool(ssc)
+
+    @util.memoized_property
+    def _type_memos(self):
+        return weakref.WeakKeyDictionary()
+
+    @property
+    def dialect_description(self):
+        return self.name + "+" + self.driver
+
+    @property
+    def supports_sane_rowcount_returning(self):
+        """True if this dialect supports sane rowcount even if RETURNING is
+        in use.
+
+        For dialects that don't support RETURNING, this is synonymous with
+        ``supports_sane_rowcount``.
+
+        """
+        return self.supports_sane_rowcount
+
+    @classmethod
+    def get_pool_class(cls, url: URL) -> Type[Pool]:
+        return getattr(cls, "poolclass", pool.QueuePool)
+
+    def get_dialect_pool_class(self, url: URL) -> Type[Pool]:
+        return self.get_pool_class(url)
+
+    @classmethod
+    def load_provisioning(cls):
+        package = ".".join(cls.__module__.split(".")[0:-1])
+        try:
+            __import__(package + ".provision")
+        except ImportError:
+            pass
+
+    def _builtin_onconnect(self) -> Optional[_ListenerFnType]:
+        if self._on_connect_isolation_level is not None:
+
+            def builtin_connect(dbapi_conn, conn_rec):
+                self._assert_and_set_isolation_level(
+                    dbapi_conn, self._on_connect_isolation_level
+                )
+
+            return builtin_connect
+        else:
+            return None
+
+    def initialize(self, connection):
+        try:
+            self.server_version_info = self._get_server_version_info(
+                connection
+            )
+        except NotImplementedError:
+            self.server_version_info = None
+        try:
+            self.default_schema_name = self._get_default_schema_name(
+                connection
+            )
+        except NotImplementedError:
+            self.default_schema_name = None
+
+        try:
+            self.default_isolation_level = self.get_default_isolation_level(
+                connection.connection.dbapi_connection
+            )
+        except NotImplementedError:
+            self.default_isolation_level = None
+
+        if not self._user_defined_max_identifier_length:
+            max_ident_length = self._check_max_identifier_length(connection)
+            if max_ident_length:
+                self.max_identifier_length = max_ident_length
+
+        if (
+            self.label_length
+            and self.label_length > self.max_identifier_length
+        ):
+            raise exc.ArgumentError(
+                "Label length of %d is greater than this dialect's"
+                " maximum identifier length of %d"
+                % (self.label_length, self.max_identifier_length)
+            )
+
+    def on_connect(self):
+        # inherits the docstring from interfaces.Dialect.on_connect
+        return None
+
+    def _check_max_identifier_length(self, connection):
+        """Perform a connection / server version specific check to determine
+        the max_identifier_length.
+
+        If the dialect's class level max_identifier_length should be used,
+        can return None.
+
+        .. versionadded:: 1.3.9
+
+        """
+        return None
+
+    def get_default_isolation_level(self, dbapi_conn):
+        """Given a DBAPI connection, return its isolation level, or
+        a default isolation level if one cannot be retrieved.
+
+        May be overridden by subclasses in order to provide a
+        "fallback" isolation level for databases that cannot reliably
+        retrieve the actual isolation level.
+
+        By default, calls the :meth:`_engine.Dialect.get_isolation_level`
+        method, propagating any exceptions raised.
+
+        .. versionadded:: 1.3.22
+
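+        E.g., a hedged sketch of a dialect that cannot introspect the
+        level and substitutes a fixed fallback::
+
+            def get_default_isolation_level(self, dbapi_conn):
+                return "READ COMMITTED"
+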
+        """
+        return self.get_isolation_level(dbapi_conn)
+
+    def type_descriptor(self, typeobj):
+        """Provide a database-specific :class:`.TypeEngine` object, given
+        the generic object which comes from the types module.
+
+        This method looks for a dictionary called
+        ``colspecs`` as a class or instance-level variable,
+        and passes on to :func:`_types.adapt_type`.
+
+        """
+        return type_api.adapt_type(typeobj, self.colspecs)
+
+    def has_index(self, connection, table_name, index_name, schema=None, **kw):
+        if not self.has_table(connection, table_name, schema=schema, **kw):
+            return False
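+        # otherwise, fall back to reflecting all indexes on the table
+        # and scanning for a matching name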
+        for idx in self.get_indexes(
+            connection, table_name, schema=schema, **kw
+        ):
+            if idx["name"] == index_name:
+                return True
+        else:
+            return False
+
+    def has_schema(
+        self, connection: Connection, schema_name: str, **kw: Any
+    ) -> bool:
+        return schema_name in self.get_schema_names(connection, **kw)
+
+    def validate_identifier(self, ident):
+        if len(ident) > self.max_identifier_length:
+            raise exc.IdentifierError(
+                "Identifier '%s' exceeds maximum length of %d characters"
+                % (ident, self.max_identifier_length)
+            )
+
+    def connect(self, *cargs, **cparams):
+        # inherits the docstring from interfaces.Dialect.connect
+        return self.loaded_dbapi.connect(*cargs, **cparams)
+
+    def create_connect_args(self, url):
+        # inherits the docstring from interfaces.Dialect.create_connect_args
+        opts = url.translate_connect_args()
+        opts.update(url.query)
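+        # e.g. (illustrative): a URL such as
+        # "dialect+driver://user:pass@host:5432/dbname" yields
+        # ([], {"username": "user", "password": "pass", "host": "host",
+        # "port": 5432, "database": "dbname"})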
+        return ([], opts)
+
+    def set_engine_execution_options(
+        self, engine: Engine, opts: Mapping[str, Any]
+    ) -> None:
+        supported_names = set(self.connection_characteristics).intersection(
+            opts
+        )
+        if supported_names:
+            characteristics: Mapping[str, Any] = util.immutabledict(
+                (name, opts[name]) for name in supported_names
+            )
+
+            @event.listens_for(engine, "engine_connect")
+            def set_connection_characteristics(connection):
+                self._set_connection_characteristics(
+                    connection, characteristics
+                )
+
+    def set_connection_execution_options(
+        self, connection: Connection, opts: Mapping[str, Any]
+    ) -> None:
+        supported_names = set(self.connection_characteristics).intersection(
+            opts
+        )
+        if supported_names:
+            characteristics: Mapping[str, Any] = util.immutabledict(
+                (name, opts[name]) for name in supported_names
+            )
+            self._set_connection_characteristics(connection, characteristics)
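+
+    # Connection characteristics such as "isolation_level" arrive via this
+    # path; a typical user-facing request (illustrative):
+    #
+    #     with engine.connect() as conn:
+    #         conn = conn.execution_options(isolation_level="SERIALIZABLE")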
+
+    def _set_connection_characteristics(self, connection, characteristics):
+        characteristic_values = [
+            (name, self.connection_characteristics[name], value)
+            for name, value in characteristics.items()
+        ]
+
+        if connection.in_transaction():
+            trans_objs = [
+                (name, obj)
+                for name, obj, _ in characteristic_values
+                if obj.transactional
+            ]
+            if trans_objs:
+                raise exc.InvalidRequestError(
+                    "This connection has already initialized a SQLAlchemy "
+                    "Transaction() object via begin() or autobegin; "
+                    "%s may not be altered unless rollback() or commit() "
+                    "is called first."
+                    % (", ".join(name for name, obj in trans_objs))
+                )
+
+        dbapi_connection = connection.connection.dbapi_connection
+        for _, characteristic, value in characteristic_values:
+            characteristic.set_connection_characteristic(
+                self, connection, dbapi_connection, value
+            )
+        connection.connection._connection_record.finalize_callback.append(
+            functools.partial(self._reset_characteristics, characteristics)
+        )
+
+    def _reset_characteristics(self, characteristics, dbapi_connection):
+        for characteristic_name in characteristics:
+            characteristic = self.connection_characteristics[
+                characteristic_name
+            ]
+            characteristic.reset_characteristic(self, dbapi_connection)
+
+    def do_begin(self, dbapi_connection):
+        pass
+
+    def do_rollback(self, dbapi_connection):
+        dbapi_connection.rollback()
+
+    def do_commit(self, dbapi_connection):
+        dbapi_connection.commit()
+
+    def do_terminate(self, dbapi_connection):
+        self.do_close(dbapi_connection)
+
+    def do_close(self, dbapi_connection):
+        dbapi_connection.close()
+
+    @util.memoized_property
+    def _dialect_specific_select_one(self):
+        return str(expression.select(1).compile(dialect=self))
+
+    def _do_ping_w_event(self, dbapi_connection: DBAPIConnection) -> bool:
+        try:
+            return self.do_ping(dbapi_connection)
+        except self.loaded_dbapi.Error as err:
+            is_disconnect = self.is_disconnect(err, dbapi_connection, None)
+
+            if self._has_events:
+                try:
+                    Connection._handle_dbapi_exception_noconnection(
+                        err,
+                        self,
+                        is_disconnect=is_disconnect,
+                        invalidate_pool_on_disconnect=False,
+                        is_pre_ping=True,
+                    )
+                except exc.StatementError as new_err:
+                    is_disconnect = new_err.connection_invalidated
+
+            if is_disconnect:
+                return False
+            else:
+                raise
+
+    def do_ping(self, dbapi_connection: DBAPIConnection) -> bool:
+        cursor = dbapi_connection.cursor()
+        try:
+            cursor.execute(self._dialect_specific_select_one)
+        finally:
+            cursor.close()
+        return True
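+
+    # A dialect whose driver exposes a native ping could override do_ping();
+    # a sketch assuming a hypothetical driver-level ping() method:
+    #
+    #     class MyDialect(DefaultDialect):
+    #         def do_ping(self, dbapi_connection):
+    #             dbapi_connection.ping()  # assumed driver method
+    #             return True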
+
+    def create_xid(self):
+        """Create a random two-phase transaction ID.
+
+        This id will be passed to do_begin_twophase(), do_rollback_twophase(),
+        do_commit_twophase().  Its format is unspecified.
+        """
+
+        return "_sa_%032x" % random.randint(0, 2**128)
+
+    def do_savepoint(self, connection, name):
+        connection.execute(expression.SavepointClause(name))
+
+    def do_rollback_to_savepoint(self, connection, name):
+        connection.execute(expression.RollbackToSavepointClause(name))
+
+    def do_release_savepoint(self, connection, name):
+        connection.execute(expression.ReleaseSavepointClause(name))
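+
+    # These three hooks back SAVEPOINT support; typical user-facing usage
+    # (illustrative):
+    #
+    #     with engine.begin() as conn:
+    #         nested = conn.begin_nested()  # emits SAVEPOINT via do_savepoint
+    #         ...
+    #         nested.rollback()  # emits ROLLBACK TO SAVEPOINT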
+
+    def _deliver_insertmanyvalues_batches(
+        self,
+        connection,
+        cursor,
+        statement,
+        parameters,
+        generic_setinputsizes,
+        context,
+    ):
+        context = cast(DefaultExecutionContext, context)
+        compiled = cast(SQLCompiler, context.compiled)
+
+        _composite_sentinel_proc: Sequence[
+            Optional[_ResultProcessorType[Any]]
+        ] = ()
+        _scalar_sentinel_proc: Optional[_ResultProcessorType[Any]] = None
+        _sentinel_proc_initialized: bool = False
+
+        compiled_parameters = context.compiled_parameters
+
+        imv = compiled._insertmanyvalues
+        assert imv is not None
+
+        is_returning: Final[bool] = bool(compiled.effective_returning)
+        batch_size = context.execution_options.get(
+            "insertmanyvalues_page_size", self.insertmanyvalues_page_size
+        )
+
+        if compiled.schema_translate_map:
+            schema_translate_map = context.execution_options.get(
+                "schema_translate_map", {}
+            )
+        else:
+            schema_translate_map = None
+
+        if is_returning:
+            result: Optional[List[Any]] = []
+            context._insertmanyvalues_rows = result
+
+            sort_by_parameter_order = imv.sort_by_parameter_order
+
+        else:
+            sort_by_parameter_order = False
+            result = None
+
+        for imv_batch in compiled._deliver_insertmanyvalues_batches(
+            statement,
+            parameters,
+            compiled_parameters,
+            generic_setinputsizes,
+            batch_size,
+            sort_by_parameter_order,
+            schema_translate_map,
+        ):
+            yield imv_batch
+
+            if is_returning:
+
+                try:
+                    rows = context.fetchall_for_returning(cursor)
+                except BaseException as be:
+                    connection._handle_dbapi_exception(
+                        be,
+                        sql_util._long_statement(imv_batch.replaced_statement),
+                        imv_batch.replaced_parameters,
+                        None,
+                        context,
+                        is_sub_exec=True,
+                    )
+
+                # I would have thought "is_returning: Final[bool]"
+                # would have assured this but pylance thinks not
+                assert result is not None
+
+                if imv.num_sentinel_columns and not imv_batch.is_downgraded:
+                    composite_sentinel = imv.num_sentinel_columns > 1
+                    if imv.implicit_sentinel:
+                        # for implicit sentinel, which is currently single-col
+                        # integer autoincrement, do a simple sort.
+                        assert not composite_sentinel
+                        result.extend(
+                            sorted(rows, key=operator.itemgetter(-1))
+                        )
+                        continue
+
+                    # otherwise, create dictionaries to match up batches
+                    # with parameters
+                    assert imv.sentinel_param_keys
+                    assert imv.sentinel_columns
+
+                    _nsc = imv.num_sentinel_columns
+
+                    if not _sentinel_proc_initialized:
+                        if composite_sentinel:
+                            _composite_sentinel_proc = [
+                                col.type._cached_result_processor(
+                                    self, cursor_desc[1]
+                                )
+                                for col, cursor_desc in zip(
+                                    imv.sentinel_columns,
+                                    cursor.description[-_nsc:],
+                                )
+                            ]
+                        else:
+                            _scalar_sentinel_proc = (
+                                imv.sentinel_columns[0]
+                            ).type._cached_result_processor(
+                                self, cursor.description[-1][1]
+                            )
+                        _sentinel_proc_initialized = True
+
+                    rows_by_sentinel: Union[
+                        Dict[Tuple[Any, ...], Any],
+                        Dict[Any, Any],
+                    ]
+                    if composite_sentinel:
+                        rows_by_sentinel = {
+                            tuple(
+                                (proc(val) if proc else val)
+                                for val, proc in zip(
+                                    row[-_nsc:], _composite_sentinel_proc
+                                )
+                            ): row
+                            for row in rows
+                        }
+                    elif _scalar_sentinel_proc:
+                        rows_by_sentinel = {
+                            _scalar_sentinel_proc(row[-1]): row for row in rows
+                        }
+                    else:
+                        rows_by_sentinel = {row[-1]: row for row in rows}
+
+                    if len(rows_by_sentinel) != len(imv_batch.batch):
+                        # see test_insert_exec.py::
+                        # IMVSentinelTest::test_sentinel_incorrect_rowcount
+                        # for coverage / demonstration
+                        raise exc.InvalidRequestError(
+                            f"Sentinel-keyed result set did not produce "
+                            f"correct number of rows {len(imv_batch.batch)}; "
+                            "produced "
+                            f"{len(rows_by_sentinel)}.  Please ensure the "
+                            "sentinel column is fully unique and populated in "
+                            "all cases."
+                        )
+
+                    try:
+                        ordered_rows = [
+                            rows_by_sentinel[sentinel_keys]
+                            for sentinel_keys in imv_batch.sentinel_values
+                        ]
+                    except KeyError as ke:
+                        # see test_insert_exec.py::
+                        # IMVSentinelTest::test_sentinel_cant_match_keys
+                        # for coverage / demonstration
+                        raise exc.InvalidRequestError(
+                            f"Can't match sentinel values in result set to "
+                            f"parameter sets; key {ke.args[0]!r} was not "
+                            "found. "
+                            "There may be a mismatch between the datatype "
+                            "passed to the DBAPI driver vs. that which it "
+                            "returns in a result row.  Ensure the given "
+                            "Python value matches the expected result type "
+                            "*exactly*, taking care to not rely upon implicit "
+                            "conversions which may occur such as when using "
+                            "strings in place of UUID or integer values, etc. "
+                        ) from ke
+
+                    result.extend(ordered_rows)
+
+                else:
+                    result.extend(rows)
+
+    def do_executemany(self, cursor, statement, parameters, context=None):
+        cursor.executemany(statement, parameters)
+
+    def do_execute(self, cursor, statement, parameters, context=None):
+        cursor.execute(statement, parameters)
+
+    def do_execute_no_params(self, cursor, statement, context=None):
+        cursor.execute(statement)
+
+    def is_disconnect(self, e, connection, cursor):
+        return False
+
+    @util.memoized_instancemethod
+    def _gen_allowed_isolation_levels(self, dbapi_conn):
+        try:
+            raw_levels = list(self.get_isolation_level_values(dbapi_conn))
+        except NotImplementedError:
+            return None
+        else:
+            normalized_levels = [
+                level.replace("_", " ").upper() for level in raw_levels
+            ]
+            if raw_levels != normalized_levels:
+                raise ValueError(
+                    f"Dialect {self.name!r} get_isolation_level_values() "
+                    f"method should return names as UPPERCASE using spaces, "
+                    f"not underscores; got "
+                    f"{sorted(set(raw_levels).difference(normalized_levels))}"
+                )
+            return tuple(normalized_levels)
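+
+    # The contract checked above, sketched with invented level names: a
+    # dialect's get_isolation_level_values() should return uppercase,
+    # space-delimited names, e.g.:
+    #
+    #     def get_isolation_level_values(self, dbapi_conn):
+    #         return ["READ COMMITTED", "SERIALIZABLE", "AUTOCOMMIT"]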
+
+    def _assert_and_set_isolation_level(self, dbapi_conn, level):
+        level = level.replace("_", " ").upper()
+
+        _allowed_isolation_levels = self._gen_allowed_isolation_levels(
+            dbapi_conn
+        )
+        if (
+            _allowed_isolation_levels
+            and level not in _allowed_isolation_levels
+        ):
+            raise exc.ArgumentError(
+                f"Invalid value {level!r} for isolation_level. "
+                f"Valid isolation levels for {self.name!r} are "
+                f"{', '.join(_allowed_isolation_levels)}"
+            )
+
+        self.set_isolation_level(dbapi_conn, level)
+
+    def reset_isolation_level(self, dbapi_conn):
+        if self._on_connect_isolation_level is not None:
+            assert (
+                self._on_connect_isolation_level == "AUTOCOMMIT"
+                or self._on_connect_isolation_level
+                == self.default_isolation_level
+            )
+            self._assert_and_set_isolation_level(
+                dbapi_conn, self._on_connect_isolation_level
+            )
+        else:
+            assert self.default_isolation_level is not None
+            self._assert_and_set_isolation_level(
+                dbapi_conn,
+                self.default_isolation_level,
+            )
+
+    def normalize_name(self, name):
+        if name is None:
+            return None
+
+        name_lower = name.lower()
+        name_upper = name.upper()
+
+        if name_upper == name_lower:
+            # name has no upper/lower conversion, e.g. non-European characters.
+            # return unchanged
+            return name
+        elif name_upper == name and not (
+            self.identifier_preparer._requires_quotes
+        )(name_lower):
+            # name is all uppercase and doesn't require quoting; normalize
+            # to all lower case
+            return name_lower
+        elif name_lower == name:
+            # name is all lower case, which if denormalized means we need to
+            # force quoting on it
+            return quoted_name(name, quote=True)
+        else:
+            # name is mixed case, meaning it will be quoted in SQL when
+            # used later; no normalization applies
+            return name
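+
+    # Behavior sketch for a backend that stores case-insensitive names as
+    # uppercase (values illustrative):
+    #
+    #     normalize_name("EMPLOYEES")  # -> "employees"
+    #     normalize_name("employees")  # -> quoted_name("employees",
+    #                                  #                quote=True)
+    #     normalize_name("Employees")  # -> "Employees" (unchanged; will be
+    #                                  #    quoted when rendered)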
+
+    def denormalize_name(self, name):
+        if name is None:
+            return None
+
+        name_lower = name.lower()
+        name_upper = name.upper()
+
+        if name_upper == name_lower:
+            # name has no upper/lower conversion, e.g. non-European characters.
+            # return unchanged
+            return name
+        elif name_lower == name and not (
+            self.identifier_preparer._requires_quotes
+        )(name_lower):
+            name = name_upper
+        return name
+
+    def get_driver_connection(self, connection):
+        return connection
+
+    def _overrides_default(self, method):
+        return (
+            getattr(type(self), method).__code__
+            is not getattr(DefaultDialect, method).__code__
+        )
+
+    def _default_multi_reflect(
+        self,
+        single_tbl_method,
+        connection,
+        kind,
+        schema,
+        filter_names,
+        scope,
+        **kw,
+    ):
+        names_fns = []
+        temp_names_fns = []
+        if ObjectKind.TABLE in kind:
+            names_fns.append(self.get_table_names)
+            temp_names_fns.append(self.get_temp_table_names)
+        if ObjectKind.VIEW in kind:
+            names_fns.append(self.get_view_names)
+            temp_names_fns.append(self.get_temp_view_names)
+        if ObjectKind.MATERIALIZED_VIEW in kind:
+            names_fns.append(self.get_materialized_view_names)
+            # no temp materialized view at the moment
+            # temp_names_fns.append(self.get_temp_materialized_view_names)
+
+        unreflectable = kw.pop("unreflectable", {})
+
+        if (
+            filter_names
+            and scope is ObjectScope.ANY
+            and kind is ObjectKind.ANY
+        ):
+            # if names are given and no qualification on type of table
+            # (i.e. the Table(..., autoload) case), take the names as given,
+            # don't run names queries. If a table does not exist,
+            # NoSuchTableError is raised and it's skipped
+
+            # this also suits the case for mssql where we can reflect
+            # individual temp tables but there's no temp_names_fn
+            names = filter_names
+        else:
+            names = []
+            name_kw = {"schema": schema, **kw}
+            fns = []
+            if ObjectScope.DEFAULT in scope:
+                fns.extend(names_fns)
+            if ObjectScope.TEMPORARY in scope:
+                fns.extend(temp_names_fns)
+
+            for fn in fns:
+                try:
+                    names.extend(fn(connection, **name_kw))
+                except NotImplementedError:
+                    pass
+
+        if filter_names:
+            filter_names = set(filter_names)
+
+        # iterate over all the tables/views and call the single table method
+        for table in names:
+            if not filter_names or table in filter_names:
+                key = (schema, table)
+                try:
+                    yield (
+                        key,
+                        single_tbl_method(
+                            connection, table, schema=schema, **kw
+                        ),
+                    )
+                except exc.UnreflectableTableError as err:
+                    if key not in unreflectable:
+                        unreflectable[key] = err
+                except exc.NoSuchTableError:
+                    pass
+
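+    # The generator above yields ``((schema, name), reflected_data)`` pairs;
+    # an illustrative consumer of one of the ``get_multi_*`` wrappers below:
+    #
+    #     for (schema, name), cols in dialect.get_multi_columns(
+    #         conn,
+    #         schema=None,
+    #         filter_names=None,
+    #         kind=ObjectKind.TABLE,
+    #         scope=ObjectScope.DEFAULT,
+    #     ):
+    #         print(schema, name, [c["name"] for c in cols])
+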
+    def get_multi_table_options(self, connection, **kw):
+        return self._default_multi_reflect(
+            self.get_table_options, connection, **kw
+        )
+
+    def get_multi_columns(self, connection, **kw):
+        return self._default_multi_reflect(self.get_columns, connection, **kw)
+
+    def get_multi_pk_constraint(self, connection, **kw):
+        return self._default_multi_reflect(
+            self.get_pk_constraint, connection, **kw
+        )
+
+    def get_multi_foreign_keys(self, connection, **kw):
+        return self._default_multi_reflect(
+            self.get_foreign_keys, connection, **kw
+        )
+
+    def get_multi_indexes(self, connection, **kw):
+        return self._default_multi_reflect(self.get_indexes, connection, **kw)
+
+    def get_multi_unique_constraints(self, connection, **kw):
+        return self._default_multi_reflect(
+            self.get_unique_constraints, connection, **kw
+        )
+
+    def get_multi_check_constraints(self, connection, **kw):
+        return self._default_multi_reflect(
+            self.get_check_constraints, connection, **kw
+        )
+
+    def get_multi_table_comment(self, connection, **kw):
+        return self._default_multi_reflect(
+            self.get_table_comment, connection, **kw
+        )
+
+
+class StrCompileDialect(DefaultDialect):
+    statement_compiler = compiler.StrSQLCompiler
+    ddl_compiler = compiler.DDLCompiler
+    type_compiler_cls = compiler.StrSQLTypeCompiler
+    preparer = compiler.IdentifierPreparer
+
+    insert_returning = True
+    update_returning = True
+    delete_returning = True
+
+    supports_statement_cache = True
+
+    supports_identity_columns = True
+
+    supports_sequences = True
+    sequences_optional = True
+    preexecute_autoincrement_sequences = False
+
+    supports_native_boolean = True
+
+    supports_multivalues_insert = True
+    supports_simple_order_by_label = True
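+
+    # This dialect backs generic string compilation, e.g. what str(stmt)
+    # produces when a statement is not bound to any real dialect
+    # (illustrative):
+    #
+    #     from sqlalchemy import column, select, table
+    #     t = table("t", column("q"))
+    #     print(select(t.c.q))  # compiled via StrSQLCompiler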
+
+
+class DefaultExecutionContext(ExecutionContext):
+    isinsert = False
+    isupdate = False
+    isdelete = False
+    is_crud = False
+    is_text = False
+    isddl = False
+
+    execute_style: ExecuteStyle = ExecuteStyle.EXECUTE
+
+    compiled: Optional[Compiled] = None
+    result_column_struct: Optional[
+        Tuple[List[ResultColumnsEntry], bool, bool, bool, bool]
+    ] = None
+    returned_default_rows: Optional[Sequence[Row[Any]]] = None
+
+    execution_options: _ExecuteOptions = util.EMPTY_DICT
+
+    cursor_fetch_strategy = _cursor._DEFAULT_FETCH
+
+    invoked_statement: Optional[Executable] = None
+
+    _is_implicit_returning = False
+    _is_explicit_returning = False
+    _is_supplemental_returning = False
+    _is_server_side = False
+
+    _soft_closed = False
+
+    _rowcount: Optional[int] = None
+
+    # a hook for SQLite's translation of
+    # result column names
+    # NOTE: pyhive is using this hook, can't remove it :(
+    _translate_colname: Optional[Callable[[str], str]] = None
+
+    _expanded_parameters: Mapping[str, List[str]] = util.immutabledict()
+    """used by set_input_sizes().
+
+    This collection comes from ``ExpandedState.parameter_expansion``.
+
+    """
+
+    cache_hit = NO_CACHE_KEY
+
+    root_connection: Connection
+    _dbapi_connection: PoolProxiedConnection
+    dialect: Dialect
+    unicode_statement: str
+    cursor: DBAPICursor
+    compiled_parameters: List[_MutableCoreSingleExecuteParams]
+    parameters: _DBAPIMultiExecuteParams
+    extracted_parameters: Optional[Sequence[BindParameter[Any]]]
+
+    _empty_dict_params = cast("Mapping[str, Any]", util.EMPTY_DICT)
+
+    _insertmanyvalues_rows: Optional[List[Tuple[Any, ...]]] = None
+    _num_sentinel_cols: int = 0
+
+    @classmethod
+    def _init_ddl(
+        cls,
+        dialect: Dialect,
+        connection: Connection,
+        dbapi_connection: PoolProxiedConnection,
+        execution_options: _ExecuteOptions,
+        compiled_ddl: DDLCompiler,
+    ) -> ExecutionContext:
+        """Initialize execution context for an ExecutableDDLElement
+        construct."""
+
+        self = cls.__new__(cls)
+        self.root_connection = connection
+        self._dbapi_connection = dbapi_connection
+        self.dialect = connection.dialect
+
+        self.compiled = compiled = compiled_ddl
+        self.isddl = True
+
+        self.execution_options = execution_options
+
+        self.unicode_statement = str(compiled)
+        if compiled.schema_translate_map:
+            schema_translate_map = self.execution_options.get(
+                "schema_translate_map", {}
+            )
+
+            rst = compiled.preparer._render_schema_translates
+            self.unicode_statement = rst(
+                self.unicode_statement, schema_translate_map
+            )
+
+        self.statement = self.unicode_statement
+
+        self.cursor = self.create_cursor()
+        self.compiled_parameters = []
+
+        if dialect.positional:
+            self.parameters = [dialect.execute_sequence_format()]
+        else:
+            self.parameters = [self._empty_dict_params]
+
+        return self
+
+    @classmethod
+    def _init_compiled(
+        cls,
+        dialect: Dialect,
+        connection: Connection,
+        dbapi_connection: PoolProxiedConnection,
+        execution_options: _ExecuteOptions,
+        compiled: SQLCompiler,
+        parameters: _CoreMultiExecuteParams,
+        invoked_statement: Executable,
+        extracted_parameters: Optional[Sequence[BindParameter[Any]]],
+        cache_hit: CacheStats = CacheStats.CACHING_DISABLED,
+    ) -> ExecutionContext:
+        """Initialize execution context for a Compiled construct."""
+
+        self = cls.__new__(cls)
+        self.root_connection = connection
+        self._dbapi_connection = dbapi_connection
+        self.dialect = connection.dialect
+        self.extracted_parameters = extracted_parameters
+        self.invoked_statement = invoked_statement
+        self.compiled = compiled
+        self.cache_hit = cache_hit
+
+        self.execution_options = execution_options
+
+        self.result_column_struct = (
+            compiled._result_columns,
+            compiled._ordered_columns,
+            compiled._textual_ordered_columns,
+            compiled._ad_hoc_textual,
+            compiled._loose_column_name_matching,
+        )
+
+        self.isinsert = ii = compiled.isinsert
+        self.isupdate = iu = compiled.isupdate
+        self.isdelete = id_ = compiled.isdelete
+        self.is_text = compiled.isplaintext
+
+        if ii or iu or id_:
+            dml_statement = compiled.compile_state.statement  # type: ignore
+            if TYPE_CHECKING:
+                assert isinstance(dml_statement, UpdateBase)
+            self.is_crud = True
+            self._is_explicit_returning = ier = bool(dml_statement._returning)
+            self._is_implicit_returning = iir = bool(
+                compiled.implicit_returning
+            )
+            if iir and dml_statement._supplemental_returning:
+                self._is_supplemental_returning = True
+
+            # don't mix implicit and explicit returning
+            assert not (iir and ier)
+
+            if (ier or iir) and compiled.for_executemany:
+                if ii and not self.dialect.insert_executemany_returning:
+                    raise exc.InvalidRequestError(
+                        f"Dialect {self.dialect.dialect_description} with "
+                        f"current server capabilities does not support "
+                        "INSERT..RETURNING when executemany is used"
+                    )
+                elif (
+                    ii
+                    and dml_statement._sort_by_parameter_order
+                    and not self.dialect.insert_executemany_returning_sort_by_parameter_order  # noqa: E501
+                ):
+                    raise exc.InvalidRequestError(
+                        f"Dialect {self.dialect.dialect_description} with "
+                        f"current server capabilities does not support "
+                        "INSERT..RETURNING with deterministic row ordering "
+                        "when executemany is used"
+                    )
+                elif (
+                    ii
+                    and self.dialect.use_insertmanyvalues
+                    and not compiled._insertmanyvalues
+                ):
+                    raise exc.InvalidRequestError(
+                        'Statement does not have "insertmanyvalues" '
+                        "enabled, can't use INSERT..RETURNING with "
+                        "executemany in this case."
+                    )
+                elif iu and not self.dialect.update_executemany_returning:
+                    raise exc.InvalidRequestError(
+                        f"Dialect {self.dialect.dialect_description} with "
+                        f"current server capabilities does not support "
+                        "UPDATE..RETURNING when executemany is used"
+                    )
+                elif id_ and not self.dialect.delete_executemany_returning:
+                    raise exc.InvalidRequestError(
+                        f"Dialect {self.dialect.dialect_description} with "
+                        f"current server capabilities does not support "
+                        "DELETE..RETURNING when executemany is used"
+                    )
+
+        if not parameters:
+            self.compiled_parameters = [
+                compiled.construct_params(
+                    extracted_parameters=extracted_parameters,
+                    escape_names=False,
+                )
+            ]
+        else:
+            self.compiled_parameters = [
+                compiled.construct_params(
+                    m,
+                    escape_names=False,
+                    _group_number=grp,
+                    extracted_parameters=extracted_parameters,
+                )
+                for grp, m in enumerate(parameters)
+            ]
+
+            if len(parameters) > 1:
+                if self.isinsert and compiled._insertmanyvalues:
+                    self.execute_style = ExecuteStyle.INSERTMANYVALUES
+
+                    imv = compiled._insertmanyvalues
+                    if imv.sentinel_columns is not None:
+                        self._num_sentinel_cols = imv.num_sentinel_columns
+                else:
+                    self.execute_style = ExecuteStyle.EXECUTEMANY
+
+        self.unicode_statement = compiled.string
+
+        self.cursor = self.create_cursor()
+
+        if self.compiled.insert_prefetch or self.compiled.update_prefetch:
+            self._process_execute_defaults()
+
+        processors = compiled._bind_processors
+
+        flattened_processors: Mapping[
+            str, _BindProcessorType[Any]
+        ] = processors  # type: ignore[assignment]
+
+        if compiled.literal_execute_params or compiled.post_compile_params:
+            if self.executemany:
+                raise exc.InvalidRequestError(
+                    "'literal_execute' or 'expanding' parameters can't be "
+                    "used with executemany()"
+                )
+
+            expanded_state = compiled._process_parameters_for_postcompile(
+                self.compiled_parameters[0]
+            )
+
+            # re-assign self.unicode_statement
+            self.unicode_statement = expanded_state.statement
+
+            self._expanded_parameters = expanded_state.parameter_expansion
+
+            flattened_processors = dict(processors)  # type: ignore
+            flattened_processors.update(expanded_state.processors)
+            positiontup = expanded_state.positiontup
+        elif compiled.positional:
+            positiontup = self.compiled.positiontup
+        else:
+            positiontup = None
+
+        if compiled.schema_translate_map:
+            schema_translate_map = self.execution_options.get(
+                "schema_translate_map", {}
+            )
+            rst = compiled.preparer._render_schema_translates
+            self.unicode_statement = rst(
+                self.unicode_statement, schema_translate_map
+            )
+
+        # final self.unicode_statement is now assigned, encode if needed
+        # by dialect
+        self.statement = self.unicode_statement
+
+        # Convert the dictionary of bind parameter values
+        # into a dict or list to be sent to the DBAPI's
+        # execute() or executemany() method.
+
+        if compiled.positional:
+            core_positional_parameters: MutableSequence[Sequence[Any]] = []
+            assert positiontup is not None
+            for compiled_params in self.compiled_parameters:
+                l_param: List[Any] = [
+                    (
+                        flattened_processors[key](compiled_params[key])
+                        if key in flattened_processors
+                        else compiled_params[key]
+                    )
+                    for key in positiontup
+                ]
+                core_positional_parameters.append(
+                    dialect.execute_sequence_format(l_param)
+                )
+
+            self.parameters = core_positional_parameters
+        else:
+            core_dict_parameters: MutableSequence[Dict[str, Any]] = []
+            escaped_names = compiled.escaped_bind_names
+
+            # note that currently, "expanded" parameters will be present
+            # in self.compiled_parameters in their quoted form.   This is
+            # slightly inconsistent with the approach taken as of
+            # #8056 where self.compiled_parameters is meant to contain unquoted
+            # param names.
+            d_param: Dict[str, Any]
+            for compiled_params in self.compiled_parameters:
+                if escaped_names:
+                    d_param = {
+                        escaped_names.get(key, key): (
+                            flattened_processors[key](compiled_params[key])
+                            if key in flattened_processors
+                            else compiled_params[key]
+                        )
+                        for key in compiled_params
+                    }
+                else:
+                    d_param = {
+                        key: (
+                            flattened_processors[key](compiled_params[key])
+                            if key in flattened_processors
+                            else compiled_params[key]
+                        )
+                        for key in compiled_params
+                    }
+
+                core_dict_parameters.append(d_param)
+
+            self.parameters = core_dict_parameters
+
+        return self
+
+    @classmethod
+    def _init_statement(
+        cls,
+        dialect: Dialect,
+        connection: Connection,
+        dbapi_connection: PoolProxiedConnection,
+        execution_options: _ExecuteOptions,
+        statement: str,
+        parameters: _DBAPIMultiExecuteParams,
+    ) -> ExecutionContext:
+        """Initialize execution context for a string SQL statement."""
+
+        self = cls.__new__(cls)
+        self.root_connection = connection
+        self._dbapi_connection = dbapi_connection
+        self.dialect = connection.dialect
+        self.is_text = True
+
+        self.execution_options = execution_options
+
+        if not parameters:
+            if self.dialect.positional:
+                self.parameters = [dialect.execute_sequence_format()]
+            else:
+                self.parameters = [self._empty_dict_params]
+        elif isinstance(parameters[0], dialect.execute_sequence_format):
+            self.parameters = parameters
+        elif isinstance(parameters[0], dict):
+            self.parameters = parameters
+        else:
+            self.parameters = [
+                dialect.execute_sequence_format(p) for p in parameters
+            ]
+
+        if len(parameters) > 1:
+            self.execute_style = ExecuteStyle.EXECUTEMANY
+
+        self.statement = self.unicode_statement = statement
+
+        self.cursor = self.create_cursor()
+        return self
+
+    @classmethod
+    def _init_default(
+        cls,
+        dialect: Dialect,
+        connection: Connection,
+        dbapi_connection: PoolProxiedConnection,
+        execution_options: _ExecuteOptions,
+    ) -> ExecutionContext:
+        """Initialize execution context for a ColumnDefault construct."""
+
+        self = cls.__new__(cls)
+        self.root_connection = connection
+        self._dbapi_connection = dbapi_connection
+        self.dialect = connection.dialect
+
+        self.execution_options = execution_options
+
+        self.cursor = self.create_cursor()
+        return self
+
+    def _get_cache_stats(self) -> str:
+        if self.compiled is None:
+            return "raw sql"
+
+        now = perf_counter()
+
+        ch = self.cache_hit
+
+        gen_time = self.compiled._gen_time
+        assert gen_time is not None
+
+        if ch is NO_CACHE_KEY:
+            return "no key %.5fs" % (now - gen_time,)
+        elif ch is CACHE_HIT:
+            return "cached since %.4gs ago" % (now - gen_time,)
+        elif ch is CACHE_MISS:
+            return "generated in %.5fs" % (now - gen_time,)
+        elif ch is CACHING_DISABLED:
+            if "_cache_disable_reason" in self.execution_options:
+                return "caching disabled (%s) %.5fs " % (
+                    self.execution_options["_cache_disable_reason"],
+                    now - gen_time,
+                )
+            else:
+                return "caching disabled %.5fs" % (now - gen_time,)
+        elif ch is NO_DIALECT_SUPPORT:
+            return "dialect %s+%s does not support caching %.5fs" % (
+                self.dialect.name,
+                self.dialect.driver,
+                now - gen_time,
+            )
+        else:
+            return "unknown"
+
+    @property
+    def executemany(self):
+        return self.execute_style in (
+            ExecuteStyle.EXECUTEMANY,
+            ExecuteStyle.INSERTMANYVALUES,
+        )
+
+    @util.memoized_property
+    def identifier_preparer(self):
+        if self.compiled:
+            return self.compiled.preparer
+        elif "schema_translate_map" in self.execution_options:
+            return self.dialect.identifier_preparer._with_schema_translate(
+                self.execution_options["schema_translate_map"]
+            )
+        else:
+            return self.dialect.identifier_preparer
+
+    @util.memoized_property
+    def engine(self):
+        return self.root_connection.engine
+
+    @util.memoized_property
+    def postfetch_cols(self) -> Optional[Sequence[Column[Any]]]:
+        if TYPE_CHECKING:
+            assert isinstance(self.compiled, SQLCompiler)
+        return self.compiled.postfetch
+
+    @util.memoized_property
+    def prefetch_cols(self) -> Optional[Sequence[Column[Any]]]:
+        if TYPE_CHECKING:
+            assert isinstance(self.compiled, SQLCompiler)
+        if self.isinsert:
+            return self.compiled.insert_prefetch
+        elif self.isupdate:
+            return self.compiled.update_prefetch
+        else:
+            return ()
+
+    @util.memoized_property
+    def no_parameters(self):
+        return self.execution_options.get("no_parameters", False)
+
+    def _execute_scalar(self, stmt, type_, parameters=None):
+        """Execute a string statement on the current cursor, returning a
+        scalar result.
+
+        Used to fire off sequences, default phrases, and "select lastrowid"
+        types of statements individually or in the context of a parent INSERT
+        or UPDATE statement.
+
+        """
+
+        conn = self.root_connection
+
+        if "schema_translate_map" in self.execution_options:
+            schema_translate_map = self.execution_options.get(
+                "schema_translate_map", {}
+            )
+
+            rst = self.identifier_preparer._render_schema_translates
+            stmt = rst(stmt, schema_translate_map)
+
+        if not parameters:
+            if self.dialect.positional:
+                parameters = self.dialect.execute_sequence_format()
+            else:
+                parameters = {}
+
+        conn._cursor_execute(self.cursor, stmt, parameters, context=self)
+        row = self.cursor.fetchone()
+        if row is not None:
+            r = row[0]
+        else:
+            r = None
+        if type_ is not None:
+            # apply type post processors to the result
+            proc = type_._cached_result_processor(
+                self.dialect, self.cursor.description[0][1]
+            )
+            if proc:
+                return proc(r)
+        return r
+
+    @util.memoized_property
+    def connection(self):
+        return self.root_connection
+
+    def _use_server_side_cursor(self):
+        if not self.dialect.supports_server_side_cursors:
+            return False
+
+        if self.dialect.server_side_cursors:
+            # this is deprecated
+            use_server_side = self.execution_options.get(
+                "stream_results", True
+            ) and (
+                self.compiled
+                and isinstance(self.compiled.statement, expression.Selectable)
+                or (
+                    (
+                        not self.compiled
+                        or isinstance(
+                            self.compiled.statement, expression.TextClause
+                        )
+                    )
+                    and self.unicode_statement
+                    and SERVER_SIDE_CURSOR_RE.match(self.unicode_statement)
+                )
+            )
+        else:
+            use_server_side = self.execution_options.get(
+                "stream_results", False
+            )
+
+        return use_server_side
+
+    def create_cursor(self):
+        if (
+            # inlining initial preference checks for SS cursors
+            self.dialect.supports_server_side_cursors
+            and (
+                self.execution_options.get("stream_results", False)
+                or (
+                    self.dialect.server_side_cursors
+                    and self._use_server_side_cursor()
+                )
+            )
+        ):
+            self._is_server_side = True
+            return self.create_server_side_cursor()
+        else:
+            self._is_server_side = False
+            return self.create_default_cursor()
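+
+    # Server-side cursor selection above is driven by execution options; an
+    # illustrative request from user code:
+    #
+    #     with engine.connect() as conn:
+    #         result = conn.execution_options(
+    #             stream_results=True, yield_per=100
+    #         ).execute(stmt)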
+
+    def fetchall_for_returning(self, cursor):
+        return cursor.fetchall()
+
+    def create_default_cursor(self):
+        return self._dbapi_connection.cursor()
+
+    def create_server_side_cursor(self):
+        raise NotImplementedError()
+
+    def pre_exec(self):
+        pass
+
+    def get_out_parameter_values(self, names):
+        raise NotImplementedError(
+            "This dialect does not support OUT parameters"
+        )
+
+    def post_exec(self):
+        pass
+
+    def get_result_processor(self, type_, colname, coltype):
+        """Return a 'result processor' for a given type as present in
+        cursor.description.
+
+        This has a default implementation that dialects can override
+        for context-sensitive result type handling.
+
+        """
+        return type_._cached_result_processor(self.dialect, coltype)
+
+    def get_lastrowid(self):
+        """return self.cursor.lastrowid, or equivalent, after an INSERT.
+
+        This may involve calling special cursor functions, issuing a new SELECT
+        on the cursor (or a new one), or returning a stored value that was
+        calculated within post_exec().
+
+        This function will only be called for dialects which support
+        "implicit" primary key generation, which keep
+        preexecute_autoincrement_sequences set to False, and when no
+        explicit id value was bound to the statement.
+
+        The function is called once for an INSERT statement that would need to
+        return the last inserted primary key for those dialects that make use
+        of the lastrowid concept.  In these cases, it is called directly after
+        :meth:`.ExecutionContext.post_exec`.
+
+        """
+        return self.cursor.lastrowid
+
+    def handle_dbapi_exception(self, e):
+        pass
+
+    @util.non_memoized_property
+    def rowcount(self) -> int:
+        if self._rowcount is not None:
+            return self._rowcount
+        else:
+            return self.cursor.rowcount
+
+    @property
+    def _has_rowcount(self):
+        return self._rowcount is not None
+
+    def supports_sane_rowcount(self):
+        return self.dialect.supports_sane_rowcount
+
+    def supports_sane_multi_rowcount(self):
+        return self.dialect.supports_sane_multi_rowcount
+
+    def _setup_result_proxy(self):
+        exec_opt = self.execution_options
+
+        if self._rowcount is None and exec_opt.get("preserve_rowcount", False):
+            self._rowcount = self.cursor.rowcount
+
+        if self.is_crud or self.is_text:
+            result = self._setup_dml_or_text_result()
+            yp = sr = False
+        else:
+            yp = exec_opt.get("yield_per", None)
+            sr = self._is_server_side or exec_opt.get("stream_results", False)
+            strategy = self.cursor_fetch_strategy
+            if sr and strategy is _cursor._DEFAULT_FETCH:
+                strategy = _cursor.BufferedRowCursorFetchStrategy(
+                    self.cursor, self.execution_options
+                )
+            cursor_description: _DBAPICursorDescription = (
+                strategy.alternate_cursor_description
+                or self.cursor.description
+            )
+            if cursor_description is None:
+                strategy = _cursor._NO_CURSOR_DQL
+
+            result = _cursor.CursorResult(self, strategy, cursor_description)
+
+        compiled = self.compiled
+
+        if (
+            compiled
+            and not self.isddl
+            and cast(SQLCompiler, compiled).has_out_parameters
+        ):
+            self._setup_out_parameters(result)
+
+        self._soft_closed = result._soft_closed
+
+        if yp:
+            result = result.yield_per(yp)
+
+        return result
+
+    def _setup_out_parameters(self, result):
+        compiled = cast(SQLCompiler, self.compiled)
+
+        out_bindparams = [
+            (param, name)
+            for param, name in compiled.bind_names.items()
+            if param.isoutparam
+        ]
+        out_parameters = {}
+
+        for bindparam, raw_value in zip(
+            [param for param, name in out_bindparams],
+            self.get_out_parameter_values(
+                [name for param, name in out_bindparams]
+            ),
+        ):
+            type_ = bindparam.type
+            impl_type = type_.dialect_impl(self.dialect)
+            dbapi_type = impl_type.get_dbapi_type(self.dialect.loaded_dbapi)
+            result_processor = impl_type.result_processor(
+                self.dialect, dbapi_type
+            )
+            if result_processor is not None:
+                raw_value = result_processor(raw_value)
+            out_parameters[bindparam.key] = raw_value
+
+        result.out_parameters = out_parameters
+
+    def _setup_dml_or_text_result(self):
+        compiled = cast(SQLCompiler, self.compiled)
+
+        strategy: ResultFetchStrategy = self.cursor_fetch_strategy
+
+        if self.isinsert:
+            if (
+                self.execute_style is ExecuteStyle.INSERTMANYVALUES
+                and compiled.effective_returning
+            ):
+                strategy = _cursor.FullyBufferedCursorFetchStrategy(
+                    self.cursor,
+                    initial_buffer=self._insertmanyvalues_rows,
+                    # maintain alt cursor description if set by the
+                    # dialect, e.g. mssql preserves it
+                    alternate_description=(
+                        strategy.alternate_cursor_description
+                    ),
+                )
+
+            if compiled.postfetch_lastrowid:
+                self.inserted_primary_key_rows = (
+                    self._setup_ins_pk_from_lastrowid()
+                )
+            # else if not self._is_implicit_returning,
+            # the default inserted_primary_key_rows accessor will
+            # return an "empty" primary key collection when accessed.
+
+        if self._is_server_side and strategy is _cursor._DEFAULT_FETCH:
+            strategy = _cursor.BufferedRowCursorFetchStrategy(
+                self.cursor, self.execution_options
+            )
+
+        if strategy is _cursor._NO_CURSOR_DML:
+            cursor_description = None
+        else:
+            cursor_description = (
+                strategy.alternate_cursor_description
+                or self.cursor.description
+            )
+
+        if cursor_description is None:
+            strategy = _cursor._NO_CURSOR_DML
+        elif self._num_sentinel_cols:
+            assert self.execute_style is ExecuteStyle.INSERTMANYVALUES
+            # strip out the sentinel columns from cursor description
+            # a similar logic is done to the rows only in CursorResult
+            cursor_description = cursor_description[
+                0 : -self._num_sentinel_cols
+            ]
+
+        result: _cursor.CursorResult[Any] = _cursor.CursorResult(
+            self, strategy, cursor_description
+        )
+
+        if self.isinsert:
+            if self._is_implicit_returning:
+                rows = result.all()
+
+                self.returned_default_rows = rows
+
+                self.inserted_primary_key_rows = (
+                    self._setup_ins_pk_from_implicit_returning(result, rows)
+                )
+
+                # test that it has a cursor metadata that is accurate. the
+                # first row will have been fetched and current assumptions
+                # are that the result has only one row, until executemany()
+                # support is added here.
+                assert result._metadata.returns_rows
+
+                # Insert statement has both return_defaults() and
+                # returning().  rewind the result on the list of rows
+                # we just used.
+                if self._is_supplemental_returning:
+                    result._rewind(rows)
+                else:
+                    result._soft_close()
+            elif not self._is_explicit_returning:
+                result._soft_close()
+
+                # we assume here the result does not return any rows.
+                # *usually*, this will be true.  However, some dialects
+                # such as that of MSSQL/pyodbc need to SELECT a post fetch
+                # function so this is not necessarily true.
+                # assert not result.returns_rows
+
+        elif self._is_implicit_returning:
+            rows = result.all()
+
+            if rows:
+                self.returned_default_rows = rows
+            self._rowcount = len(rows)
+
+            if self._is_supplemental_returning:
+                result._rewind(rows)
+            else:
+                result._soft_close()
+
+            # test that it has a cursor metadata that is accurate.
+            # the rows have all been fetched however.
+            assert result._metadata.returns_rows
+
+        elif not result._metadata.returns_rows:
+            # no results, get rowcount
+            # (which requires open cursor on some drivers)
+            if self._rowcount is None:
+                self._rowcount = self.cursor.rowcount
+            result._soft_close()
+        elif self.isupdate or self.isdelete:
+            if self._rowcount is None:
+                self._rowcount = self.cursor.rowcount
+        return result
+
+    @util.memoized_property
+    def inserted_primary_key_rows(self):
+        # if no specific "get primary key" strategy was set up
+        # during execution, return a "default" primary key based
+        # on what's in the compiled_parameters and nothing else.
+        return self._setup_ins_pk_from_empty()
+
+    def _setup_ins_pk_from_lastrowid(self):
+        getter = cast(
+            SQLCompiler, self.compiled
+        )._inserted_primary_key_from_lastrowid_getter
+        lastrowid = self.get_lastrowid()
+        return [getter(lastrowid, self.compiled_parameters[0])]
+
+    def _setup_ins_pk_from_empty(self):
+        getter = cast(
+            SQLCompiler, self.compiled
+        )._inserted_primary_key_from_lastrowid_getter
+        return [getter(None, param) for param in self.compiled_parameters]
+
+    def _setup_ins_pk_from_implicit_returning(self, result, rows):
+        if not rows:
+            return []
+
+        getter = cast(
+            SQLCompiler, self.compiled
+        )._inserted_primary_key_from_returning_getter
+        compiled_params = self.compiled_parameters
+
+        return [
+            getter(row, param) for row, param in zip(rows, compiled_params)
+        ]
+
+    def lastrow_has_defaults(self):
+        return (self.isinsert or self.isupdate) and bool(
+            cast(SQLCompiler, self.compiled).postfetch
+        )
+
+    def _prepare_set_input_sizes(
+        self,
+    ) -> Optional[List[Tuple[str, Any, TypeEngine[Any]]]]:
+        """Given a cursor and ClauseParameters, prepare arguments
+        in order to call the appropriate
+        style of ``setinputsizes()`` on the cursor, using DB-API types
+        from the bind parameter's ``TypeEngine`` objects.
+
+        This method is only called by those dialects which set the
+        :attr:`.Dialect.bind_typing` attribute to
+        :attr:`.BindTyping.SETINPUTSIZES`.  Python-oracledb and cx_Oracle are
+        the only DBAPIs that require setinputsizes(); pyodbc offers it as an
+        option.
+
+        Prior to SQLAlchemy 2.0, the setinputsizes() approach was also used
+        for pg8000 and asyncpg, which have since been changed to use
+        inline rendering of casts.
+
+        """
+        if self.isddl or self.is_text:
+            return None
+
+        compiled = cast(SQLCompiler, self.compiled)
+
+        inputsizes = compiled._get_set_input_sizes_lookup()
+
+        if inputsizes is None:
+            return None
+
+        dialect = self.dialect
+
+        # all of the rest of this... cython?
+
+        if dialect._has_events:
+            inputsizes = dict(inputsizes)
+            dialect.dispatch.do_setinputsizes(
+                inputsizes, self.cursor, self.statement, self.parameters, self
+            )
+
+        if compiled.escaped_bind_names:
+            escaped_bind_names = compiled.escaped_bind_names
+        else:
+            escaped_bind_names = None
+
+        if dialect.positional:
+            items = [
+                (key, compiled.binds[key])
+                for key in compiled.positiontup or ()
+            ]
+        else:
+            items = [
+                (key, bindparam)
+                for bindparam, key in compiled.bind_names.items()
+            ]
+
+        generic_inputsizes: List[Tuple[str, Any, TypeEngine[Any]]] = []
+        for key, bindparam in items:
+            if bindparam in compiled.literal_execute_params:
+                continue
+
+            if key in self._expanded_parameters:
+                if is_tuple_type(bindparam.type):
+                    num = len(bindparam.type.types)
+                    dbtypes = inputsizes[bindparam]
+                    generic_inputsizes.extend(
+                        (
+                            (
+                                escaped_bind_names.get(paramname, paramname)
+                                if escaped_bind_names is not None
+                                else paramname
+                            ),
+                            dbtypes[idx % num],
+                            bindparam.type.types[idx % num],
+                        )
+                        for idx, paramname in enumerate(
+                            self._expanded_parameters[key]
+                        )
+                    )
+                else:
+                    dbtype = inputsizes.get(bindparam, None)
+                    generic_inputsizes.extend(
+                        (
+                            (
+                                escaped_bind_names.get(paramname, paramname)
+                                if escaped_bind_names is not None
+                                else paramname
+                            ),
+                            dbtype,
+                            bindparam.type,
+                        )
+                        for paramname in self._expanded_parameters[key]
+                    )
+            else:
+                dbtype = inputsizes.get(bindparam, None)
+
+                escaped_name = (
+                    escaped_bind_names.get(key, key)
+                    if escaped_bind_names is not None
+                    else key
+                )
+
+                generic_inputsizes.append(
+                    (escaped_name, dbtype, bindparam.type)
+                )
+
+        return generic_inputsizes
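+
+    # The do_setinputsizes() hook dispatched above can be intercepted from
+    # user code; an illustrative listener (handler body is a sketch):
+    #
+    #     from sqlalchemy import event
+    #
+    #     @event.listens_for(engine, "do_setinputsizes")
+    #     def _log_inputsizes(
+    #         inputsizes, cursor, statement, parameters, context
+    #     ):
+    #         for bindparam, dbapitype in inputsizes.items():
+    #             print(bindparam.key, dbapitype)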
+
+    def _exec_default(self, column, default, type_):
+        if default.is_sequence:
+            return self.fire_sequence(default, type_)
+        elif default.is_callable:
+            # this codepath is not normally used as it's inlined
+            # into _process_execute_defaults
+            self.current_column = column
+            return default.arg(self)
+        elif default.is_clause_element:
+            return self._exec_default_clause_element(column, default, type_)
+        else:
+            # this codepath is not normally used as it's inlined
+            # into _process_execute_defaults
+            return default.arg
+
+    def _exec_default_clause_element(self, column, default, type_):
+        # execute a default that's a complete clause element.  Here, we have
+        # to re-implement a miniature version of the compile->parameters->
+        # cursor.execute() sequence, since we don't want to modify the state
+        # of the connection / result in progress or create new
+        # connection / result objects etc.
+        # .. versionchanged:: 1.4
+
+        if not default._arg_is_typed:
+            default_arg = expression.type_coerce(default.arg, type_)
+        else:
+            default_arg = default.arg
+        compiled = expression.select(default_arg).compile(dialect=self.dialect)
+        compiled_params = compiled.construct_params()
+        processors = compiled._bind_processors
+        if compiled.positional:
+            parameters = self.dialect.execute_sequence_format(
+                [
+                    (
+                        processors[key](compiled_params[key])  # type: ignore
+                        if key in processors
+                        else compiled_params[key]
+                    )
+                    for key in compiled.positiontup or ()
+                ]
+            )
+        else:
+            parameters = {
+                key: (
+                    processors[key](compiled_params[key])  # type: ignore
+                    if key in processors
+                    else compiled_params[key]
+                )
+                for key in compiled_params
+            }
+        return self._execute_scalar(
+            str(compiled), type_, parameters=parameters
+        )
+
+    current_parameters: Optional[_CoreSingleExecuteParams] = None
+    """A dictionary of parameters applied to the current row.
+
+    This attribute is only available in the context of a user-defined default
+    generation function, e.g. as described at :ref:`context_default_functions`.
+    It consists of a dictionary which includes entries for each column/value
+    pair that is to be part of the INSERT or UPDATE statement. The keys of the
+    dictionary will be the key value of each :class:`_schema.Column`,
+    which is usually
+    synonymous with the name.
+
+    Note that the :attr:`.DefaultExecutionContext.current_parameters` attribute
+    does not accommodate the "multi-values" feature of the
+    :meth:`_expression.Insert.values` method.  The
+    :meth:`.DefaultExecutionContext.get_current_parameters` method should be
+    preferred.
+
+    .. seealso::
+
+        :meth:`.DefaultExecutionContext.get_current_parameters`
+
+        :ref:`context_default_functions`
+
+    """
+
+    def get_current_parameters(self, isolate_multiinsert_groups=True):
+        """Return a dictionary of parameters applied to the current row.
+
+        This method can only be used in the context of a user-defined default
+        generation function, e.g. as described at
+        :ref:`context_default_functions`. When invoked, a dictionary is
+        returned which includes entries for each column/value pair that is part
+        of the INSERT or UPDATE statement. The keys of the dictionary will be
+        the key value of each :class:`_schema.Column`,
+        which is usually synonymous
+        with the name.
+
+        :param isolate_multiinsert_groups=True: indicates that multi-valued
+         INSERT constructs created using :meth:`_expression.Insert.values`
+         should be
+         handled by returning only the subset of parameters that are local
+         to the current column default invocation.   When ``False``, the
+         raw parameters of the statement are returned including the
+         naming convention used in the case of multi-valued INSERT.
+
+        .. versionadded:: 1.2  added
+           :meth:`.DefaultExecutionContext.get_current_parameters`
+           which provides more functionality over the existing
+           :attr:`.DefaultExecutionContext.current_parameters`
+           attribute.
+
+        .. seealso::
+
+            :attr:`.DefaultExecutionContext.current_parameters`
+
+            :ref:`context_default_functions`
+
+        """
+        try:
+            parameters = self.current_parameters
+            column = self.current_column
+        except AttributeError:
+            raise exc.InvalidRequestError(
+                "get_current_parameters() can only be invoked in the "
+                "context of a Python side column default function"
+            )
+        else:
+            assert column is not None
+            assert parameters is not None
+        compile_state = cast(
+            "DMLState", cast(SQLCompiler, self.compiled).compile_state
+        )
+        assert compile_state is not None
+        if (
+            isolate_multiinsert_groups
+            and dml.isinsert(compile_state)
+            and compile_state._has_multi_parameters
+        ):
+            if column._is_multiparam_column:
+                index = column.index + 1
+                d = {column.original.key: parameters[column.key]}
+            else:
+                d = {column.key: parameters[column.key]}
+                index = 0
+            assert compile_state._dict_parameters is not None
+            keys = compile_state._dict_parameters.keys()
+            d.update(
+                (key, parameters["%s_m%d" % (key, index)]) for key in keys
+            )
+            return d
+        else:
+            return parameters
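+
+    # A minimal usage sketch (editor's note, not part of the library
+    # source): a Python-side column default that reads sibling parameters
+    # via get_current_parameters().  The table and column names below are
+    # hypothetical.
+    #
+    #     from sqlalchemy import Column, Integer, MetaData, String, Table
+    #
+    #     def full_name_default(context):
+    #         params = context.get_current_parameters()
+    #         return "%s %s" % (params["first_name"], params["last_name"])
+    #
+    #     user = Table(
+    #         "user_account",
+    #         MetaData(),
+    #         Column("id", Integer, primary_key=True),
+    #         Column("first_name", String(50)),
+    #         Column("last_name", String(50)),
+    #         Column("full_name", String(101), default=full_name_default),
+    #     )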
+
+    def get_insert_default(self, column):
+        if column.default is None:
+            return None
+        else:
+            return self._exec_default(column, column.default, column.type)
+
+    def get_update_default(self, column):
+        if column.onupdate is None:
+            return None
+        else:
+            return self._exec_default(column, column.onupdate, column.type)
+
+    def _process_execute_defaults(self):
+        compiled = cast(SQLCompiler, self.compiled)
+
+        key_getter = compiled._within_exec_param_key_getter
+
+        sentinel_counter = 0
+
+        if compiled.insert_prefetch:
+            prefetch_recs = [
+                (
+                    c,
+                    key_getter(c),
+                    c._default_description_tuple,
+                    self.get_insert_default,
+                )
+                for c in compiled.insert_prefetch
+            ]
+        elif compiled.update_prefetch:
+            prefetch_recs = [
+                (
+                    c,
+                    key_getter(c),
+                    c._onupdate_description_tuple,
+                    self.get_update_default,
+                )
+                for c in compiled.update_prefetch
+            ]
+        else:
+            prefetch_recs = []
+
+        for param in self.compiled_parameters:
+            self.current_parameters = param
+
+            for (
+                c,
+                param_key,
+                (arg, is_scalar, is_callable, is_sentinel),
+                fallback,
+            ) in prefetch_recs:
+                if is_sentinel:
+                    param[param_key] = sentinel_counter
+                    sentinel_counter += 1
+                elif is_scalar:
+                    param[param_key] = arg
+                elif is_callable:
+                    self.current_column = c
+                    param[param_key] = arg(self)
+                else:
+                    val = fallback(c)
+                    if val is not None:
+                        param[param_key] = val
+
+        del self.current_parameters
+
+
+DefaultDialect.execution_ctx_cls = DefaultExecutionContext
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/events.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/events.py
new file mode 100644
index 00000000..b759382c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/events.py
@@ -0,0 +1,965 @@
+# engine/events.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+
+from __future__ import annotations
+
+import typing
+from typing import Any
+from typing import Dict
+from typing import Optional
+from typing import Tuple
+from typing import Type
+from typing import Union
+
+from .base import Connection
+from .base import Engine
+from .interfaces import ConnectionEventsTarget
+from .interfaces import DBAPIConnection
+from .interfaces import DBAPICursor
+from .interfaces import Dialect
+from .. import event
+from .. import exc
+from ..util.typing import Literal
+
+if typing.TYPE_CHECKING:
+    from .interfaces import _CoreMultiExecuteParams
+    from .interfaces import _CoreSingleExecuteParams
+    from .interfaces import _DBAPIAnyExecuteParams
+    from .interfaces import _DBAPIMultiExecuteParams
+    from .interfaces import _DBAPISingleExecuteParams
+    from .interfaces import _ExecuteOptions
+    from .interfaces import ExceptionContext
+    from .interfaces import ExecutionContext
+    from .result import Result
+    from ..pool import ConnectionPoolEntry
+    from ..sql import Executable
+    from ..sql.elements import BindParameter
+
+
+class ConnectionEvents(event.Events[ConnectionEventsTarget]):
+    """Available events for
+    :class:`_engine.Connection` and :class:`_engine.Engine`.
+
+    The methods here define the name of an event as well as the names of
+    members that are passed to listener functions.
+
+    An event listener can be associated with any
+    :class:`_engine.Connection` or :class:`_engine.Engine`
+    class or instance, such as an :class:`_engine.Engine`, e.g.::
+
+        from sqlalchemy import event, create_engine
+
+
+        def before_cursor_execute(
+            conn, cursor, statement, parameters, context, executemany
+        ):
+            log.info("Received statement: %s", statement)
+
+
+        engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")
+        event.listen(engine, "before_cursor_execute", before_cursor_execute)
+
+    or with a specific :class:`_engine.Connection`::
+
+        with engine.begin() as conn:
+
+            @event.listens_for(conn, "before_cursor_execute")
+            def before_cursor_execute(
+                conn, cursor, statement, parameters, context, executemany
+            ):
+                log.info("Received statement: %s", statement)
+
+    When the methods are called with a ``statement`` parameter, such as in
+    :meth:`.after_cursor_execute` or :meth:`.before_cursor_execute`,
+    the statement is the exact SQL string that was prepared for transmission
+    to the DBAPI ``cursor`` in the connection's :class:`.Dialect`.
+
+    The :meth:`.before_execute` and :meth:`.before_cursor_execute`
+    events can also be established with the ``retval=True`` flag, which
+    allows modification of the statement and parameters to be sent
+    to the database.  The :meth:`.before_cursor_execute` event is
+    particularly useful here to add ad-hoc string transformations, such
+    as comments, to all executions::
+
+        from sqlalchemy.engine import Engine
+        from sqlalchemy import event
+
+
+        @event.listens_for(Engine, "before_cursor_execute", retval=True)
+        def comment_sql_calls(
+            conn, cursor, statement, parameters, context, executemany
+        ):
+            statement = statement + " -- some comment"
+            return statement, parameters
+
+    .. note:: :class:`_events.ConnectionEvents` can be established on any
+       combination of :class:`_engine.Engine`, :class:`_engine.Connection`,
+       as well
+       as instances of each of those classes.  Events across all
+       four scopes will fire off for a given instance of
+       :class:`_engine.Connection`.  However, for performance reasons, the
+       :class:`_engine.Connection` object determines at instantiation time
+       whether or not its parent :class:`_engine.Engine` has event listeners
+       established.   Event listeners added to the :class:`_engine.Engine`
+       class or to an instance of :class:`_engine.Engine`
+       *after* the instantiation
+       of a dependent :class:`_engine.Connection` instance will usually
+       *not* be available on that :class:`_engine.Connection` instance.
+       The newly
+       added listeners will instead take effect for
+       :class:`_engine.Connection`
+       instances created subsequent to those event listeners being
+       established on the parent :class:`_engine.Engine` class or instance.
+
+    :param retval=False: Applies to the :meth:`.before_execute` and
+      :meth:`.before_cursor_execute` events only.  When True, the
+      user-defined event function must have a return value, which
+      is a tuple of parameters that replace the given statement
+      and parameters.  See those methods for a description of
+      specific return arguments.
+
+    """  # noqa
+
+    _target_class_doc = "SomeEngine"
+    _dispatch_target = ConnectionEventsTarget
+
+    @classmethod
+    def _accept_with(
+        cls,
+        target: Union[ConnectionEventsTarget, Type[ConnectionEventsTarget]],
+        identifier: str,
+    ) -> Optional[Union[ConnectionEventsTarget, Type[ConnectionEventsTarget]]]:
+        default_dispatch = super()._accept_with(target, identifier)
+        if default_dispatch is None and hasattr(
+            target, "_no_async_engine_events"
+        ):
+            target._no_async_engine_events()
+
+        return default_dispatch
+
+    @classmethod
+    def _listen(
+        cls,
+        event_key: event._EventKey[ConnectionEventsTarget],
+        *,
+        retval: bool = False,
+        **kw: Any,
+    ) -> None:
+        target, identifier, fn = (
+            event_key.dispatch_target,
+            event_key.identifier,
+            event_key._listen_fn,
+        )
+        target._has_events = True
+
+        if not retval:
+            if identifier == "before_execute":
+                orig_fn = fn
+
+                def wrap_before_execute(  # type: ignore
+                    conn, clauseelement, multiparams, params, execution_options
+                ):
+                    orig_fn(
+                        conn,
+                        clauseelement,
+                        multiparams,
+                        params,
+                        execution_options,
+                    )
+                    return clauseelement, multiparams, params
+
+                fn = wrap_before_execute
+            elif identifier == "before_cursor_execute":
+                orig_fn = fn
+
+                def wrap_before_cursor_execute(  # type: ignore
+                    conn, cursor, statement, parameters, context, executemany
+                ):
+                    orig_fn(
+                        conn,
+                        cursor,
+                        statement,
+                        parameters,
+                        context,
+                        executemany,
+                    )
+                    return statement, parameters
+
+                fn = wrap_before_cursor_execute
+        elif retval and identifier not in (
+            "before_execute",
+            "before_cursor_execute",
+        ):
+            raise exc.ArgumentError(
+                "Only the 'before_execute', "
+                "'before_cursor_execute' and 'handle_error' engine "
+                "event listeners accept the 'retval=True' "
+                "argument."
+            )
+        event_key.with_wrapper(fn).base_listen()
+
+    @event._legacy_signature(
+        "1.4",
+        ["conn", "clauseelement", "multiparams", "params"],
+        lambda conn, clauseelement, multiparams, params, execution_options: (
+            conn,
+            clauseelement,
+            multiparams,
+            params,
+        ),
+    )
+    def before_execute(
+        self,
+        conn: Connection,
+        clauseelement: Executable,
+        multiparams: _CoreMultiExecuteParams,
+        params: _CoreSingleExecuteParams,
+        execution_options: _ExecuteOptions,
+    ) -> Optional[
+        Tuple[Executable, _CoreMultiExecuteParams, _CoreSingleExecuteParams]
+    ]:
+        """Intercept high level execute() events, receiving uncompiled
+        SQL constructs and other objects prior to rendering into SQL.
+
+        This event is good for debugging SQL compilation issues as well
+        as early manipulation of the parameters being sent to the database,
+        as the parameter lists will be in a consistent format here.
+
+        This event can be optionally established with the ``retval=True``
+        flag.  The ``clauseelement``, ``multiparams``, and ``params``
+        arguments should be returned as a three-tuple in this case::
+
+            @event.listens_for(Engine, "before_execute", retval=True)
+            def before_execute(conn, clauseelement, multiparams, params):
+                # do something with clauseelement, multiparams, params
+                return clauseelement, multiparams, params
+
+        :param conn: :class:`_engine.Connection` object
+        :param clauseelement: SQL expression construct, :class:`.Compiled`
+         instance, or string statement passed to
+         :meth:`_engine.Connection.execute`.
+        :param multiparams: Multiple parameter sets, a list of dictionaries.
+        :param params: Single parameter set, a single dictionary.
+        :param execution_options: dictionary of execution
+         options passed along with the statement, if any.  This is a merge
+         of all options that will be used, including those of the statement,
+         the connection, and those passed in to the method itself for
+         the 2.0 style of execution.
+
+         .. versionadded:: 1.4
+
+        .. seealso::
+
+            :meth:`.before_cursor_execute`
+
+        """
+
+    @event._legacy_signature(
+        "1.4",
+        ["conn", "clauseelement", "multiparams", "params", "result"],
+        lambda conn, clauseelement, multiparams, params, execution_options, result: (  # noqa
+            conn,
+            clauseelement,
+            multiparams,
+            params,
+            result,
+        ),
+    )
+    def after_execute(
+        self,
+        conn: Connection,
+        clauseelement: Executable,
+        multiparams: _CoreMultiExecuteParams,
+        params: _CoreSingleExecuteParams,
+        execution_options: _ExecuteOptions,
+        result: Result[Any],
+    ) -> None:
+        """Intercept high level execute() events after execute.
+
+
+        :param conn: :class:`_engine.Connection` object
+        :param clauseelement: SQL expression construct, :class:`.Compiled`
+         instance, or string statement passed to
+         :meth:`_engine.Connection.execute`.
+        :param multiparams: Multiple parameter sets, a list of dictionaries.
+        :param params: Single parameter set, a single dictionary.
+        :param execution_options: dictionary of execution
+         options passed along with the statement, if any.  This is a merge
+         of all options that will be used, including those of the statement,
+         the connection, and those passed in to the method itself for
+         the 2.0 style of execution.
+
+         .. versionadded:: 1.4
+
+        :param result: :class:`_engine.CursorResult` generated by the
+         execution.
+
+        """
+
+    def before_cursor_execute(
+        self,
+        conn: Connection,
+        cursor: DBAPICursor,
+        statement: str,
+        parameters: _DBAPIAnyExecuteParams,
+        context: Optional[ExecutionContext],
+        executemany: bool,
+    ) -> Optional[Tuple[str, _DBAPIAnyExecuteParams]]:
+        """Intercept low-level cursor execute() events before execution,
+        receiving the string SQL statement and DBAPI-specific parameter list to
+        be invoked against a cursor.
+
+        This event is a good choice for logging as well as late modifications
+        to the SQL string.  It's less ideal for parameter modifications except
+        for those which are specific to a target backend.
+
+        This event can be optionally established with the ``retval=True``
+        flag.  The ``statement`` and ``parameters`` arguments should be
+        returned as a two-tuple in this case::
+
+            @event.listens_for(Engine, "before_cursor_execute", retval=True)
+            def before_cursor_execute(
+                conn, cursor, statement, parameters, context, executemany
+            ):
+                # do something with statement, parameters
+                return statement, parameters
+
+        See the example at :class:`_events.ConnectionEvents`.
+
+        :param conn: :class:`_engine.Connection` object
+        :param cursor: DBAPI cursor object
+        :param statement: string SQL statement, as to be passed to the DBAPI
+        :param parameters: Dictionary, tuple, or list of parameters being
+         passed to the ``execute()`` or ``executemany()`` method of the
+         DBAPI ``cursor``.  In some cases may be ``None``.
+        :param context: :class:`.ExecutionContext` object in use.  May
+         be ``None``.
+        :param executemany: boolean, if ``True``, this is an ``executemany()``
+         call, if ``False``, this is an ``execute()`` call.
+
+        .. seealso::
+
+            :meth:`.before_execute`
+
+            :meth:`.after_cursor_execute`
+
+        """
+
+    def after_cursor_execute(
+        self,
+        conn: Connection,
+        cursor: DBAPICursor,
+        statement: str,
+        parameters: _DBAPIAnyExecuteParams,
+        context: Optional[ExecutionContext],
+        executemany: bool,
+    ) -> None:
+        """Intercept low-level cursor execute() events after execution.
+
+        :param conn: :class:`_engine.Connection` object
+        :param cursor: DBAPI cursor object.  Will have results pending
+         if the statement was a SELECT, but these should not be consumed
+         as they will be needed by the :class:`_engine.CursorResult`.
+        :param statement: string SQL statement, as passed to the DBAPI
+        :param parameters: Dictionary, tuple, or list of parameters being
+         passed to the ``execute()`` or ``executemany()`` method of the
+         DBAPI ``cursor``.  In some cases may be ``None``.
+        :param context: :class:`.ExecutionContext` object in use.  May
+         be ``None``.
+        :param executemany: boolean, if ``True``, this is an ``executemany()``
+         call, if ``False``, this is an ``execute()`` call.
+
+        """
+
+    @event._legacy_signature(
+        "2.0", ["conn", "branch"], converter=lambda conn: (conn, False)
+    )
+    def engine_connect(self, conn: Connection) -> None:
+        """Intercept the creation of a new :class:`_engine.Connection`.
+
+        This event is called typically as the direct result of calling
+        the :meth:`_engine.Engine.connect` method.
+
+        It differs from the :meth:`_events.PoolEvents.connect` method, which
+        refers to the actual connection to a database at the DBAPI level;
+        a DBAPI connection may be pooled and reused for many operations.
+        In contrast, this event refers only to the production of a higher level
+        :class:`_engine.Connection` wrapper around such a DBAPI connection.
+
+        It also differs from the :meth:`_events.PoolEvents.checkout` event
+        in that it is specific to the :class:`_engine.Connection` object,
+        not the
+        DBAPI connection that :meth:`_events.PoolEvents.checkout` deals with,
+        although
+        this DBAPI connection is available here via the
+        :attr:`_engine.Connection.connection` attribute.
+        But note there can in fact
+        be multiple :meth:`_events.PoolEvents.checkout`
+        events within the lifespan
+        of a single :class:`_engine.Connection` object, if that
+        :class:`_engine.Connection`
+        is invalidated and re-established.
+
+        :param conn: :class:`_engine.Connection` object.
+
+        .. seealso::
+
+            :meth:`_events.PoolEvents.checkout`
+            the lower-level pool checkout event
+            for an individual DBAPI connection
+
+        """
+
+    def set_connection_execution_options(
+        self, conn: Connection, opts: Dict[str, Any]
+    ) -> None:
+        """Intercept when the :meth:`_engine.Connection.execution_options`
+        method is called.
+
+        This method is called after the new :class:`_engine.Connection`
+        has been
+        produced, with the newly updated execution options collection, but
+        before the :class:`.Dialect` has acted upon any of those new options.
+
+        Note that this method is not called when a new
+        :class:`_engine.Connection`
+        is produced which is inheriting execution options from its parent
+        :class:`_engine.Engine`; to intercept this condition, use the
+        :meth:`_events.ConnectionEvents.engine_connect` event.
+
+        :param conn: The newly copied :class:`_engine.Connection` object
+
+        :param opts: dictionary of options that were passed to the
+         :meth:`_engine.Connection.execution_options` method.
+         This dictionary may be modified in place to affect the ultimate
+         options which take effect.
+
+         .. versionadded:: 2.0 the ``opts`` dictionary may be modified
+            in place.
+
+
+        .. seealso::
+
+            :meth:`_events.ConnectionEvents.set_engine_execution_options`
+            - event
+            which is called when :meth:`_engine.Engine.execution_options`
+            is called.
+
+
+        """
+
+    def set_engine_execution_options(
+        self, engine: Engine, opts: Dict[str, Any]
+    ) -> None:
+        """Intercept when the :meth:`_engine.Engine.execution_options`
+        method is called.
+
+        The :meth:`_engine.Engine.execution_options` method produces a shallow
+        copy of the :class:`_engine.Engine` which stores the new options.
+        That new
+        :class:`_engine.Engine` is passed here.
+        A particular application of this
+        method is to add a :meth:`_events.ConnectionEvents.engine_connect`
+        event
+        handler to the given :class:`_engine.Engine`
+        which will perform some per-
+        :class:`_engine.Connection` task specific to these execution options.
+
+        :param engine: The newly copied :class:`_engine.Engine` object
+
+        :param opts: dictionary of options that were passed to the
+         :meth:`_engine.Engine.execution_options` method.
+         This dictionary may be modified in place to affect the ultimate
+         options which take effect.
+
+         .. versionadded:: 2.0 the ``opts`` dictionary may be modified
+            in place.
+
+        .. seealso::
+
+            :meth:`_events.ConnectionEvents.set_connection_execution_options`
+            - event
+            which is called when :meth:`_engine.Connection.execution_options`
+            is
+            called.
+
+        """
+
+    def engine_disposed(self, engine: Engine) -> None:
+        """Intercept when the :meth:`_engine.Engine.dispose` method is called.
+
+        The :meth:`_engine.Engine.dispose` method instructs the engine to
+        "dispose" of it's connection pool (e.g. :class:`_pool.Pool`), and
+        replaces it with a new one.  Disposing of the old pool has the
+        effect that existing checked-in connections are closed.  The new
+        pool does not establish any new connections until it is first used.
+
+        This event can be used to indicate that resources related to the
+        :class:`_engine.Engine` should also be cleaned up,
+        keeping in mind that the
+        :class:`_engine.Engine`
+        can still be used for new requests in which case
+        it re-acquires connection resources.
+
+        """
+
+    def begin(self, conn: Connection) -> None:
+        """Intercept begin() events.
+
+        :param conn: :class:`_engine.Connection` object
+
+        """
+
+    def rollback(self, conn: Connection) -> None:
+        """Intercept rollback() events, as initiated by a
+        :class:`.Transaction`.
+
+        Note that the :class:`_pool.Pool` also "auto-rolls back"
+        a DBAPI connection upon checkin, if the ``reset_on_return``
+        flag is set to its default value of ``'rollback'``.
+        To intercept this
+        rollback, use the :meth:`_events.PoolEvents.reset` hook.
+
+        :param conn: :class:`_engine.Connection` object
+
+        .. seealso::
+
+            :meth:`_events.PoolEvents.reset`
+
+        """
+
+    def commit(self, conn: Connection) -> None:
+        """Intercept commit() events, as initiated by a
+        :class:`.Transaction`.
+
+        Note that the :class:`_pool.Pool` may also "auto-commit"
+        a DBAPI connection upon checkin, if the ``reset_on_return``
+        flag is set to the value ``'commit'``.  To intercept this
+        commit, use the :meth:`_events.PoolEvents.reset` hook.
+
+        :param conn: :class:`_engine.Connection` object
+        """
+
+    def savepoint(self, conn: Connection, name: str) -> None:
+        """Intercept savepoint() events.
+
+        :param conn: :class:`_engine.Connection` object
+        :param name: specified name used for the savepoint.
+
+        """
+
+    def rollback_savepoint(
+        self, conn: Connection, name: str, context: None
+    ) -> None:
+        """Intercept rollback_savepoint() events.
+
+        :param conn: :class:`_engine.Connection` object
+        :param name: specified name used for the savepoint.
+        :param context: not used
+
+        """
+        # TODO: deprecate "context"
+
+    def release_savepoint(
+        self, conn: Connection, name: str, context: None
+    ) -> None:
+        """Intercept release_savepoint() events.
+
+        :param conn: :class:`_engine.Connection` object
+        :param name: specified name used for the savepoint.
+        :param context: not used
+
+        """
+        # TODO: deprecate "context"
+
+    def begin_twophase(self, conn: Connection, xid: Any) -> None:
+        """Intercept begin_twophase() events.
+
+        :param conn: :class:`_engine.Connection` object
+        :param xid: two-phase XID identifier
+
+        """
+
+    def prepare_twophase(self, conn: Connection, xid: Any) -> None:
+        """Intercept prepare_twophase() events.
+
+        :param conn: :class:`_engine.Connection` object
+        :param xid: two-phase XID identifier
+        """
+
+    def rollback_twophase(
+        self, conn: Connection, xid: Any, is_prepared: bool
+    ) -> None:
+        """Intercept rollback_twophase() events.
+
+        :param conn: :class:`_engine.Connection` object
+        :param xid: two-phase XID identifier
+        :param is_prepared: boolean, indicates if
+         :meth:`.TwoPhaseTransaction.prepare` was called.
+
+        """
+
+    def commit_twophase(
+        self, conn: Connection, xid: Any, is_prepared: bool
+    ) -> None:
+        """Intercept commit_twophase() events.
+
+        :param conn: :class:`_engine.Connection` object
+        :param xid: two-phase XID identifier
+        :param is_prepared: boolean, indicates if
+         :meth:`.TwoPhaseTransaction.prepare` was called.
+
+        """
+
+
+class DialectEvents(event.Events[Dialect]):
+    """event interface for execution-replacement functions.
+
+    These events allow direct instrumentation and replacement
+    of key dialect functions which interact with the DBAPI.
+
+    .. note::
+
+        :class:`.DialectEvents` hooks should be considered **semi-public**
+        and experimental.
+        These hooks are not for general use and are only for those situations
+        where intricate re-statement of DBAPI mechanics must be injected onto
+        an existing dialect.  For general-use statement-interception events,
+        please use the :class:`_events.ConnectionEvents` interface.
+
+    .. seealso::
+
+        :meth:`_events.ConnectionEvents.before_cursor_execute`
+
+        :meth:`_events.ConnectionEvents.before_execute`
+
+        :meth:`_events.ConnectionEvents.after_cursor_execute`
+
+        :meth:`_events.ConnectionEvents.after_execute`
+
+    """
+
+    _target_class_doc = "SomeEngine"
+    _dispatch_target = Dialect
+
+    @classmethod
+    def _listen(
+        cls,
+        event_key: event._EventKey[Dialect],
+        *,
+        retval: bool = False,
+        **kw: Any,
+    ) -> None:
+        target = event_key.dispatch_target
+
+        target._has_events = True
+        event_key.base_listen()
+
+    @classmethod
+    def _accept_with(
+        cls,
+        target: Union[Engine, Type[Engine], Dialect, Type[Dialect]],
+        identifier: str,
+    ) -> Optional[Union[Dialect, Type[Dialect]]]:
+        if isinstance(target, type):
+            if issubclass(target, Engine):
+                return Dialect
+            elif issubclass(target, Dialect):
+                return target
+        elif isinstance(target, Engine):
+            return target.dialect
+        elif isinstance(target, Dialect):
+            return target
+        elif isinstance(target, Connection) and identifier == "handle_error":
+            raise exc.InvalidRequestError(
+                "The handle_error() event hook as of SQLAlchemy 2.0 is "
+                "established on the Dialect, and may only be applied to the "
+                "Engine as a whole or to a specific Dialect as a whole, "
+                "not on a per-Connection basis."
+            )
+        elif hasattr(target, "_no_async_engine_events"):
+            target._no_async_engine_events()
+        else:
+            return None
+
+    def handle_error(
+        self, exception_context: ExceptionContext
+    ) -> Optional[BaseException]:
+        r"""Intercept all exceptions processed by the
+        :class:`_engine.Dialect`, typically but not limited to those
+        emitted within the scope of a :class:`_engine.Connection`.
+
+        .. versionchanged:: 2.0 the :meth:`.DialectEvents.handle_error` event
+           is moved to the :class:`.DialectEvents` class, moved from the
+           :class:`.ConnectionEvents` class, so that it may also participate in
+           the "pre ping" operation configured with the
+           :paramref:`_sa.create_engine.pool_pre_ping` parameter. The event
+           remains registered by using the :class:`_engine.Engine` as the event
+           target, however note that using the :class:`_engine.Connection` as
+           an event target for :meth:`.DialectEvents.handle_error` is no longer
+           supported.
+
+        This includes all exceptions emitted by the DBAPI as well as
+        within SQLAlchemy's statement invocation process, including
+        encoding errors and other statement validation errors.  Other areas
+        in which the event is invoked include transaction begin and end,
+        result row fetching, and cursor creation.
+
+        Note that :meth:`.handle_error` may support new kinds of exceptions
+        and new calling scenarios at *any time*.  Code which uses this
+        event must expect new calling patterns to be present in minor
+        releases.
+
+        To support the wide variety of members that correspond to an exception,
+        as well as to allow extensibility of the event without backwards
+        incompatibility, the sole argument received is an instance of
+        :class:`.ExceptionContext`.   This object contains data members
+        representing detail about the exception.
+
+        Use cases supported by this hook include:
+
+        * read-only, low-level exception handling for logging and
+          debugging purposes
+        * Establishing whether a DBAPI connection error message indicates
+          that the database connection needs to be reconnected, including
+          for the "pre_ping" handler used by **some** dialects
+        * Establishing or disabling whether a connection or the owning
+          connection pool is invalidated or expired in response to a
+          specific exception
+        * exception re-writing
+
+        The hook is called while the cursor from the failed operation
+        (if any) is still open and accessible.   Special cleanup operations
+        can be called on this cursor; SQLAlchemy will attempt to close
+        this cursor subsequent to this hook being invoked.
+
+        As of SQLAlchemy 2.0, the "pre_ping" handler enabled using the
+        :paramref:`_sa.create_engine.pool_pre_ping` parameter will also
+        participate in the :meth:`.handle_error` process, **for those dialects
+        that rely upon disconnect codes to detect database liveness**. Note
+        that some dialects such as psycopg, psycopg2, and most MySQL dialects
+        make use of a native ``ping()`` method supplied by the DBAPI which does
+        not make use of disconnect codes.
+
+        .. versionchanged:: 2.0.0 The :meth:`.DialectEvents.handle_error`
+           event hook participates in connection pool "pre-ping" operations.
+           Within this usage, the :attr:`.ExceptionContext.engine` attribute
+           will be ``None``, however the :class:`.Dialect` in use is always
+           available via the :attr:`.ExceptionContext.dialect` attribute.
+
+        .. versionchanged:: 2.0.5 Added :attr:`.ExceptionContext.is_pre_ping`
+           attribute which will be set to ``True`` when the
+           :meth:`.DialectEvents.handle_error` event hook is triggered within
+           a connection pool pre-ping operation.
+
+        .. versionchanged:: 2.0.5 An issue was repaired that allows for the
+           PostgreSQL ``psycopg`` and ``psycopg2`` drivers, as well as all
+           MySQL drivers, to properly participate in the
+           :meth:`.DialectEvents.handle_error` event hook during
+           connection pool "pre-ping" operations; previously, the
+           implementation was non-working for these drivers.
+
+
+        A handler function has two options for replacing
+        the SQLAlchemy-constructed exception into one that is user
+        defined.   It can either raise this new exception directly, in
+        which case all further event listeners are bypassed and the
+        exception will be raised, after appropriate cleanup has taken
+        place::
+
+            @event.listens_for(Engine, "handle_error")
+            def handle_exception(context):
+                if isinstance(
+                    context.original_exception, psycopg2.OperationalError
+                ) and "failed" in str(context.original_exception):
+                    raise MySpecialException("failed operation")
+
+        .. warning::  Because the
+           :meth:`_events.DialectEvents.handle_error`
+           event specifically provides for exceptions to be re-thrown as
+           the ultimate exception raised by the failed statement,
+           **stack traces will be misleading** if the user-defined event
+           handler itself fails and throws an unexpected exception;
+           the stack trace may not illustrate the actual code line that
+           failed!  It is advised to code carefully here and use
+           logging and/or inline debugging if unexpected exceptions are
+           occurring.
+
+        Alternatively, a "chained" style of event handling can be
+        used, by configuring the handler with the ``retval=True``
+        modifier and returning the new exception instance from the
+        function.  In this case, event handling will continue onto the
+        next handler.   The "chained" exception is available using
+        :attr:`.ExceptionContext.chained_exception`::
+
+            @event.listens_for(Engine, "handle_error", retval=True)
+            def handle_exception(context):
+                if (
+                    context.chained_exception is not None
+                    and "special" in context.chained_exception.message
+                ):
+                    return MySpecialException(
+                        "failed", cause=context.chained_exception
+                    )
+
+        Handlers that return ``None`` may be used within the chain; when
+        a handler returns ``None``, the previous exception instance,
+        if any, is maintained as the current exception that is passed onto the
+        next handler.
+
+        When a custom exception is raised or returned, SQLAlchemy raises
+        this new exception as-is, it is not wrapped by any SQLAlchemy
+        object.  If the exception is not a subclass of
+        :class:`sqlalchemy.exc.StatementError`,
+        certain features may not be available; currently this includes
+        the ORM's feature of adding a detail hint about "autoflush" to
+        exceptions raised within the autoflush process.
+
+        :param context: an :class:`.ExceptionContext` object.  See this
+         class for details on all available members.
+
+
+        .. seealso::
+
+            :ref:`pool_new_disconnect_codes`
+
+        """
+
+    def do_connect(
+        self,
+        dialect: Dialect,
+        conn_rec: ConnectionPoolEntry,
+        cargs: Tuple[Any, ...],
+        cparams: Dict[str, Any],
+    ) -> Optional[DBAPIConnection]:
+        """Receive connection arguments before a connection is made.
+
+        This event is useful in that it allows the handler to manipulate the
+        cargs and/or cparams collections that control how the DBAPI
+        ``connect()`` function will be called. ``cargs`` will always be a
+        Python list that can be mutated in-place, and ``cparams`` a Python
+        dictionary that may also be mutated::
+
+            e = create_engine("postgresql+psycopg2://user@host/dbname")
+
+
+            @event.listens_for(e, "do_connect")
+            def receive_do_connect(dialect, conn_rec, cargs, cparams):
+                cparams["password"] = "some_password"
+
+        The event hook may also be used to override the call to ``connect()``
+        entirely, by returning a non-``None`` DBAPI connection object::
+
+            e = create_engine("postgresql+psycopg2://user@host/dbname")
+
+
+            @event.listens_for(e, "do_connect")
+            def receive_do_connect(dialect, conn_rec, cargs, cparams):
+                return psycopg2.connect(*cargs, **cparams)
+
+        .. seealso::
+
+            :ref:`custom_dbapi_args`
+
+        """
+
+    def do_executemany(
+        self,
+        cursor: DBAPICursor,
+        statement: str,
+        parameters: _DBAPIMultiExecuteParams,
+        context: ExecutionContext,
+    ) -> Optional[Literal[True]]:
+        """Receive a cursor to have executemany() called.
+
+        Return the value True to halt further events from invoking,
+        and to indicate that the cursor execution has already taken
+        place within the event handler.
+
+        """
+
+    def do_execute_no_params(
+        self, cursor: DBAPICursor, statement: str, context: ExecutionContext
+    ) -> Optional[Literal[True]]:
+        """Receive a cursor to have execute() with no parameters called.
+
+        Return the value True to halt further events from invoking,
+        and to indicate that the cursor execution has already taken
+        place within the event handler.
+
+        """
+
+    def do_execute(
+        self,
+        cursor: DBAPICursor,
+        statement: str,
+        parameters: _DBAPISingleExecuteParams,
+        context: ExecutionContext,
+    ) -> Optional[Literal[True]]:
+        """Receive a cursor to have execute() called.
+
+        Return the value True to halt further events from invoking,
+        and to indicate that the cursor execution has already taken
+        place within the event handler.
+
+        """
+
+    def do_setinputsizes(
+        self,
+        inputsizes: Dict[BindParameter[Any], Any],
+        cursor: DBAPICursor,
+        statement: str,
+        parameters: _DBAPIAnyExecuteParams,
+        context: ExecutionContext,
+    ) -> None:
+        """Receive the setinputsizes dictionary for possible modification.
+
+        This event is emitted in the case where the dialect makes use of the
+        DBAPI ``cursor.setinputsizes()`` method which passes information about
+        parameter binding for a particular statement.   The given
+        ``inputsizes`` dictionary will contain :class:`.BindParameter` objects
+        as keys, linked to DBAPI-specific type objects as values; for
+        parameters that are not bound, they are added to the dictionary with
+        ``None`` as the value, which means the parameter will not be included
+        in the ultimate setinputsizes call.   The event may be used to inspect
+        and/or log the datatypes that are being bound, as well as to modify the
+        dictionary in place.  Parameters can be added, modified, or removed
+        from this dictionary.   Callers will typically want to inspect the
+        :attr:`.BindParameter.type` attribute of the given bind objects in
+        order to make decisions about the DBAPI object.
+
+        After the event, the ``inputsizes`` dictionary is converted into
+        an appropriate datastructure to be passed to ``cursor.setinputsizes``;
+        either a list for a positional bound parameter execution style,
+        or a dictionary of string parameter keys to DBAPI type objects for
+        a named bound parameter execution style.
+
+        The setinputsizes hook overall is only used for dialects which include
+        the flag ``use_setinputsizes=True``.  Dialects which use this
+        include the python-oracledb, cx_Oracle, pg8000, asyncpg, and
+        pyodbc dialects.
+
+        .. note::
+
+            For use with pyodbc, the ``use_setinputsizes`` flag
+            must be passed to the dialect, e.g.::
+
+                create_engine("mssql+pyodbc://...", use_setinputsizes=True)
+
+            .. seealso::
+
+                  :ref:`mssql_pyodbc_setinputsizes`
+
+        .. versionadded:: 1.2.9
+
+        .. seealso::
+
+            :ref:`cx_oracle_setinputsizes`
+
+        """
+        pass
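+
+# A usage sketch (editor's note, not part of the library source): inspect
+# the setinputsizes dictionary as it is passed to the event; ``engine``
+# and ``log`` are assumed to exist.
+#
+#     @event.listens_for(engine, "do_setinputsizes")
+#     def inspect_inputsizes(
+#         inputsizes, cursor, statement, parameters, context
+#     ):
+#         # keys are BindParameter objects, values are DBAPI type objects
+#         for bindparam, dbapitype in inputsizes.items():
+#             log.debug("%s -> %r", bindparam.key, dbapitype)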
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/interfaces.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/interfaces.py
new file mode 100644
index 00000000..9fb39db7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/interfaces.py
@@ -0,0 +1,3406 @@
+# engine/interfaces.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Define core interfaces used by the engine system."""
+
+from __future__ import annotations
+
+from enum import Enum
+from types import ModuleType
+from typing import Any
+from typing import Awaitable
+from typing import Callable
+from typing import ClassVar
+from typing import Collection
+from typing import Dict
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import MutableMapping
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from .. import util
+from ..event import EventTarget
+from ..pool import Pool
+from ..pool import PoolProxiedConnection
+from ..sql.compiler import Compiled as Compiled
+from ..sql.compiler import Compiled  # noqa
+from ..sql.compiler import TypeCompiler as TypeCompiler
+from ..sql.compiler import TypeCompiler  # noqa
+from ..util import immutabledict
+from ..util.concurrency import await_only
+from ..util.typing import Literal
+from ..util.typing import NotRequired
+from ..util.typing import Protocol
+from ..util.typing import TypedDict
+
+if TYPE_CHECKING:
+    from .base import Connection
+    from .base import Engine
+    from .cursor import CursorResult
+    from .url import URL
+    from ..event import _ListenerFnType
+    from ..event import dispatcher
+    from ..exc import StatementError
+    from ..sql import Executable
+    from ..sql.compiler import _InsertManyValuesBatch
+    from ..sql.compiler import DDLCompiler
+    from ..sql.compiler import IdentifierPreparer
+    from ..sql.compiler import InsertmanyvaluesSentinelOpts
+    from ..sql.compiler import Linting
+    from ..sql.compiler import SQLCompiler
+    from ..sql.elements import BindParameter
+    from ..sql.elements import ClauseElement
+    from ..sql.schema import Column
+    from ..sql.schema import DefaultGenerator
+    from ..sql.schema import SchemaItem
+    from ..sql.schema import Sequence as Sequence_SchemaItem
+    from ..sql.sqltypes import Integer
+    from ..sql.type_api import _TypeMemoDict
+    from ..sql.type_api import TypeEngine
+
+ConnectArgsType = Tuple[Sequence[str], MutableMapping[str, Any]]
+
+_T = TypeVar("_T", bound="Any")
+
+
+class CacheStats(Enum):
+    CACHE_HIT = 0
+    CACHE_MISS = 1
+    CACHING_DISABLED = 2
+    NO_CACHE_KEY = 3
+    NO_DIALECT_SUPPORT = 4
+
+
+class ExecuteStyle(Enum):
+    """indicates the :term:`DBAPI` cursor method that will be used to invoke
+    a statement."""
+
+    EXECUTE = 0
+    """indicates cursor.execute() will be used"""
+
+    EXECUTEMANY = 1
+    """indicates cursor.executemany() will be used."""
+
+    INSERTMANYVALUES = 2
+    """indicates cursor.execute() will be used with an INSERT where the
+    VALUES expression will be expanded to accommodate multiple
+    parameter sets
+
+    .. seealso::
+
+        :ref:`engine_insertmanyvalues`
+
+    """
+
+
+class DBAPIConnection(Protocol):
+    """protocol representing a :pep:`249` database connection.
+
+    .. versionadded:: 2.0
+
+    .. seealso::
+
+        `Connection Objects <https://www.python.org/dev/peps/pep-0249/#connection-objects>`_
+        - in :pep:`249`
+
+    """  # noqa: E501
+
+    def close(self) -> None: ...
+
+    def commit(self) -> None: ...
+
+    def cursor(self) -> DBAPICursor: ...
+
+    def rollback(self) -> None: ...
+
+    autocommit: bool
+
+
+class DBAPIType(Protocol):
+    """protocol representing a :pep:`249` database type.
+
+    .. versionadded:: 2.0
+
+    .. seealso::
+
+        `Type Objects <https://www.python.org/dev/peps/pep-0249/#type-objects>`_
+        - in :pep:`249`
+
+    """  # noqa: E501
+
+
+class DBAPICursor(Protocol):
+    """protocol representing a :pep:`249` database cursor.
+
+    .. versionadded:: 2.0
+
+    .. seealso::
+
+        `Cursor Objects <https://www.python.org/dev/peps/pep-0249/#cursor-objects>`_
+        - in :pep:`249`
+
+    """  # noqa: E501
+
+    @property
+    def description(
+        self,
+    ) -> _DBAPICursorDescription:
+        """The description attribute of the Cursor.
+
+        .. seealso::
+
+            `cursor.description <https://www.python.org/dev/peps/pep-0249/#description>`_
+            - in :pep:`249`
+
+
+        """  # noqa: E501
+        ...
+
+    @property
+    def rowcount(self) -> int: ...
+
+    arraysize: int
+
+    lastrowid: int
+
+    def close(self) -> None: ...
+
+    def execute(
+        self,
+        operation: Any,
+        parameters: Optional[_DBAPISingleExecuteParams] = None,
+    ) -> Any: ...
+
+    def executemany(
+        self,
+        operation: Any,
+        parameters: _DBAPIMultiExecuteParams,
+    ) -> Any: ...
+
+    def fetchone(self) -> Optional[Any]: ...
+
+    def fetchmany(self, size: int = ...) -> Sequence[Any]: ...
+
+    def fetchall(self) -> Sequence[Any]: ...
+
+    def setinputsizes(self, sizes: Sequence[Any]) -> None: ...
+
+    def setoutputsize(self, size: Any, column: Any) -> None: ...
+
+    def callproc(
+        self, procname: str, parameters: Sequence[Any] = ...
+    ) -> Any: ...
+
+    def nextset(self) -> Optional[bool]: ...
+
+    def __getattr__(self, key: str) -> Any: ...
+
+
+_CoreSingleExecuteParams = Mapping[str, Any]
+_MutableCoreSingleExecuteParams = MutableMapping[str, Any]
+_CoreMultiExecuteParams = Sequence[_CoreSingleExecuteParams]
+_CoreAnyExecuteParams = Union[
+    _CoreMultiExecuteParams, _CoreSingleExecuteParams
+]
+
+_DBAPISingleExecuteParams = Union[Sequence[Any], _CoreSingleExecuteParams]
+
+_DBAPIMultiExecuteParams = Union[
+    Sequence[Sequence[Any]], _CoreMultiExecuteParams
+]
+_DBAPIAnyExecuteParams = Union[
+    _DBAPIMultiExecuteParams, _DBAPISingleExecuteParams
+]
+_DBAPICursorDescription = Sequence[
+    Tuple[
+        str,
+        "DBAPIType",
+        Optional[int],
+        Optional[int],
+        Optional[int],
+        Optional[int],
+        Optional[bool],
+    ]
+]
+
+_AnySingleExecuteParams = _DBAPISingleExecuteParams
+_AnyMultiExecuteParams = _DBAPIMultiExecuteParams
+_AnyExecuteParams = _DBAPIAnyExecuteParams
+
+CompiledCacheType = MutableMapping[Any, "Compiled"]
+SchemaTranslateMapType = Mapping[Optional[str], Optional[str]]
+
+_ImmutableExecuteOptions = immutabledict[str, Any]
+
+_ParamStyle = Literal[
+    "qmark", "numeric", "named", "format", "pyformat", "numeric_dollar"
+]
+
+_GenericSetInputSizesType = List[Tuple[str, Any, "TypeEngine[Any]"]]
+
+IsolationLevel = Literal[
+    "SERIALIZABLE",
+    "REPEATABLE READ",
+    "READ COMMITTED",
+    "READ UNCOMMITTED",
+    "AUTOCOMMIT",
+]
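+
+# A usage sketch (editor's note, not part of the library source): the
+# IsolationLevel values above are accepted by the ``isolation_level``
+# execution option; ``engine`` is assumed to exist.
+#
+#     with engine.connect().execution_options(
+#         isolation_level="READ COMMITTED"
+#     ) as conn:
+#         ...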
+
+
+class _CoreKnownExecutionOptions(TypedDict, total=False):
+    compiled_cache: Optional[CompiledCacheType]
+    logging_token: str
+    isolation_level: IsolationLevel
+    no_parameters: bool
+    stream_results: bool
+    max_row_buffer: int
+    yield_per: int
+    insertmanyvalues_page_size: int
+    schema_translate_map: Optional[SchemaTranslateMapType]
+    preserve_rowcount: bool
+
+
+_ExecuteOptions = immutabledict[str, Any]
+CoreExecuteOptionsParameter = Union[
+    _CoreKnownExecutionOptions, Mapping[str, Any]
+]
+
+
+class ReflectedIdentity(TypedDict):
+    """represent the reflected IDENTITY structure of a column, corresponding
+    to the :class:`_schema.Identity` construct.
+
+    The :class:`.ReflectedIdentity` structure is part of the
+    :class:`.ReflectedColumn` structure, which is returned by the
+    :meth:`.Inspector.get_columns` method.
+
+    """
+
+    always: bool
+    """type of identity column"""
+
+    on_null: bool
+    """indicates ON NULL"""
+
+    start: int
+    """starting index of the sequence"""
+
+    increment: int
+    """increment value of the sequence"""
+
+    minvalue: int
+    """the minimum value of the sequence."""
+
+    maxvalue: int
+    """the maximum value of the sequence."""
+
+    nominvalue: bool
+    """no minimum value of the sequence."""
+
+    nomaxvalue: bool
+    """no maximum value of the sequence."""
+
+    cycle: bool
+    """allows the sequence to wrap around when the maxvalue
+    or minvalue has been reached."""
+
+    cache: Optional[int]
+    """number of future values in the
+    sequence which are calculated in advance."""
+
+    order: bool
+    """if true, renders the ORDER keyword."""
+
+
+class ReflectedComputed(TypedDict):
+    """Represent the reflected elements of a computed column, corresponding
+    to the :class:`_schema.Computed` construct.
+
+    The :class:`.ReflectedComputed` structure is part of the
+    :class:`.ReflectedColumn` structure, which is returned by the
+    :meth:`.Inspector.get_columns` method.
+
+    """
+
+    sqltext: str
+    """the expression used to generate this column returned
+    as a string SQL expression"""
+
+    persisted: NotRequired[bool]
+    """indicates if the value is stored in the table or computed on demand"""
+
+
+class ReflectedColumn(TypedDict):
+    """Dictionary representing the reflected elements corresponding to
+    a :class:`_schema.Column` object.
+
+    The :class:`.ReflectedColumn` structure is returned by the
+    :class:`.Inspector.get_columns` method.
+
+    """
+
+    name: str
+    """column name"""
+
+    type: TypeEngine[Any]
+    """column type represented as a :class:`.TypeEngine` instance."""
+
+    nullable: bool
+    """boolean flag if the column is NULL or NOT NULL"""
+
+    default: Optional[str]
+    """column default expression as a SQL string"""
+
+    autoincrement: NotRequired[bool]
+    """database-dependent autoincrement flag.
+
+    This flag indicates if the column has a database-side "autoincrement"
+    flag of some kind.   Within SQLAlchemy, other kinds of columns may
+    also act as an "autoincrement" column without necessarily having
+    such a flag on them.
+
+    See :paramref:`_schema.Column.autoincrement` for more background on
+    "autoincrement".
+
+    """
+
+    comment: NotRequired[Optional[str]]
+    """comment for the column, if present.
+    Only some dialects return this key
+    """
+
+    computed: NotRequired[ReflectedComputed]
+    """indicates that this column is computed by the database.
+    Only some dialects return this key.
+
+    .. versionadded:: 1.3.16 - added support for computed reflection.
+    """
+
+    identity: NotRequired[ReflectedIdentity]
+    """indicates this column is an IDENTITY column.
+    Only some dialects return this key.
+
+    .. versionadded:: 1.4 - added support for identity column reflection.
+    """
+
+    dialect_options: NotRequired[Dict[str, Any]]
+    """Additional dialect-specific options detected for this reflected
+    object"""
+
+
+class ReflectedConstraint(TypedDict):
+    """Dictionary representing the reflected elements corresponding to
+    :class:`.Constraint`
+
+    A base class for all constraints
+    """
+
+    name: Optional[str]
+    """constraint name"""
+
+    comment: NotRequired[Optional[str]]
+    """comment for the constraint, if present"""
+
+
+class ReflectedCheckConstraint(ReflectedConstraint):
+    """Dictionary representing the reflected elements corresponding to
+    :class:`.CheckConstraint`.
+
+    The :class:`.ReflectedCheckConstraint` structure is returned by the
+    :meth:`.Inspector.get_check_constraints` method.
+
+    """
+
+    sqltext: str
+    """the check constraint's SQL expression"""
+
+    dialect_options: NotRequired[Dict[str, Any]]
+    """Additional dialect-specific options detected for this check constraint
+
+    .. versionadded:: 1.3.8
+    """
+
+
+class ReflectedUniqueConstraint(ReflectedConstraint):
+    """Dictionary representing the reflected elements corresponding to
+    :class:`.UniqueConstraint`.
+
+    The :class:`.ReflectedUniqueConstraint` structure is returned by the
+    :meth:`.Inspector.get_unique_constraints` method.
+
+    """
+
+    column_names: List[str]
+    """column names which comprise the unique constraint"""
+
+    duplicates_index: NotRequired[Optional[str]]
+    "Indicates if this unique constraint duplicates an index with this name"
+
+    dialect_options: NotRequired[Dict[str, Any]]
+    """Additional dialect-specific options detected for this unique
+    constraint"""
+
+
+class ReflectedPrimaryKeyConstraint(ReflectedConstraint):
+    """Dictionary representing the reflected elements corresponding to
+    :class:`.PrimaryKeyConstraint`.
+
+    The :class:`.ReflectedPrimaryKeyConstraint` structure is returned by the
+    :meth:`.Inspector.get_pk_constraint` method.
+
+    """
+
+    constrained_columns: List[str]
+    """column names which comprise the primary key"""
+
+    dialect_options: NotRequired[Dict[str, Any]]
+    """Additional dialect-specific options detected for this primary key"""
+
+
+class ReflectedForeignKeyConstraint(ReflectedConstraint):
+    """Dictionary representing the reflected elements corresponding to
+    :class:`.ForeignKeyConstraint`.
+
+    The :class:`.ReflectedForeignKeyConstraint` structure is returned by
+    the :meth:`.Inspector.get_foreign_keys` method.
+
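+    An illustrative value for a single-column foreign key (not output
+    from any specific backend)::
+
+        {
+            # hypothetical constraint name
+            "name": "fk_address_user_id",
+            "constrained_columns": ["user_id"],
+            "referred_schema": None,
+            "referred_table": "user",
+            "referred_columns": ["id"],
+        }
+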
+    """
+
+    constrained_columns: List[str]
+    """local column names which comprise the foreign key"""
+
+    referred_schema: Optional[str]
+    """schema name of the table being referred"""
+
+    referred_table: str
+    """name of the table being referred"""
+
+    referred_columns: List[str]
+    """referred column names that correspond to ``constrained_columns``"""
+
+    options: NotRequired[Dict[str, Any]]
+    """Additional options detected for this foreign key constraint"""
+
+
+class ReflectedIndex(TypedDict):
+    """Dictionary representing the reflected elements corresponding to
+    :class:`.Index`.
+
+    The :class:`.ReflectedIndex` structure is returned by the
+    :meth:`.Inspector.get_indexes` method.
+
+    """
+
+    name: Optional[str]
+    """index name"""
+
+    column_names: List[Optional[str]]
+    """column names which the index references.
+    An element of this list is ``None`` if it's an expression and is
+    returned in the ``expressions`` list.
+    """
+
+    expressions: NotRequired[List[str]]
+    """Expressions that compose the index. This list, when present, contains
+    both plain column names (that are also in ``column_names``) and
+    expressions (that are ``None`` in ``column_names``).
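+
+    For example, an index on column ``x`` plus the expression
+    ``lower(y)`` might be reflected as (illustrative; other keys
+    omitted)::
+
+        {
+            # None marks the position of the expression
+            "column_names": ["x", None],
+            "expressions": ["x", "lower(y)"],
+        }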
+    """
+
+    unique: bool
+    """whether or not the index has a unique flag"""
+
+    duplicates_constraint: NotRequired[Optional[str]]
+    "Indicates if this index mirrors a constraint with this name"
+
+    include_columns: NotRequired[List[str]]
+    """columns to include in the INCLUDE clause for supporting databases.
+
+    .. deprecated:: 2.0
+
+        Legacy value, will be replaced with
+        ``index_dict["dialect_options"]["<dialect name>_include"]``
+
+    """
+
+    column_sorting: NotRequired[Dict[str, Tuple[str, ...]]]
+    """optional dict mapping column names or expressions to tuple of sort
+    keywords, which may include ``asc``, ``desc``, ``nulls_first``,
+    ``nulls_last``.
+
+    .. versionadded:: 1.3.5
+    """
+
+    dialect_options: NotRequired[Dict[str, Any]]
+    """Additional dialect-specific options detected for this index"""
+
+
+class ReflectedTableComment(TypedDict):
+    """Dictionary representing the reflected comment corresponding to
+    the :attr:`_schema.Table.comment` attribute.
+
+    The :class:`.ReflectedTableComment` structure is returned by the
+    :meth:`.Inspector.get_table_comment` method.
+
+    """
+
+    text: Optional[str]
+    """text of the comment"""
+
+
+class BindTyping(Enum):
+    """Define different methods of passing typing information for
+    bound parameters in a statement to the database driver.
+
+    .. versionadded:: 2.0
+
+    """
+
+    NONE = 1
+    """No steps are taken to pass typing information to the database driver.
+
+    This is the default behavior for databases such as SQLite, MySQL / MariaDB,
+    SQL Server.
+
+    """
+
+    SETINPUTSIZES = 2
+    """Use the pep-249 setinputsizes method.
+
+    This is only implemented for DBAPIs that support this method and for which
+    the SQLAlchemy dialect has the appropriate infrastructure for that dialect
+    set up.  Current dialects include python-oracledb, cx_Oracle as well as
+    optional support for SQL Server using pyodbc.
+
+    When using setinputsizes, dialects also have a means of only using the
+    method for certain datatypes using include/exclude lists.
+
+    When SETINPUTSIZES is used, the :meth:`.Dialect.do_set_input_sizes` method
+    is called for each statement executed which has bound parameters.
+
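+    A dialect opts in to this mode declaratively; a minimal sketch,
+    assuming a hypothetical dialect class::
+
+        from sqlalchemy.engine.default import DefaultDialect
+
+        class MyDialect(DefaultDialect):
+            # hypothetical dialect opting into setinputsizes mode
+            bind_typing = BindTyping.SETINPUTSIZES
+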
+    """
+
+    RENDER_CASTS = 3
+    """Render casts or other directives in the SQL string.
+
+    This method is used for all PostgreSQL dialects, including asyncpg,
+    pg8000, psycopg, psycopg2.   Dialects which implement this can choose
+    which kinds of datatypes are explicitly cast in SQL statements and which
+    aren't.
+
+    When RENDER_CASTS is used, the compiler will invoke the
+    :meth:`.SQLCompiler.render_bind_cast` method for the rendered
+    string representation of each :class:`.BindParameter` object whose
+    dialect-level type sets the :attr:`.TypeEngine.render_bind_cast` attribute.
+
+    The :meth:`.SQLCompiler.render_bind_cast` is also used to render casts
+    for one form of "insertmanyvalues" query, when both
+    :attr:`.InsertmanyvaluesSentinelOpts.USE_INSERT_FROM_SELECT` and
+    :attr:`.InsertmanyvaluesSentinelOpts.RENDER_SELECT_COL_CASTS` are set,
+    where the casts are applied to the intermediary columns e.g.
+    "INSERT INTO t (a, b, c) SELECT p0::TYP, p1::TYP, p2::TYP "
+    "FROM (VALUES (?, ?), (?, ?), ...)".
+
+    .. versionadded:: 2.0.10 - :meth:`.SQLCompiler.render_bind_cast` is now
+       used within some elements of the "insertmanyvalues" implementation.
+
+
+    """
+
+
+VersionInfoType = Tuple[Union[int, str], ...]
+TableKey = Tuple[Optional[str], str]
+
+
+class Dialect(EventTarget):
+    """Define the behavior of a specific database and DB-API combination.
+
+    Any aspect of metadata definition, SQL query generation,
+    execution, result-set handling, or anything else which varies
+    between databases is defined under the general category of the
+    Dialect.  The Dialect acts as a factory for other
+    database-specific object implementations including
+    ExecutionContext, Compiled, DefaultGenerator, and TypeEngine.
+
+    .. note:: Third party dialects should not subclass :class:`.Dialect`
+       directly.  Instead, subclass :class:`.default.DefaultDialect` or
+       descendant class.
+
+    """
+
+    CACHE_HIT = CacheStats.CACHE_HIT
+    CACHE_MISS = CacheStats.CACHE_MISS
+    CACHING_DISABLED = CacheStats.CACHING_DISABLED
+    NO_CACHE_KEY = CacheStats.NO_CACHE_KEY
+    NO_DIALECT_SUPPORT = CacheStats.NO_DIALECT_SUPPORT
+
+    dispatch: dispatcher[Dialect]
+
+    name: str
+    """identifying name for the dialect from a DBAPI-neutral point of view
+      (i.e. 'sqlite')
+    """
+
+    driver: str
+    """identifying name for the dialect's DBAPI"""
+
+    dialect_description: str
+
+    dbapi: Optional[ModuleType]
+    """A reference to the DBAPI module object itself.
+
+    SQLAlchemy dialects import DBAPI modules using the classmethod
+    :meth:`.Dialect.import_dbapi`. The rationale is so that any dialect
+    module can be imported and used to generate SQL statements without the
+    need for the actual DBAPI driver to be installed.  Only when an
+    :class:`.Engine` is constructed using :func:`.create_engine` does the
+    DBAPI get imported; at that point, the creation process will assign
+    the DBAPI module to this attribute.
+
+    Dialects should therefore implement :meth:`.Dialect.import_dbapi`
+    which will import the necessary module and return it, and then refer
+    to ``self.dbapi`` in dialect code in order to refer to the DBAPI module
+    contents.
+
+    .. versionchanged:: 2.0  The :attr:`.Dialect.dbapi` attribute is exclusively
+       used as the per-:class:`.Dialect`-instance reference to the DBAPI
+       module.   The previous not-fully-documented ``.Dialect.dbapi()``
+       classmethod is deprecated and replaced by :meth:`.Dialect.import_dbapi`.
+
+    """
+
+    @util.non_memoized_property
+    def loaded_dbapi(self) -> ModuleType:
+        """same as .dbapi, but is never None; will raise an error if no
+        DBAPI was set up.
+
+        .. versionadded:: 2.0
+
+        """
+        raise NotImplementedError()
+
+    positional: bool
+    """True if the paramstyle for this Dialect is positional."""
+
+    paramstyle: str
+    """the paramstyle to be used (some DB-APIs support multiple
+      paramstyles).
+    """
+
+    compiler_linting: Linting
+
+    statement_compiler: Type[SQLCompiler]
+    """a :class:`.Compiled` class used to compile SQL statements"""
+
+    ddl_compiler: Type[DDLCompiler]
+    """a :class:`.Compiled` class used to compile DDL statements"""
+
+    type_compiler_cls: ClassVar[Type[TypeCompiler]]
+    """a :class:`.Compiled` class used to compile SQL type objects
+
+    .. versionadded:: 2.0
+
+    """
+
+    type_compiler_instance: TypeCompiler
+    """instance of a :class:`.Compiled` class used to compile SQL type
+    objects
+
+    .. versionadded:: 2.0
+
+    """
+
+    type_compiler: Any
+    """legacy; this is a TypeCompiler class at the class level, a
+    TypeCompiler instance at the instance level.
+
+    Refer to type_compiler_instance instead.
+
+    """
+
+    preparer: Type[IdentifierPreparer]
+    """a :class:`.IdentifierPreparer` class used to
+    quote identifiers.
+    """
+
+    identifier_preparer: IdentifierPreparer
+    """This element will refer to an instance of :class:`.IdentifierPreparer`
+    once a :class:`.DefaultDialect` has been constructed.
+
+    """
+
+    server_version_info: Optional[Tuple[Any, ...]]
+    """a tuple containing a version number for the DB backend in use.
+
+    This value is only available for supporting dialects, and is
+    typically populated during the initial connection to the database.
+    """
+
+    default_schema_name: Optional[str]
+    """the name of the default schema.  This value is only available for
+    supporting dialects, and is typically populated during the
+    initial connection to the database.
+
+    """
+
+    # NOTE: this does not take into effect engine-level isolation level.
+    # not clear if this should be changed, seems like it should
+    default_isolation_level: Optional[IsolationLevel]
+    """the isolation that is implicitly present on new connections"""
+
+    # create_engine()  -> isolation_level  currently goes here
+    _on_connect_isolation_level: Optional[IsolationLevel]
+
+    execution_ctx_cls: Type[ExecutionContext]
+    """a :class:`.ExecutionContext` class used to handle statement execution"""
+
+    execute_sequence_format: Union[
+        Type[Tuple[Any, ...]], Type[Tuple[List[Any]]]
+    ]
+    """either the 'tuple' or 'list' type, depending on what cursor.execute()
+    accepts for the second argument (they vary)."""
+
+    supports_alter: bool
+    """``True`` if the database supports ``ALTER TABLE`` - used only for
+    generating foreign key constraints in certain circumstances
+    """
+
+    max_identifier_length: int
+    """The maximum length of identifier names."""
+
+    supports_server_side_cursors: bool
+    """indicates if the dialect supports server side cursors"""
+
+    server_side_cursors: bool
+    """deprecated; indicates if the dialect should attempt to use server
+    side cursors by default"""
+
+    supports_sane_rowcount: bool
+    """Indicate whether the dialect properly implements rowcount for
+      ``UPDATE`` and ``DELETE`` statements.
+    """
+
+    supports_sane_multi_rowcount: bool
+    """Indicate whether the dialect properly implements rowcount for
+      ``UPDATE`` and ``DELETE`` statements when executed via
+      executemany.
+    """
+
+    supports_empty_insert: bool
+    """dialect supports INSERT () VALUES (), i.e. a plain INSERT with no
+    columns in it.
+
+    This is not usually supported; an "empty" insert is typically
+    achieved using either "INSERT..DEFAULT VALUES" or
+    "INSERT ... (col) VALUES (DEFAULT)".
+
+    """
+
+    supports_default_values: bool
+    """dialect supports INSERT... DEFAULT VALUES syntax"""
+
+    supports_default_metavalue: bool
+    """dialect supports INSERT...(col) VALUES (DEFAULT) syntax.
+
+    Most databases support this in some way, e.g. SQLite supports it using
+    ``VALUES (NULL)``.  MS SQL Server also supports the syntax; however,
+    it is the only included dialect where this is disabled, as MSSQL does
+    not support it for the IDENTITY column, which is usually where we
+    would like to make use of the feature.
+
+    """
+
+    default_metavalue_token: str = "DEFAULT"
+    """for INSERT... VALUES (DEFAULT) syntax, the token to put in the
+    parenthesis.
+
+    E.g. for SQLite this is the keyword "NULL".
+
+    """
+
+    supports_multivalues_insert: bool
+    """Target database supports INSERT...VALUES with multiple value
+    sets, i.e. INSERT INTO table (cols) VALUES (...), (...), (...), ...
+
+    """
+
+    insert_executemany_returning: bool
+    """dialect / driver / database supports some means of providing
+    INSERT...RETURNING support when dialect.do_executemany() is used.
+
+    """
+
+    insert_executemany_returning_sort_by_parameter_order: bool
+    """dialect / driver / database supports some means of providing
+    INSERT...RETURNING support when dialect.do_executemany() is used
+    along with the :paramref:`_dml.Insert.returning.sort_by_parameter_order`
+    parameter being set.
+
+    """
+
+    update_executemany_returning: bool
+    """dialect supports UPDATE..RETURNING with executemany."""
+
+    delete_executemany_returning: bool
+    """dialect supports DELETE..RETURNING with executemany."""
+
+    use_insertmanyvalues: bool
+    """if True, indicates "insertmanyvalues" functionality should be used
+    to allow for ``insert_executemany_returning`` behavior, if possible.
+
+    In practice, setting this to True means:
+
+    if ``supports_multivalues_insert``, ``insert_returning`` and
+    ``use_insertmanyvalues`` are all True, the SQL compiler will produce
+    an INSERT that will be interpreted by the :class:`.DefaultDialect`
+    as an :attr:`.ExecuteStyle.INSERTMANYVALUES` execution that allows
+    for INSERT of many rows with RETURNING by rewriting a single-row
+    INSERT statement to have multiple VALUES clauses, also executing
+    the statement multiple times for a series of batches when large numbers
+    of rows are given.
+
+    The parameter is False for the default dialect, and is set to True for
+    SQLAlchemy internal dialects SQLite, MySQL/MariaDB, PostgreSQL, SQL Server.
+    It remains at False for Oracle Database, which provides native
+    "executemany with RETURNING" support and also does not support
+    multi-VALUES INSERT (``supports_multivalues_insert``).  For
+    MySQL/MariaDB, those MySQL dialects that don't support RETURNING
+    will not report ``insert_executemany_returning`` as True.
+
+    .. versionadded:: 2.0
+
+    .. seealso::
+
+        :ref:`engine_insertmanyvalues`
+
+    """
+
+    use_insertmanyvalues_wo_returning: bool
+    """if True, and use_insertmanyvalues is also True, INSERT statements
+    that don't include RETURNING will also use "insertmanyvalues".
+
+    .. versionadded:: 2.0
+
+    .. seealso::
+
+        :ref:`engine_insertmanyvalues`
+
+    """
+
+    insertmanyvalues_implicit_sentinel: InsertmanyvaluesSentinelOpts
+    """Options indicating the database supports a form of bulk INSERT where
+    the autoincrement integer primary key can be reliably used as an ordering
+    for INSERTed rows.
+
+    .. versionadded:: 2.0.10
+
+    .. seealso::
+
+        :ref:`engine_insertmanyvalues_returning_order`
+
+    """
+
+    insertmanyvalues_page_size: int
+    """Number of rows to render into an individual INSERT..VALUES() statement
+    for :attr:`.ExecuteStyle.INSERTMANYVALUES` executions.
+
+    The default dialect defaults this to 1000.
+
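+    E.g., to lower the page size for a particular connection (a usage
+    sketch; ``engine``, ``some_table`` and ``rows`` are placeholders)::
+
+        with engine.connect() as conn:
+            # hypothetical table and parameter list
+            conn.execution_options(insertmanyvalues_page_size=100)
+            conn.execute(some_table.insert(), rows)
+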
+    .. versionadded:: 2.0
+
+    .. seealso::
+
+        :paramref:`_engine.Connection.execution_options.insertmanyvalues_page_size` -
+        execution option available on :class:`_engine.Connection`, statements
+
+    """  # noqa: E501
+
+    insertmanyvalues_max_parameters: int
+    """Alternate to insertmanyvalues_page_size, will additionally limit
+    page size based on number of parameters total in the statement.
+
+
+    """
+
+    preexecute_autoincrement_sequences: bool
+    """True if 'implicit' primary key functions must be executed separately
+      in order to get their value, if RETURNING is not used.
+
+      This is currently oriented towards PostgreSQL when the
+      ``implicit_returning=False`` parameter is used on a :class:`.Table`
+      object.
+
+    """
+
+    insert_returning: bool
+    """if the dialect supports RETURNING with INSERT
+
+    .. versionadded:: 2.0
+
+    """
+
+    update_returning: bool
+    """if the dialect supports RETURNING with UPDATE
+
+    .. versionadded:: 2.0
+
+    """
+
+    update_returning_multifrom: bool
+    """if the dialect supports RETURNING with UPDATE..FROM
+
+    .. versionadded:: 2.0
+
+    """
+
+    delete_returning: bool
+    """if the dialect supports RETURNING with DELETE
+
+    .. versionadded:: 2.0
+
+    """
+
+    delete_returning_multifrom: bool
+    """if the dialect supports RETURNING with DELETE..FROM
+
+    .. versionadded:: 2.0
+
+    """
+
+    favor_returning_over_lastrowid: bool
+    """for backends that support both a lastrowid and a RETURNING insert
+    strategy, favor RETURNING for simple single-int pk inserts.
+
+    cursor.lastrowid tends to be more performant on most backends.
+
+    """
+
+    supports_identity_columns: bool
+    """target database supports IDENTITY"""
+
+    cte_follows_insert: bool
+    """target database, when given a CTE with an INSERT statement, needs
+    the CTE to be below the INSERT"""
+
+    colspecs: MutableMapping[Type[TypeEngine[Any]], Type[TypeEngine[Any]]]
+    """A dictionary of TypeEngine classes from sqlalchemy.types mapped
+      to subclasses that are specific to the dialect class.  This
+      dictionary is class-level only and is not accessed from the
+      dialect instance itself.
+    """
+
+    supports_sequences: bool
+    """Indicates if the dialect supports CREATE SEQUENCE or similar."""
+
+    sequences_optional: bool
+    """If True, indicates if the :paramref:`_schema.Sequence.optional`
+      parameter on the :class:`_schema.Sequence` construct
+      should signal to not generate a CREATE SEQUENCE. Applies only to
+      dialects that support sequences. Currently used only to allow PostgreSQL
+      SERIAL to be used on a column that specifies Sequence() for usage on
+      other backends.
+    """
+
+    default_sequence_base: int
+    """the default value that will be rendered as the "START WITH" portion of
+    a CREATE SEQUENCE DDL statement.
+
+    """
+
+    supports_native_enum: bool
+    """Indicates if the dialect supports a native ENUM construct.
+      This will prevent :class:`_types.Enum` from generating a CHECK
+      constraint when that type is used in "native" mode.
+    """
+
+    supports_native_boolean: bool
+    """Indicates if the dialect supports a native boolean construct.
+      This will prevent :class:`_types.Boolean` from generating a CHECK
+      constraint when that type is used.
+    """
+
+    supports_native_decimal: bool
+    """indicates if Decimal objects are handled and returned for precision
+    numeric types, or if floats are returned"""
+
+    supports_native_uuid: bool
+    """indicates if Python UUID() objects are handled natively by the
+    driver for SQL UUID datatypes.
+
+    .. versionadded:: 2.0
+
+    """
+
+    returns_native_bytes: bool
+    """indicates if Python bytes() objects are returned natively by the
+    driver for SQL "binary" datatypes.
+
+    .. versionadded:: 2.0.11
+
+    """
+
+    construct_arguments: Optional[
+        List[Tuple[Type[Union[SchemaItem, ClauseElement]], Mapping[str, Any]]]
+    ] = None
+    """Optional set of argument specifiers for various SQLAlchemy
+    constructs, typically schema items.
+
+    To implement, establish as a series of tuples, as in::
+
+        construct_arguments = [
+            (schema.Index, {"using": False, "where": None, "ops": None}),
+        ]
+
+    If the above construct is established on the PostgreSQL dialect,
+    the :class:`.Index` construct will now accept the keyword arguments
+    ``postgresql_using``, ``postgresql_where``, and ``postgresql_ops``.
+    Any other argument specified to the constructor of :class:`.Index`
+    which is prefixed with ``postgresql_`` will raise :class:`.ArgumentError`.
+
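+    With the above established, end-user code may then pass the prefixed
+    keyword arguments directly, e.g. (illustrative; assumes a ``table``
+    with a ``data`` column)::
+
+        # hypothetical table/column; "gin" is a PostgreSQL index type
+        Index("my_idx", table.c.data, postgresql_using="gin")
+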
+    A dialect which does not include a ``construct_arguments`` member will
+    not participate in the argument validation system.  For such a dialect,
+    any argument name is accepted by all participating constructs, within
+    the namespace of arguments prefixed with that dialect name.  The rationale
+    here is so that third-party dialects that haven't yet implemented this
+    feature continue to function in the old way.
+
+    .. seealso::
+
+        :class:`.DialectKWArgs` - implementing base class which consumes
+        :attr:`.DefaultDialect.construct_arguments`
+
+
+    """
+
+    reflection_options: Sequence[str] = ()
+    """Sequence of string names indicating keyword arguments that can be
+    established on a :class:`.Table` object which will be passed as
+    "reflection options" when using :paramref:`.Table.autoload_with`.
+
+    Current example is "oracle_resolve_synonyms" in the Oracle Database
+    dialects.
+
+    """
+
+    dbapi_exception_translation_map: Mapping[str, str] = util.EMPTY_DICT
+    """A dictionary of names that will contain as values the names of
+       pep-249 exceptions ("IntegrityError", "OperationalError", etc)
+       keyed to alternate class names, to support the case where a
+       DBAPI has exception classes that aren't named as they are
+       referred to (e.g. IntegrityError = MyException).   In the vast
+       majority of cases this dictionary is empty.
+    """
+
+    supports_comments: bool
+    """Indicates the dialect supports comment DDL on tables and columns."""
+
+    inline_comments: bool
+    """Indicates the dialect supports comment DDL that's inline with the
+    definition of a Table or Column.  If False, this implies that ALTER must
+    be used to set table and column comments."""
+
+    supports_constraint_comments: bool
+    """Indicates if the dialect supports comment DDL on constraints.
+
+    .. versionadded:: 2.0
+    """
+
+    _has_events = False
+
+    supports_statement_cache: bool = True
+    """indicates if this dialect supports caching.
+
+    All dialects that are compatible with statement caching should set this
+    flag to True directly on each dialect class and subclass that supports
+    it.  SQLAlchemy tests that this flag is locally present on each dialect
+    subclass before it will use statement caching.  This is to provide
+    safety for legacy or new dialects that are not yet fully tested to be
+    compliant with SQL statement caching.
+
+    .. versionadded:: 1.4.5
+
+    .. seealso::
+
+        :ref:`engine_thirdparty_caching`
+
+    """
+
+    _supports_statement_cache: bool
+    """internal evaluation for supports_statement_cache"""
+
+    bind_typing = BindTyping.NONE
+    """define a means of passing typing information to the database and/or
+    driver for bound parameters.
+
+    See :class:`.BindTyping` for values.
+
+    .. versionadded:: 2.0
+
+    """
+
+    is_async: bool
+    """Whether or not this dialect is intended for asyncio use."""
+
+    has_terminate: bool
+    """Whether or not this dialect has a separate "terminate" implementation
+    that does not block or require awaiting."""
+
+    engine_config_types: Mapping[str, Any]
+    """a mapping of string keys that can be in an engine config linked to
+    type conversion functions.
+
+    """
+
+    label_length: Optional[int]
+    """optional user-defined max length for SQL labels"""
+
+    include_set_input_sizes: Optional[Set[Any]]
+    """set of DBAPI type objects that should be included in
+    automatic cursor.setinputsizes() calls.
+
+    This is only used if bind_typing is BindTyping.SETINPUTSIZES
+
+    """
+
+    exclude_set_input_sizes: Optional[Set[Any]]
+    """set of DBAPI type objects that should be excluded in
+    automatic cursor.setinputsizes() calls.
+
+    This is only used if bind_typing is BindTyping.SETINPUTSIZES
+
+    """
+
+    supports_simple_order_by_label: bool
+    """target database supports ORDER BY <labelname>, where <labelname>
+    refers to a label in the columns clause of the SELECT"""
+
+    div_is_floordiv: bool
+    """target database treats the / division operator as "floor division" """
+
+    tuple_in_values: bool
+    """target database supports tuple IN, i.e. (x, y) IN ((q, p), (r, z))"""
+
+    _bind_typing_render_casts: bool
+
+    _type_memos: MutableMapping[TypeEngine[Any], _TypeMemoDict]
+
+    def _builtin_onconnect(self) -> Optional[_ListenerFnType]:
+        raise NotImplementedError()
+
+    def create_connect_args(self, url: URL) -> ConnectArgsType:
+        """Build DB-API compatible connection arguments.
+
+        Given a :class:`.URL` object, returns a tuple
+        consisting of a ``(*args, **kwargs)`` suitable to send directly
+        to the dbapi's connect function.   The arguments are sent to the
+        :meth:`.Dialect.connect` method which then runs the DBAPI-level
+        ``connect()`` function.
+
+        The method typically makes use of the
+        :meth:`.URL.translate_connect_args`
+        method in order to generate a dictionary of options.
+
+        The default implementation is::
+
+            def create_connect_args(self, url):
+                opts = url.translate_connect_args()
+                opts.update(url.query)
+                return ([], opts)
+
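+        For a URL such as ``postgresql://scott:tiger@localhost/test``,
+        the above would return approximately (a sketch; exact keys follow
+        :meth:`.URL.translate_connect_args` defaults, and unset fields
+        are omitted)::
+
+            (
+                [],
+                {
+                    "username": "scott",
+                    "password": "tiger",
+                    "host": "localhost",
+                    "database": "test",
+                },
+            )
+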
+        :param url: a :class:`.URL` object
+
+        :return: a tuple of ``(*args, **kwargs)`` which will be passed to the
+         :meth:`.Dialect.connect` method.
+
+        .. seealso::
+
+            :meth:`.URL.translate_connect_args`
+
+        """
+
+        raise NotImplementedError()
+
+    @classmethod
+    def import_dbapi(cls) -> ModuleType:
+        """Import the DBAPI module that is used by this dialect.
+
+        The Python module object returned here will be assigned as an
+        instance variable to a constructed dialect under the name
+        ``.dbapi``.
+
+        .. versionchanged:: 2.0  The :meth:`.Dialect.import_dbapi` class
+           method is renamed from the previous method ``.Dialect.dbapi()``,
+           which would be replaced at dialect instantiation time by the
+           DBAPI module itself, thus using the same name in two different ways.
+           If a ``.Dialect.dbapi()`` classmethod is present on a third-party
+           dialect, it will be used and a deprecation warning will be emitted.
+
+        """
+        raise NotImplementedError()
+
+    def type_descriptor(self, typeobj: TypeEngine[_T]) -> TypeEngine[_T]:
+        """Transform a generic type to a dialect-specific type.
+
+        Dialect classes will usually use the
+        :func:`_types.adapt_type` function in the types module to
+        accomplish this.
+
+        The returned result is cached *per dialect class* so can
+        contain no dialect-instance state.
+
+        """
+
+        raise NotImplementedError()
+
+    def initialize(self, connection: Connection) -> None:
+        """Called during strategized creation of the dialect with a
+        connection.
+
+        Allows dialects to configure options based on server version info or
+        other properties.
+
+        The connection passed here is a SQLAlchemy Connection object,
+        with full capabilities.
+
+        The initialize() method of the base dialect should be called via
+        super().
+
+        .. note:: as of SQLAlchemy 1.4, this method is called **before**
+           any :meth:`_engine.Dialect.on_connect` hooks are called.
+
+        """
+
+        pass
+
+    if TYPE_CHECKING:
+
+        def _overrides_default(self, method_name: str) -> bool: ...
+
+    def get_columns(
+        self,
+        connection: Connection,
+        table_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> List[ReflectedColumn]:
+        """Return information about columns in ``table_name``.
+
+        Given a :class:`_engine.Connection`, a string
+        ``table_name``, and an optional string ``schema``, return column
+        information as a list of dictionaries
+        corresponding to the :class:`.ReflectedColumn` dictionary.
+
+        This is an internal dialect method. Applications should use
+        :meth:`.Inspector.get_columns`.
+
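+        E.g., the public-facing equivalent (a usage sketch; ``engine``
+        and the table name are placeholders)::
+
+            from sqlalchemy import inspect
+
+            # hypothetical engine and table name
+            columns = inspect(engine).get_columns("some_table")
+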
+        """
+
+        raise NotImplementedError()
+
+    def get_multi_columns(
+        self,
+        connection: Connection,
+        *,
+        schema: Optional[str] = None,
+        filter_names: Optional[Collection[str]] = None,
+        **kw: Any,
+    ) -> Iterable[Tuple[TableKey, List[ReflectedColumn]]]:
+        """Return information about columns in all tables in the
+        given ``schema``.
+
+        This is an internal dialect method. Applications should use
+        :meth:`.Inspector.get_multi_columns`.
+
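+        The result pairs each table key, a ``(schema, table_name)``
+        tuple, with its column list, e.g. (illustrative shape only)::
+
+            [
+                # a schema of None indicates the default schema
+                ((None, "user"), [...]),
+                ((None, "address"), [...]),
+            ]
+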
+        .. note:: The :class:`_engine.DefaultDialect` provides a default
+          implementation that will call the single table method for
+          each object returned by :meth:`Dialect.get_table_names`,
+          :meth:`Dialect.get_view_names` or
+          :meth:`Dialect.get_materialized_view_names` depending on the
+          provided ``kind``. Dialects that want to support a faster
+          implementation should implement this method.
+
+        .. versionadded:: 2.0
+
+        """
+
+        raise NotImplementedError()
+
+    def get_pk_constraint(
+        self,
+        connection: Connection,
+        table_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> ReflectedPrimaryKeyConstraint:
+        """Return information about the primary key constraint on
+        table_name`.
+
+        Given a :class:`_engine.Connection`, a string
+        ``table_name``, and an optional string ``schema``, return primary
+        key information as a dictionary corresponding to the
+        :class:`.ReflectedPrimaryKeyConstraint` dictionary.
+
+        This is an internal dialect method. Applications should use
+        :meth:`.Inspector.get_pk_constraint`.
+
+        """
+        raise NotImplementedError()
+
+    def get_multi_pk_constraint(
+        self,
+        connection: Connection,
+        *,
+        schema: Optional[str] = None,
+        filter_names: Optional[Collection[str]] = None,
+        **kw: Any,
+    ) -> Iterable[Tuple[TableKey, ReflectedPrimaryKeyConstraint]]:
+        """Return information about primary key constraints in
+        all tables in the given ``schema``.
+
+        This is an internal dialect method. Applications should use
+        :meth:`.Inspector.get_multi_pk_constraint`.
+
+        .. note:: The :class:`_engine.DefaultDialect` provides a default
+          implementation that will call the single table method for
+          each object returned by :meth:`Dialect.get_table_names`,
+          :meth:`Dialect.get_view_names` or
+          :meth:`Dialect.get_materialized_view_names` depending on the
+          provided ``kind``. Dialects that want to support a faster
+          implementation should implement this method.
+
+        .. versionadded:: 2.0
+
+        """
+        raise NotImplementedError()
+
+    def get_foreign_keys(
+        self,
+        connection: Connection,
+        table_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> List[ReflectedForeignKeyConstraint]:
+        """Return information about foreign_keys in ``table_name``.
+
+        Given a :class:`_engine.Connection`, a string
+        ``table_name``, and an optional string ``schema``, return foreign
+        key information as a list of dicts corresponding to the
+        :class:`.ReflectedForeignKeyConstraint` dictionary.
+
+        This is an internal dialect method. Applications should use
+        :meth:`_engine.Inspector.get_foreign_keys`.
+        """
+
+        raise NotImplementedError()
+
+    def get_multi_foreign_keys(
+        self,
+        connection: Connection,
+        *,
+        schema: Optional[str] = None,
+        filter_names: Optional[Collection[str]] = None,
+        **kw: Any,
+    ) -> Iterable[Tuple[TableKey, List[ReflectedForeignKeyConstraint]]]:
+        """Return information about foreign_keys in all tables
+        in the given ``schema``.
+
+        This is an internal dialect method. Applications should use
+        :meth:`_engine.Inspector.get_multi_foreign_keys`.
+
+        .. note:: The :class:`_engine.DefaultDialect` provides a default
+          implementation that will call the single table method for
+          each object returned by :meth:`Dialect.get_table_names`,
+          :meth:`Dialect.get_view_names` or
+          :meth:`Dialect.get_materialized_view_names` depending on the
+          provided ``kind``. Dialects that want to support a faster
+          implementation should implement this method.
+
+        .. versionadded:: 2.0
+
+        """
+
+        raise NotImplementedError()
+
+    def get_table_names(
+        self, connection: Connection, schema: Optional[str] = None, **kw: Any
+    ) -> List[str]:
+        """Return a list of table names for ``schema``.
+
+        This is an internal dialect method. Applications should use
+        :meth:`_engine.Inspector.get_table_names`.
+
+        """
+
+        raise NotImplementedError()
+
+    def get_temp_table_names(
+        self, connection: Connection, schema: Optional[str] = None, **kw: Any
+    ) -> List[str]:
+        """Return a list of temporary table names on the given connection,
+        if supported by the underlying backend.
+
+        This is an internal dialect method. Applications should use
+        :meth:`_engine.Inspector.get_temp_table_names`.
+
+        """
+
+        raise NotImplementedError()
+
+    def get_view_names(
+        self, connection: Connection, schema: Optional[str] = None, **kw: Any
+    ) -> List[str]:
+        """Return a list of all non-materialized view names available in the
+        database.
+
+        This is an internal dialect method. Applications should use
+        :meth:`_engine.Inspector.get_view_names`.
+
+        :param schema: schema name to query, if not the default schema.
+
+        """
+
+        raise NotImplementedError()
+
+    def get_materialized_view_names(
+        self, connection: Connection, schema: Optional[str] = None, **kw: Any
+    ) -> List[str]:
+        """Return a list of all materialized view names available in the
+        database.
+
+        This is an internal dialect method. Applications should use
+        :meth:`_engine.Inspector.get_materialized_view_names`.
+
+        :param schema: schema name to query, if not the default schema.
+
+        .. versionadded:: 2.0
+
+        """
+
+        raise NotImplementedError()
+
+    def get_sequence_names(
+        self, connection: Connection, schema: Optional[str] = None, **kw: Any
+    ) -> List[str]:
+        """Return a list of all sequence names available in the database.
+
+        This is an internal dialect method. Applications should use
+        :meth:`_engine.Inspector.get_sequence_names`.
+
+        :param schema: schema name to query, if not the default schema.
+
+        .. versionadded:: 1.4
+        """
+
+        raise NotImplementedError()
+
+    def get_temp_view_names(
+        self, connection: Connection, schema: Optional[str] = None, **kw: Any
+    ) -> List[str]:
+        """Return a list of temporary view names on the given connection,
+        if supported by the underlying backend.
+
+        This is an internal dialect method. Applications should use
+        :meth:`_engine.Inspector.get_temp_view_names`.
+
+        """
+
+        raise NotImplementedError()
+
+    def get_schema_names(self, connection: Connection, **kw: Any) -> List[str]:
+        """Return a list of all schema names available in the database.
+
+        This is an internal dialect method. Applications should use
+        :meth:`_engine.Inspector.get_schema_names`.
+        """
+        raise NotImplementedError()
+
+    def get_view_definition(
+        self,
+        connection: Connection,
+        view_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> str:
+        """Return plain or materialized view definition.
+
+        This is an internal dialect method. Applications should use
+        :meth:`_engine.Inspector.get_view_definition`.
+
+        Given a :class:`_engine.Connection`, a string
+        ``view_name``, and an optional string ``schema``, return the view
+        definition.
+        """
+
+        raise NotImplementedError()
+
+    def get_indexes(
+        self,
+        connection: Connection,
+        table_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> List[ReflectedIndex]:
+        """Return information about indexes in ``table_name``.
+
+        Given a :class:`_engine.Connection`, a string
+        ``table_name`` and an optional string ``schema``, return index
+        information as a list of dictionaries corresponding to the
+        :class:`.ReflectedIndex` dictionary.
+
+        This is an internal dialect method. Applications should use
+        :meth:`.Inspector.get_indexes`.
+        """
+
+        raise NotImplementedError()
+
+    def get_multi_indexes(
+        self,
+        connection: Connection,
+        *,
+        schema: Optional[str] = None,
+        filter_names: Optional[Collection[str]] = None,
+        **kw: Any,
+    ) -> Iterable[Tuple[TableKey, List[ReflectedIndex]]]:
+        """Return information about indexes in in all tables
+        in the given ``schema``.
+
+        This is an internal dialect method. Applications should use
+        :meth:`.Inspector.get_multi_indexes`.
+
+        .. note:: The :class:`_engine.DefaultDialect` provides a default
+          implementation that will call the single table method for
+          each object returned by :meth:`Dialect.get_table_names`,
+          :meth:`Dialect.get_view_names` or
+          :meth:`Dialect.get_materialized_view_names` depending on the
+          provided ``kind``. Dialects that want to support a faster
+          implementation should implement this method.
+
+        .. versionadded:: 2.0
+
+        """
+
+        raise NotImplementedError()
+
+    def get_unique_constraints(
+        self,
+        connection: Connection,
+        table_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> List[ReflectedUniqueConstraint]:
+        r"""Return information about unique constraints in ``table_name``.
+
+        Given a string ``table_name`` and an optional string ``schema``, return
+        unique constraint information as a list of dicts corresponding
+        to the :class:`.ReflectedUniqueConstraint` dictionary.
+
+        This is an internal dialect method. Applications should use
+        :meth:`.Inspector.get_unique_constraints`.
+        """
+
+        raise NotImplementedError()
+
+    def get_multi_unique_constraints(
+        self,
+        connection: Connection,
+        *,
+        schema: Optional[str] = None,
+        filter_names: Optional[Collection[str]] = None,
+        **kw: Any,
+    ) -> Iterable[Tuple[TableKey, List[ReflectedUniqueConstraint]]]:
+        """Return information about unique constraints in all tables
+        in the given ``schema``.
+
+        This is an internal dialect method. Applications should use
+        :meth:`.Inspector.get_multi_unique_constraints`.
+
+        .. note:: The :class:`_engine.DefaultDialect` provides a default
+          implementation that will call the single table method for
+          each object returned by :meth:`Dialect.get_table_names`,
+          :meth:`Dialect.get_view_names` or
+          :meth:`Dialect.get_materialized_view_names` depending on the
+          provided ``kind``. Dialects that want to support a faster
+          implementation should implement this method.
+
+        .. versionadded:: 2.0
+
+        """
+
+        raise NotImplementedError()
+
+    def get_check_constraints(
+        self,
+        connection: Connection,
+        table_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> List[ReflectedCheckConstraint]:
+        r"""Return information about check constraints in ``table_name``.
+
+        Given a string ``table_name`` and an optional string ``schema``, return
+        check constraint information as a list of dicts corresponding
+        to the :class:`.ReflectedCheckConstraint` dictionary.
+
+        This is an internal dialect method. Applications should use
+        :meth:`.Inspector.get_check_constraints`.
+
+        """
+
+        raise NotImplementedError()
+
+    def get_multi_check_constraints(
+        self,
+        connection: Connection,
+        *,
+        schema: Optional[str] = None,
+        filter_names: Optional[Collection[str]] = None,
+        **kw: Any,
+    ) -> Iterable[Tuple[TableKey, List[ReflectedCheckConstraint]]]:
+        """Return information about check constraints in all tables
+        in the given ``schema``.
+
+        This is an internal dialect method. Applications should use
+        :meth:`.Inspector.get_multi_check_constraints`.
+
+        .. note:: The :class:`_engine.DefaultDialect` provides a default
+          implementation that will call the single table method for
+          each object returned by :meth:`Dialect.get_table_names`,
+          :meth:`Dialect.get_view_names` or
+          :meth:`Dialect.get_materialized_view_names` depending on the
+          provided ``kind``. Dialects that want to support a faster
+          implementation should implement this method.
+
+        .. versionadded:: 2.0
+
+        """
+
+        raise NotImplementedError()
+
+    def get_table_options(
+        self,
+        connection: Connection,
+        table_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> Dict[str, Any]:
+        """Return a dictionary of options specified when ``table_name``
+        was created.
+
+        This is an internal dialect method. Applications should use
+        :meth:`_engine.Inspector.get_table_options`.
+        """
+        raise NotImplementedError()
+
+    def get_multi_table_options(
+        self,
+        connection: Connection,
+        *,
+        schema: Optional[str] = None,
+        filter_names: Optional[Collection[str]] = None,
+        **kw: Any,
+    ) -> Iterable[Tuple[TableKey, Dict[str, Any]]]:
+        """Return a dictionary of options specified when the tables in the
+        given schema were created.
+
+        This is an internal dialect method. Applications should use
+        :meth:`_engine.Inspector.get_multi_table_options`.
+
+        .. note:: The :class:`_engine.DefaultDialect` provides a default
+          implementation that will call the single table method for
+          each object returned by :meth:`Dialect.get_table_names`,
+          :meth:`Dialect.get_view_names` or
+          :meth:`Dialect.get_materialized_view_names` depending on the
+          provided ``kind``. Dialects that want to support a faster
+          implementation should implement this method.
+
+        .. versionadded:: 2.0
+
+        """
+        raise NotImplementedError()
+
+    def get_table_comment(
+        self,
+        connection: Connection,
+        table_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> ReflectedTableComment:
+        r"""Return the "comment" for the table identified by ``table_name``.
+
+        Given a string ``table_name`` and an optional string ``schema``, return
+        table comment information as a dictionary corresponding to the
+        :class:`.ReflectedTableComment` dictionary.
+
+        This is an internal dialect method. Applications should use
+        :meth:`.Inspector.get_table_comment`.
+
+        :raise: ``NotImplementedError`` for dialects that don't support
+         comments.
+
+        .. versionadded:: 1.2
+
+        """
+
+        raise NotImplementedError()
+
+    def get_multi_table_comment(
+        self,
+        connection: Connection,
+        *,
+        schema: Optional[str] = None,
+        filter_names: Optional[Collection[str]] = None,
+        **kw: Any,
+    ) -> Iterable[Tuple[TableKey, ReflectedTableComment]]:
+        """Return information about the table comment in all tables
+        in the given ``schema``.
+
+        This is an internal dialect method. Applications should use
+        :meth:`_engine.Inspector.get_multi_table_comment`.
+
+        .. note:: The :class:`_engine.DefaultDialect` provides a default
+          implementation that will call the single table method for
+          each object returned by :meth:`Dialect.get_table_names`,
+          :meth:`Dialect.get_view_names` or
+          :meth:`Dialect.get_materialized_view_names` depending on the
+          provided ``kind``. Dialects that want to support a faster
+          implementation should implement this method.
+
+        .. versionadded:: 2.0
+
+        """
+
+        raise NotImplementedError()
+
+    def normalize_name(self, name: str) -> str:
+        """convert the given name to lowercase if it is detected as
+        case insensitive.
+
+        This method is only used if the dialect defines
+        requires_name_normalize=True.
+
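+        For example, for a backend that stores case-insensitive
+        identifiers in upper case (a sketch; ``dialect`` is a
+        placeholder instance)::
+
+            dialect.normalize_name("MYTABLE")  # -> "mytable"
+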
+        """
+        raise NotImplementedError()
+
+    def denormalize_name(self, name: str) -> str:
+        """convert the given name to a case insensitive identifier
+        for the backend if it is an all-lowercase name.
+
+        This method is only used if the dialect defines
+        requires_name_normalize=True.
+
+        """
+        raise NotImplementedError()
+
+    def has_table(
+        self,
+        connection: Connection,
+        table_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> bool:
+        """For internal dialect use, check the existence of a particular table
+        or view in the database.
+
+        Given a :class:`_engine.Connection` object, a string table_name and
+        optional schema name, return True if the given table exists in the
+        database, False otherwise.
+
+        This method serves as the underlying implementation of the
+        public facing :meth:`.Inspector.has_table` method, and is also used
+        internally to implement the "checkfirst" behavior for methods like
+        :meth:`_schema.Table.create` and :meth:`_schema.MetaData.create_all`.
+
+        .. note:: This method is used internally by SQLAlchemy, and is
+           published so that third-party dialects may provide an
+           implementation. It is **not** the public API for checking for table
+           presence. Please use the :meth:`.Inspector.has_table` method.
+
+        .. versionchanged:: 2.0  :meth:`_engine.Dialect.has_table` now
+           formally supports checking for additional table-like objects:
+
+           * any type of views (plain or materialized)
+           * temporary tables of any kind
+
+           Previously, these two checks were not formally specified and
+           different dialects would vary in their behavior.   The dialect
+           testing suite now includes tests for all of these object types,
+           and dialects to the degree that the backing database supports views
+           or temporary tables should seek to support locating these objects
+           for full compliance.
+
+        """
+
+        raise NotImplementedError()
+
+    def has_index(
+        self,
+        connection: Connection,
+        table_name: str,
+        index_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> bool:
+        """Check the existence of a particular index name in the database.
+
+        Given a :class:`_engine.Connection` object, a string
+        ``table_name`` and string index name, return ``True`` if an index of
+        the given name on the given table exists, ``False`` otherwise.
+
+        The :class:`.DefaultDialect` implements this in terms of the
+        :meth:`.Dialect.has_table` and :meth:`.Dialect.get_indexes` methods,
+        however dialects can implement a more performant version.
+
+        This is an internal dialect method. Applications should use
+        :meth:`_engine.Inspector.has_index`.
+
+        .. versionadded:: 1.4
+
+        """
+
+        raise NotImplementedError()
+
+    def has_sequence(
+        self,
+        connection: Connection,
+        sequence_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> bool:
+        """Check the existence of a particular sequence in the database.
+
+        Given a :class:`_engine.Connection` object and a string
+        ``sequence_name``, return ``True`` if the given sequence exists in
+        the database, ``False`` otherwise.
+
+        This is an internal dialect method. Applications should use
+        :meth:`_engine.Inspector.has_sequence`.
+        """
+
+        raise NotImplementedError()
+
+    def has_schema(
+        self, connection: Connection, schema_name: str, **kw: Any
+    ) -> bool:
+        """Check the existence of a particular schema name in the database.
+
+        Given a :class:`_engine.Connection` object, a string
+        ``schema_name``, return ``True`` if a schema of the
+        given name exists, ``False`` otherwise.
+
+        The :class:`.DefaultDialect` implements this by checking
+        the presence of ``schema_name`` among the schemas returned by
+        :meth:`.Dialect.get_schema_names`,
+        however dialects can implement a more performant version.
+
+        This is an internal dialect method. Applications should use
+        :meth:`_engine.Inspector.has_schema`.
+
+        .. versionadded:: 2.0
+
+        """
+
+        raise NotImplementedError()
+
+    def _get_server_version_info(self, connection: Connection) -> Any:
+        """Retrieve the server version info from the given connection.
+
+        This is used by the default implementation to populate the
+        "server_version_info" attribute and is called exactly
+        once upon first connect.
+
+        """
+
+        raise NotImplementedError()
+
+    def _get_default_schema_name(self, connection: Connection) -> str:
+        """Return the string name of the currently selected schema from
+        the given connection.
+
+        This is used by the default implementation to populate the
+        "default_schema_name" attribute and is called exactly
+        once upon first connect.
+
+        """
+
+        raise NotImplementedError()
+
+    def do_begin(self, dbapi_connection: PoolProxiedConnection) -> None:
+        """Provide an implementation of ``connection.begin()``, given a
+        DB-API connection.
+
+        The DBAPI has no dedicated "begin" method and it is expected
+        that transactions are implicit.  This hook is provided for those
+        DBAPIs that might need additional help in this area.
+
+        :param dbapi_connection: a DBAPI connection, typically
+         proxied within a :class:`.ConnectionFairy`.
+
+        """
+
+        raise NotImplementedError()
+
+    def do_rollback(self, dbapi_connection: PoolProxiedConnection) -> None:
+        """Provide an implementation of ``connection.rollback()``, given
+        a DB-API connection.
+
+        :param dbapi_connection: a DBAPI connection, typically
+         proxied within a :class:`.ConnectionFairy`.
+
+        """
+
+        raise NotImplementedError()
+
+    def do_commit(self, dbapi_connection: PoolProxiedConnection) -> None:
+        """Provide an implementation of ``connection.commit()``, given a
+        DB-API connection.
+
+        :param dbapi_connection: a DBAPI connection, typically
+         proxied within a :class:`.ConnectionFairy`.
+
+        """
+
+        raise NotImplementedError()
+
+    def do_terminate(self, dbapi_connection: DBAPIConnection) -> None:
+        """Provide an implementation of ``connection.close()`` that tries as
+        much as possible to not block, given a DBAPI
+        connection.
+
+        In the vast majority of cases this just calls ``.close()``; however,
+        some asyncio dialects may call upon different API features.
+
+        This hook is called by the :class:`_pool.Pool`
+        when a connection is being recycled or has been invalidated.
+
+        .. versionadded:: 1.4.41
+
+        """
+
+        raise NotImplementedError()
+
+    def do_close(self, dbapi_connection: DBAPIConnection) -> None:
+        """Provide an implementation of ``connection.close()``, given a DBAPI
+        connection.
+
+        This hook is called by the :class:`_pool.Pool`
+        when a connection has been
+        detached from the pool, or is being returned beyond the normal
+        capacity of the pool.
+
+        """
+
+        raise NotImplementedError()
+
+    def _do_ping_w_event(self, dbapi_connection: DBAPIConnection) -> bool:
+        raise NotImplementedError()
+
+    def do_ping(self, dbapi_connection: DBAPIConnection) -> bool:
+        """ping the DBAPI connection and return True if the connection is
+        usable."""
+        raise NotImplementedError()
+
+    def do_set_input_sizes(
+        self,
+        cursor: DBAPICursor,
+        list_of_tuples: _GenericSetInputSizesType,
+        context: ExecutionContext,
+    ) -> Any:
+        """invoke the cursor.setinputsizes() method with appropriate arguments
+
+        This hook is called if the :attr:`.Dialect.bind_typing` attribute is
+        set to the
+        :attr:`.BindTyping.SETINPUTSIZES` value.
+        Parameter data is passed in a list of tuples (paramname, dbtype,
+        sqltype), where ``paramname`` is the key of the parameter in the
+        statement, ``dbtype`` is the DBAPI datatype and ``sqltype`` is the
+        SQLAlchemy type. The order of tuples is in the correct parameter order.
+
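+        E.g. ``list_of_tuples`` resembles the following (illustrative;
+        the DBAPI type objects vary per driver)::
+
+            [
+                # (paramname, dbtype, sqltype)
+                ("param_1", dbapi.NUMBER, Integer()),
+                ("param_2", dbapi.STRING, String(50)),
+            ]
+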
+        .. versionadded:: 1.4
+
+        .. versionchanged:: 2.0  - setinputsizes mode is now enabled by
+           setting :attr:`.Dialect.bind_typing` to
+           :attr:`.BindTyping.SETINPUTSIZES`.  Dialects which accept
+           a ``use_setinputsizes`` parameter should set this value
+           appropriately.
+
+
+        """
+        raise NotImplementedError()
+
+    def create_xid(self) -> Any:
+        """Create a two-phase transaction ID.
+
+        This id will be passed to do_begin_twophase(),
+        do_rollback_twophase(), do_commit_twophase().  Its format is
+        unspecified.
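+
+        As an illustrative sketch, an implementation might generate a
+        random hexadecimal identifier::
+
+            def create_xid(self):
+                import random
+
+                return "_sa_%032x" % random.randint(0, 2**128)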
+        """
+
+        raise NotImplementedError()
+
+    def do_savepoint(self, connection: Connection, name: str) -> None:
+        """Create a savepoint with the given name.
+
+        :param connection: a :class:`_engine.Connection`.
+        :param name: savepoint name.
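+
+        An illustrative sketch which emits the SQL directly, assuming
+        ``from sqlalchemy import text``; a real dialect should quote the
+        savepoint name through its identifier preparer::
+
+            def do_savepoint(self, connection, name):
+                connection.execute(text("SAVEPOINT %s" % name))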
+
+        """
+
+        raise NotImplementedError()
+
+    def do_rollback_to_savepoint(
+        self, connection: Connection, name: str
+    ) -> None:
+        """Rollback a connection to the named savepoint.
+
+        :param connection: a :class:`_engine.Connection`.
+        :param name: savepoint name.
+
+        """
+
+        raise NotImplementedError()
+
+    def do_release_savepoint(self, connection: Connection, name: str) -> None:
+        """Release the named savepoint on a connection.
+
+        :param connection: a :class:`_engine.Connection`.
+        :param name: savepoint name.
+        """
+
+        raise NotImplementedError()
+
+    def do_begin_twophase(self, connection: Connection, xid: Any) -> None:
+        """Begin a two phase transaction on the given connection.
+
+        :param connection: a :class:`_engine.Connection`.
+        :param xid: xid
+
+        """
+
+        raise NotImplementedError()
+
+    def do_prepare_twophase(self, connection: Connection, xid: Any) -> None:
+        """Prepare a two phase transaction on the given connection.
+
+        :param connection: a :class:`_engine.Connection`.
+        :param xid: xid
+
+        """
+
+        raise NotImplementedError()
+
+    def do_rollback_twophase(
+        self,
+        connection: Connection,
+        xid: Any,
+        is_prepared: bool = True,
+        recover: bool = False,
+    ) -> None:
+        """Rollback a two phase transaction on the given connection.
+
+        :param connection: a :class:`_engine.Connection`.
+        :param xid: xid
+        :param is_prepared: whether or not
+         :meth:`.TwoPhaseTransaction.prepare` was called.
+        :param recover: if the recover flag was passed.
+
+        """
+
+        raise NotImplementedError()
+
+    def do_commit_twophase(
+        self,
+        connection: Connection,
+        xid: Any,
+        is_prepared: bool = True,
+        recover: bool = False,
+    ) -> None:
+        """Commit a two phase transaction on the given connection.
+
+
+        :param connection: a :class:`_engine.Connection`.
+        :param xid: xid
+        :param is_prepared: whether or not
+         :meth:`.TwoPhaseTransaction.prepare` was called.
+        :param recover: if the recover flag was passed.
+
+        """
+
+        raise NotImplementedError()
+
+    def do_recover_twophase(self, connection: Connection) -> List[Any]:
+        """Recover list of uncommitted prepared two phase transaction
+        identifiers on the given connection.
+
+        :param connection: a :class:`_engine.Connection`.
+
+        """
+
+        raise NotImplementedError()
+
+    def _deliver_insertmanyvalues_batches(
+        self,
+        connection: Connection,
+        cursor: DBAPICursor,
+        statement: str,
+        parameters: _DBAPIMultiExecuteParams,
+        generic_setinputsizes: Optional[_GenericSetInputSizesType],
+        context: ExecutionContext,
+    ) -> Iterator[_InsertManyValuesBatch]:
+        """convert executemany parameters for an INSERT into an iterator
+        of statement/single execute values, used by the insertmanyvalues
+        feature.
+
+        """
+        raise NotImplementedError()
+
+    def do_executemany(
+        self,
+        cursor: DBAPICursor,
+        statement: str,
+        parameters: _DBAPIMultiExecuteParams,
+        context: Optional[ExecutionContext] = None,
+    ) -> None:
+        """Provide an implementation of ``cursor.executemany(statement,
+        parameters)``."""
+
+        raise NotImplementedError()
+
+    def do_execute(
+        self,
+        cursor: DBAPICursor,
+        statement: str,
+        parameters: Optional[_DBAPISingleExecuteParams],
+        context: Optional[ExecutionContext] = None,
+    ) -> None:
+        """Provide an implementation of ``cursor.execute(statement,
+        parameters)``."""
+
+        raise NotImplementedError()
+
+    def do_execute_no_params(
+        self,
+        cursor: DBAPICursor,
+        statement: str,
+        context: Optional[ExecutionContext] = None,
+    ) -> None:
+        """Provide an implementation of ``cursor.execute(statement)``.
+
+        The parameter collection should not be sent.
+
+        """
+
+        raise NotImplementedError()
+
+    def is_disconnect(
+        self,
+        e: Exception,
+        connection: Optional[Union[PoolProxiedConnection, DBAPIConnection]],
+        cursor: Optional[DBAPICursor],
+    ) -> bool:
+        """Return True if the given DB-API error indicates an invalid
+        connection"""
+
+        raise NotImplementedError()
+
+    def connect(self, *cargs: Any, **cparams: Any) -> DBAPIConnection:
+        r"""Establish a connection using this dialect's DBAPI.
+
+        The default implementation of this method is::
+
+            def connect(self, *cargs, **cparams):
+                return self.dbapi.connect(*cargs, **cparams)
+
+        The ``*cargs, **cparams`` parameters are generated directly
+        from this dialect's :meth:`.Dialect.create_connect_args` method.
+
+        This method may be used for dialects that need to perform programmatic
+        per-connection steps when a new connection is procured from the
+        DBAPI.
+
+
+        :param \*cargs: positional parameters returned from the
+         :meth:`.Dialect.create_connect_args` method
+
+        :param \*\*cparams: keyword parameters returned from the
+         :meth:`.Dialect.create_connect_args` method.
+
+        :return: a DBAPI connection, typically from the :pep:`249` module
+         level ``.connect()`` function.
+
+        .. seealso::
+
+            :meth:`.Dialect.create_connect_args`
+
+            :meth:`.Dialect.on_connect`
+
+        """
+        raise NotImplementedError()
+
+    def on_connect_url(self, url: URL) -> Optional[Callable[[Any], Any]]:
+        """return a callable which sets up a newly created DBAPI connection.
+
+        This method is a new hook that supersedes the
+        :meth:`_engine.Dialect.on_connect` method when implemented by a
+        dialect.   When not implemented by a dialect, it invokes the
+        :meth:`_engine.Dialect.on_connect` method directly to maintain
+        compatibility with existing dialects.   There is no deprecation
+        for :meth:`_engine.Dialect.on_connect` expected.
+
+        The callable should accept a single argument "conn" which is the
+        DBAPI connection itself.  The inner callable has no
+        return value.
+
+        E.g.::
+
+            class MyDialect(default.DefaultDialect):
+                # ...
+
+                def on_connect_url(self, url):
+                    def do_on_connect(connection):
+                        connection.execute("SET SPECIAL FLAGS etc")
+
+                    return do_on_connect
+
+        This is used to set dialect-wide per-connection options such as
+        isolation modes, Unicode modes, etc.
+
+        This method differs from :meth:`_engine.Dialect.on_connect` in that
+        it is passed the :class:`_engine.URL` object that's relevant to the
+        connect args.  Normally the only way to get this from the
+        :meth:`_engine.Dialect.on_connect` hook is to look on the
+        :class:`_engine.Engine` itself; however, this URL object may have
+        been replaced by plugins.
+
+        .. note::
+
+            The default implementation of
+            :meth:`_engine.Dialect.on_connect_url` is to invoke the
+            :meth:`_engine.Dialect.on_connect` method. Therefore if a dialect
+            implements this method, the :meth:`_engine.Dialect.on_connect`
+            method **will not be called** unless the overriding dialect calls
+            it directly from here.
+
+        .. versionadded:: 1.4.3 added :meth:`_engine.Dialect.on_connect_url`
+           which normally calls into :meth:`_engine.Dialect.on_connect`.
+
+        :param url: a :class:`_engine.URL` object representing the
+         :class:`_engine.URL` that was passed to the
+         :meth:`_engine.Dialect.create_connect_args` method.
+
+        :return: a callable that accepts a single DBAPI connection as an
+         argument, or None.
+
+        .. seealso::
+
+            :meth:`_engine.Dialect.on_connect`
+
+        """
+        return self.on_connect()
+
+    def on_connect(self) -> Optional[Callable[[Any], Any]]:
+        """return a callable which sets up a newly created DBAPI connection.
+
+        The callable should accept a single argument "conn" which is the
+        DBAPI connection itself.  The inner callable has no
+        return value.
+
+        E.g.::
+
+            class MyDialect(default.DefaultDialect):
+                # ...
+
+                def on_connect(self):
+                    def do_on_connect(connection):
+                        connection.execute("SET SPECIAL FLAGS etc")
+
+                    return do_on_connect
+
+        This is used to set dialect-wide per-connection options such as
+        isolation modes, Unicode modes, etc.
+
+        The "do_on_connect" callable is invoked by using the
+        :meth:`_events.PoolEvents.connect` event
+        hook, then unwrapping the DBAPI connection and passing it into the
+        callable.
+
+        .. versionchanged:: 1.4 the on_connect hook is no longer called twice
+           for the first connection of a dialect.  The on_connect hook is still
+           called before the :meth:`_engine.Dialect.initialize` method however.
+
+        .. versionchanged:: 1.4.3 the on_connect hook is invoked from a new
+           method on_connect_url that passes the URL that was used to create
+           the connect args.   Dialects can implement on_connect_url instead
+           of on_connect if they need the URL object that was used for the
+           connection in order to get additional context.
+
+        If None is returned, no event listener is generated.
+
+        :return: a callable that accepts a single DBAPI connection as an
+         argument, or None.
+
+        .. seealso::
+
+            :meth:`.Dialect.connect` - allows the DBAPI ``connect()`` sequence
+            itself to be controlled.
+
+            :meth:`.Dialect.on_connect_url` - supersedes
+            :meth:`.Dialect.on_connect` to also receive the
+            :class:`_engine.URL` object in context.
+
+        """
+        return None
+
+    def reset_isolation_level(self, dbapi_connection: DBAPIConnection) -> None:
+        """Given a DBAPI connection, revert its isolation to the default.
+
+        Note that this is a dialect-level method which is used as part
+        of the implementation of the :class:`_engine.Connection` and
+        :class:`_engine.Engine`
+        isolation level facilities; these APIs should be preferred for
+        most typical use cases.
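+
+        An illustrative sketch, assuming the dialect has recorded the
+        default level at first connect (the attribute shown is
+        hypothetical)::
+
+            def reset_isolation_level(self, dbapi_connection):
+                self.set_isolation_level(
+                    dbapi_connection, self.default_isolation_level
+                )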
+
+        .. seealso::
+
+            :meth:`_engine.Connection.get_isolation_level`
+            - view current level
+
+            :attr:`_engine.Connection.default_isolation_level`
+            - view default level
+
+            :paramref:`.Connection.execution_options.isolation_level` -
+            set per :class:`_engine.Connection` isolation level
+
+            :paramref:`_sa.create_engine.isolation_level` -
+            set per :class:`_engine.Engine` isolation level
+
+        """
+
+        raise NotImplementedError()
+
+    def set_isolation_level(
+        self, dbapi_connection: DBAPIConnection, level: IsolationLevel
+    ) -> None:
+        """Given a DBAPI connection, set its isolation level.
+
+        Note that this is a dialect-level method which is used as part
+        of the implementation of the :class:`_engine.Connection` and
+        :class:`_engine.Engine`
+        isolation level facilities; these APIs should be preferred for
+        most typical use cases.
+
+        If the dialect also implements the
+        :meth:`.Dialect.get_isolation_level_values` method, then the given
+        level is guaranteed to be one of the string names within that sequence,
+        and the method will not need to anticipate a lookup failure.
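+
+        A PostgreSQL-flavored sketch for illustration only; the actual
+        statement and mechanics are backend-specific::
+
+            def set_isolation_level(self, dbapi_connection, level):
+                cursor = dbapi_connection.cursor()
+                try:
+                    cursor.execute(
+                        "SET SESSION CHARACTERISTICS AS TRANSACTION "
+                        "ISOLATION LEVEL %s" % level
+                    )
+                finally:
+                    cursor.close()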
+
+        .. seealso::
+
+            :meth:`_engine.Connection.get_isolation_level`
+            - view current level
+
+            :attr:`_engine.Connection.default_isolation_level`
+            - view default level
+
+            :paramref:`.Connection.execution_options.isolation_level` -
+            set per :class:`_engine.Connection` isolation level
+
+            :paramref:`_sa.create_engine.isolation_level` -
+            set per :class:`_engine.Engine` isolation level
+
+        """
+
+        raise NotImplementedError()
+
+    def get_isolation_level(
+        self, dbapi_connection: DBAPIConnection
+    ) -> IsolationLevel:
+        """Given a DBAPI connection, return its isolation level.
+
+        When working with a :class:`_engine.Connection` object,
+        the corresponding
+        DBAPI connection may be procured using the
+        :attr:`_engine.Connection.connection` accessor.
+
+        Note that this is a dialect-level method which is used as part
+        of the implementation of the :class:`_engine.Connection` and
+        :class:`_engine.Engine` isolation level facilities;
+        these APIs should be preferred for most typical use cases.
+
+
+        .. seealso::
+
+            :meth:`_engine.Connection.get_isolation_level`
+            - view current level
+
+            :attr:`_engine.Connection.default_isolation_level`
+            - view default level
+
+            :paramref:`.Connection.execution_options.isolation_level` -
+            set per :class:`_engine.Connection` isolation level
+
+            :paramref:`_sa.create_engine.isolation_level` -
+            set per :class:`_engine.Engine` isolation level
+
+
+        """
+
+        raise NotImplementedError()
+
+    def get_default_isolation_level(
+        self, dbapi_conn: DBAPIConnection
+    ) -> IsolationLevel:
+        """Given a DBAPI connection, return its isolation level, or
+        a default isolation level if one cannot be retrieved.
+
+        This method may only raise NotImplementedError and
+        **must not raise any other exception**, as it is used implicitly upon
+        first connect.
+
+        The method **must return a value** for a dialect that supports
+        isolation level settings, as this level is what will be reverted
+        towards when a per-connection isolation level change is made.
+
+        The method defaults to using the :meth:`.Dialect.get_isolation_level`
+        method unless overridden by a dialect.
+
+        .. versionadded:: 1.3.22
+
+        """
+        raise NotImplementedError()
+
+    def get_isolation_level_values(
+        self, dbapi_conn: DBAPIConnection
+    ) -> List[IsolationLevel]:
+        """return a sequence of string isolation level names that are accepted
+        by this dialect.
+
+        The available names should use the following conventions:
+
+        * use UPPERCASE names.   isolation level methods will accept lowercase
+          names but these are normalized into UPPERCASE before being passed
+          along to the dialect.
+        * words should be separated by spaces, not underscores, e.g.
+          ``REPEATABLE READ``.  isolation level names will have underscores
+          converted to spaces before being passed along to the dialect.
+        * The names for the four standard isolation levels, to the extent
+          that they are supported by the backend, should be
+          ``READ UNCOMMITTED``, ``READ COMMITTED``, ``REPEATABLE READ``,
+          ``SERIALIZABLE``.
+        * if the dialect supports an autocommit option it should be provided
+          using the isolation level name ``AUTOCOMMIT``.
+        * Other isolation modes may also be present, provided that they
+          are named in UPPERCASE and use spaces not underscores.
+
+        This function is used so that the default dialect can check that
+        a given isolation level parameter is valid, else raises an
+        :class:`_exc.ArgumentError`.
+
+        A DBAPI connection is passed to the method, in the unlikely event
+        that the dialect needs to interrogate the connection itself to
+        determine this list; however, it is expected that most backends will
+        return a hardcoded list of values.  If the dialect supports
+        "AUTOCOMMIT",
+        that value should also be present in the sequence returned.
+
+        The method raises ``NotImplementedError`` by default.  If a dialect
+        does not implement this method, then the default dialect will not
+        perform any checking on a given isolation level value before passing
+        it onto the :meth:`.Dialect.set_isolation_level` method.  This is
+        to allow backwards-compatibility with third party dialects that may
+        not yet be implementing this method.
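+
+        For example, a backend supporting the four standard levels plus
+        autocommit might return a hardcoded list::
+
+            def get_isolation_level_values(self, dbapi_conn):
+                return [
+                    "READ UNCOMMITTED",
+                    "READ COMMITTED",
+                    "REPEATABLE READ",
+                    "SERIALIZABLE",
+                    "AUTOCOMMIT",
+                ]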
+
+        .. versionadded:: 2.0
+
+        """
+        raise NotImplementedError()
+
+    def _assert_and_set_isolation_level(
+        self, dbapi_conn: DBAPIConnection, level: IsolationLevel
+    ) -> None:
+        raise NotImplementedError()
+
+    @classmethod
+    def get_dialect_cls(cls, url: URL) -> Type[Dialect]:
+        """Given a URL, return the :class:`.Dialect` that will be used.
+
+        This is a hook that allows an external plugin to provide functionality
+        around an existing dialect, by allowing the plugin to be loaded
+        from the url based on an entrypoint, and then the plugin returns
+        the actual dialect to be used.
+
+        By default this just returns the cls.
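+
+        An illustrative sketch of a plugin dialect that substitutes its own
+        class based on a URL parameter (``MyProxyDialect`` is
+        hypothetical)::
+
+            @classmethod
+            def get_dialect_cls(cls, url):
+                if "use_proxy" in url.query:
+                    return MyProxyDialect
+                return cls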
+
+        """
+        return cls
+
+    @classmethod
+    def get_async_dialect_cls(cls, url: URL) -> Type[Dialect]:
+        """Given a URL, return the :class:`.Dialect` that will be used by
+        an async engine.
+
+        By default this is an alias of :meth:`.Dialect.get_dialect_cls` and
+        just returns the cls. It may be used if a dialect provides
+        both a sync and async version under the same name, like the
+        ``psycopg`` driver.
+
+        .. versionadded:: 2.0
+
+        .. seealso::
+
+            :meth:`.Dialect.get_dialect_cls`
+
+        """
+        return cls.get_dialect_cls(url)
+
+    @classmethod
+    def load_provisioning(cls) -> None:
+        """set up the provision.py module for this dialect.
+
+        For dialects that include a provision.py module that sets up
+        provisioning followers, this method should initiate that process.
+
+        A typical implementation would be::
+
+            @classmethod
+            def load_provisioning(cls):
+                __import__("mydialect.provision")
+
+        The default method assumes a module named ``provision.py`` inside
+        the owning package of the current dialect, based on the ``__module__``
+        attribute::
+
+            @classmethod
+            def load_provisioning(cls):
+                package = ".".join(cls.__module__.split(".")[0:-1])
+                try:
+                    __import__(package + ".provision")
+                except ImportError:
+                    pass
+
+        .. versionadded:: 1.3.14
+
+        """
+
+    @classmethod
+    def engine_created(cls, engine: Engine) -> None:
+        """A convenience hook called before returning the final
+        :class:`_engine.Engine`.
+
+        If the dialect returned a different class from the
+        :meth:`.get_dialect_cls`
+        method, then the hook is called on both classes, first on
+        the dialect class returned by the :meth:`.get_dialect_cls` method and
+        then on the class on which the method was called.
+
+        The hook should be used by dialects and/or wrappers to apply special
+        events to the engine or its components.   In particular, it allows
+        a dialect-wrapping class to apply dialect-level events.
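+
+        For example, a wrapper might attach event listeners at this point
+        (``my_listener`` being a hypothetical callable)::
+
+            @classmethod
+            def engine_created(cls, engine):
+                event.listen(engine, "connect", my_listener)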
+
+        """
+
+    def get_driver_connection(self, connection: DBAPIConnection) -> Any:
+        """Returns the connection object as returned by the external driver
+        package.
+
+        For normal dialects that use a DBAPI compliant driver this call
+        will just return the ``connection`` passed as argument.
+        For dialects that instead adapt a non DBAPI compliant driver, like
+        when adapting an asyncio driver, this call will return the
+        connection-like object as returned by the driver.
+
+        .. versionadded:: 1.4.24
+
+        """
+        raise NotImplementedError()
+
+    def set_engine_execution_options(
+        self, engine: Engine, opts: CoreExecuteOptionsParameter
+    ) -> None:
+        """Establish execution options for a given engine.
+
+        This is implemented by :class:`.DefaultDialect` to establish
+        event hooks for new :class:`.Connection` instances created
+        by the given :class:`.Engine` which will then invoke the
+        :meth:`.Dialect.set_connection_execution_options` method for that
+        connection.
+
+        """
+        raise NotImplementedError()
+
+    def set_connection_execution_options(
+        self, connection: Connection, opts: CoreExecuteOptionsParameter
+    ) -> None:
+        """Establish execution options for a given connection.
+
+        This is implemented by :class:`.DefaultDialect` in order to implement
+        the :paramref:`_engine.Connection.execution_options.isolation_level`
+        execution option.  Dialects can intercept various execution options
+        which may need to modify state on a particular DBAPI connection.
+
+        .. versionadded:: 1.4
+
+        """
+        raise NotImplementedError()
+
+    def get_dialect_pool_class(self, url: URL) -> Type[Pool]:
+        """return a Pool class to use for a given URL"""
+        raise NotImplementedError()
+
+
+class CreateEnginePlugin:
+    """A set of hooks intended to augment the construction of an
+    :class:`_engine.Engine` object based on entrypoint names in a URL.
+
+    The purpose of :class:`_engine.CreateEnginePlugin` is to allow third-party
+    systems to apply engine, pool and dialect level event listeners without
+    the need for the target application to be modified; instead, the plugin
+    names can be added to the database URL.  Target applications for
+    :class:`_engine.CreateEnginePlugin` include:
+
+    * connection and SQL performance tools, e.g. which use events to track
+      number of checkouts and/or time spent with statements
+
+    * connectivity plugins such as proxies
+
+    A rudimentary :class:`_engine.CreateEnginePlugin` that attaches a logger
+    to an :class:`_engine.Engine` object might look like::
+
+
+        import logging
+
+        from sqlalchemy.engine import CreateEnginePlugin
+        from sqlalchemy import event
+
+
+        class LogCursorEventsPlugin(CreateEnginePlugin):
+            def __init__(self, url, kwargs):
+                # consume the parameter "log_cursor_logging_name" from the
+                # URL query
+                logging_name = url.query.get(
+                    "log_cursor_logging_name", "log_cursor"
+                )
+
+                self.log = logging.getLogger(logging_name)
+
+            def update_url(self, url):
+                "update the URL to one that no longer includes our parameters"
+                return url.difference_update_query(["log_cursor_logging_name"])
+
+            def engine_created(self, engine):
+                "attach an event listener after the new Engine is constructed"
+                event.listen(engine, "before_cursor_execute", self._log_event)
+
+            def _log_event(
+                self,
+                conn,
+                cursor,
+                statement,
+                parameters,
+                context,
+                executemany,
+            ):
+
+                self.log.info("Plugin logged cursor event: %s", statement)
+
+    Plugins are registered using entry points in a similar way as that
+    of dialects::
+
+        entry_points = {
+            "sqlalchemy.plugins": [
+                "log_cursor_plugin = myapp.plugins:LogCursorEventsPlugin"
+            ]
+        }
+
+    A plugin that uses the above names would be invoked from a database
+    URL as in::
+
+        from sqlalchemy import create_engine
+
+        engine = create_engine(
+            "mysql+pymysql://scott:tiger@localhost/test?"
+            "plugin=log_cursor_plugin&log_cursor_logging_name=mylogger"
+        )
+
+    The ``plugin`` URL parameter supports multiple instances, so that a URL
+    may specify multiple plugins; they are loaded in the order stated
+    in the URL::
+
+        engine = create_engine(
+            "mysql+pymysql://scott:tiger@localhost/test?"
+            "plugin=plugin_one&plugin=plugin_twp&plugin=plugin_three"
+        )
+
+    The plugin names may also be passed directly to :func:`_sa.create_engine`
+    using the :paramref:`_sa.create_engine.plugins` argument::
+
+        engine = create_engine(
+            "mysql+pymysql://scott:tiger@localhost/test", plugins=["myplugin"]
+        )
+
+    .. versionadded:: 1.2.3  plugin names can also be specified
+       to :func:`_sa.create_engine` as a list
+
+    A plugin may consume plugin-specific arguments from the
+    :class:`_engine.URL` object as well as the ``kwargs`` dictionary, which is
+    the dictionary of arguments passed to the :func:`_sa.create_engine`
+    call.  "Consuming" these arguments includes that they must be removed
+    when the plugin initializes, so that the arguments are not passed along
+    to the :class:`_engine.Dialect` constructor, where they will raise an
+    :class:`_exc.ArgumentError` because they are not known by the dialect.
+
+    As of version 1.4 of SQLAlchemy, arguments should continue to be consumed
+    from the ``kwargs`` dictionary directly, by removing the values with a
+    method such as ``dict.pop``. Arguments from the :class:`_engine.URL` object
+    should be consumed by implementing the
+    :meth:`_engine.CreateEnginePlugin.update_url` method, returning a new copy
+    of the :class:`_engine.URL` with plugin-specific parameters removed::
+
+        class MyPlugin(CreateEnginePlugin):
+            def __init__(self, url, kwargs):
+                self.my_argument_one = url.query["my_argument_one"]
+                self.my_argument_two = url.query["my_argument_two"]
+                self.my_argument_three = kwargs.pop("my_argument_three", None)
+
+            def update_url(self, url):
+                return url.difference_update_query(
+                    ["my_argument_one", "my_argument_two"]
+                )
+
+    Arguments like those illustrated above would be consumed from a
+    :func:`_sa.create_engine` call such as::
+
+        from sqlalchemy import create_engine
+
+        engine = create_engine(
+            "mysql+pymysql://scott:tiger@localhost/test?"
+            "plugin=myplugin&my_argument_one=foo&my_argument_two=bar",
+            my_argument_three="bat",
+        )
+
+    .. versionchanged:: 1.4
+
+        The :class:`_engine.URL` object is now immutable; a
+        :class:`_engine.CreateEnginePlugin` that needs to alter the
+        :class:`_engine.URL` should implement the newly added
+        :meth:`_engine.CreateEnginePlugin.update_url` method, which
+        is invoked after the plugin is constructed.
+
+        For migration, construct the plugin in the following way, checking
+        for the existence of the :meth:`_engine.CreateEnginePlugin.update_url`
+        method to detect which version is running::
+
+            class MyPlugin(CreateEnginePlugin):
+                def __init__(self, url, kwargs):
+                    if hasattr(CreateEnginePlugin, "update_url"):
+                        # detect the 1.4 API
+                        self.my_argument_one = url.query["my_argument_one"]
+                        self.my_argument_two = url.query["my_argument_two"]
+                    else:
+                        # detect the 1.3 and earlier API - mutate the
+                        # URL directly
+                        self.my_argument_one = url.query.pop("my_argument_one")
+                        self.my_argument_two = url.query.pop("my_argument_two")
+
+                    self.my_argument_three = kwargs.pop("my_argument_three", None)
+
+                def update_url(self, url):
+                    # this method is only called in the 1.4 version
+                    return url.difference_update_query(
+                        ["my_argument_one", "my_argument_two"]
+                    )
+
+        .. seealso::
+
+            :ref:`change_5526` - overview of the :class:`_engine.URL` change which
+            also includes notes regarding :class:`_engine.CreateEnginePlugin`.
+
+
+    When the engine creation process completes and produces the
+    :class:`_engine.Engine` object, it is again passed to the plugin via the
+    :meth:`_engine.CreateEnginePlugin.engine_created` hook.  In this hook, additional
+    changes can be made to the engine, most typically involving setup of
+    events (e.g. those defined in :ref:`core_event_toplevel`).
+
+    """  # noqa: E501
+
+    def __init__(self, url: URL, kwargs: Dict[str, Any]):
+        """Construct a new :class:`.CreateEnginePlugin`.
+
+        The plugin object is instantiated individually for each call
+        to :func:`_sa.create_engine`.  A single
+        :class:`_engine.Engine` will be
+        passed to the :meth:`.CreateEnginePlugin.engine_created` method
+        corresponding to this URL.
+
+        :param url: the :class:`_engine.URL` object.  The plugin may inspect
+         the :class:`_engine.URL` for arguments.  Arguments used by the
+         plugin should be removed, by returning an updated :class:`_engine.URL`
+         from the :meth:`_engine.CreateEnginePlugin.update_url` method.
+
+         .. versionchanged::  1.4
+
+            The :class:`_engine.URL` object is now immutable, so a
+            :class:`_engine.CreateEnginePlugin` that needs to alter the
+            :class:`_engine.URL` object should implement the
+            :meth:`_engine.CreateEnginePlugin.update_url` method.
+
+        :param kwargs: The keyword arguments passed to
+         :func:`_sa.create_engine`.
+
+        """
+        self.url = url
+
+    def update_url(self, url: URL) -> URL:
+        """Update the :class:`_engine.URL`.
+
+        A new :class:`_engine.URL` should be returned.   This method is
+        typically used to consume configuration arguments from the
+        :class:`_engine.URL` which must be removed, as they will not be
+        recognized by the dialect.  The
+        :meth:`_engine.URL.difference_update_query` method is available
+        to remove these arguments.   See the docstring at
+        :class:`_engine.CreateEnginePlugin` for an example.
+
+
+        .. versionadded:: 1.4
+
+        """
+        raise NotImplementedError()
+
+    def handle_dialect_kwargs(
+        self, dialect_cls: Type[Dialect], dialect_args: Dict[str, Any]
+    ) -> None:
+        """parse and modify dialect kwargs"""
+
+    def handle_pool_kwargs(
+        self, pool_cls: Type[Pool], pool_args: Dict[str, Any]
+    ) -> None:
+        """parse and modify pool kwargs"""
+
+    def engine_created(self, engine: Engine) -> None:
+        """Receive the :class:`_engine.Engine`
+        object when it is fully constructed.
+
+        The plugin may make additional changes to the engine, such as
+        registering engine or connection pool events.
+
+        """
+
+
+class ExecutionContext:
+    """A messenger object for a Dialect that corresponds to a single
+    execution.
+
+    """
+
+    engine: Engine
+    """engine which the Connection is associated with"""
+
+    connection: Connection
+    """Connection object which can be freely used by default value
+      generators to execute SQL.  This Connection should reference the
+      same underlying connection/transactional resources of
+      root_connection."""
+
+    root_connection: Connection
+    """Connection object which is the source of this ExecutionContext."""
+
+    dialect: Dialect
+    """dialect which created this ExecutionContext."""
+
+    cursor: DBAPICursor
+    """DB-API cursor procured from the connection"""
+
+    compiled: Optional[Compiled]
+    """if passed to constructor, sqlalchemy.engine.base.Compiled object
+      being executed"""
+
+    statement: str
+    """string version of the statement to be executed.  Is either
+      passed to the constructor, or must be created from the
+      sql.Compiled object by the time pre_exec() has completed."""
+
+    invoked_statement: Optional[Executable]
+    """The Executable statement object that was given in the first place.
+
+    This should be structurally equivalent to compiled.statement, but is not
+    necessarily the same object, as in a caching scenario the compiled form
+    will have been extracted from the cache.
+
+    """
+
+    parameters: _AnyMultiExecuteParams
+    """bind parameters passed to the execute() or exec_driver_sql() methods.
+
+    These are always stored as a list of parameter entries.  A single-element
+    list corresponds to a ``cursor.execute()`` call and a multiple-element
+    list corresponds to ``cursor.executemany()``, except in the case
+    of :attr:`.ExecuteStyle.INSERTMANYVALUES` which will use
+    ``cursor.execute()`` one or more times.
+
+    """
+
+    no_parameters: bool
+    """True if the execution style does not use parameters"""
+
+    isinsert: bool
+    """True if the statement is an INSERT."""
+
+    isupdate: bool
+    """True if the statement is an UPDATE."""
+
+    execute_style: ExecuteStyle
+    """the style of DBAPI cursor method that will be used to execute
+    a statement.
+
+    .. versionadded:: 2.0
+
+    """
+
+    executemany: bool
+    """True if the context has a list of more than one parameter set.
+
+    Historically this attribute links to whether ``cursor.execute()`` or
+    ``cursor.executemany()`` will be used.  It also can now mean that
+    "insertmanyvalues" may be used which indicates one or more
+    ``cursor.execute()`` calls.
+
+    """
+
+    prefetch_cols: util.generic_fn_descriptor[Optional[Sequence[Column[Any]]]]
+    """a list of Column objects for which a client-side default
+      was fired off.  Applies to inserts and updates."""
+
+    postfetch_cols: util.generic_fn_descriptor[Optional[Sequence[Column[Any]]]]
+    """a list of Column objects for which a server-side default or
+      inline SQL expression value was fired off.  Applies to inserts
+      and updates."""
+
+    execution_options: _ExecuteOptions
+    """Execution options associated with the current statement execution"""
+
+    @classmethod
+    def _init_ddl(
+        cls,
+        dialect: Dialect,
+        connection: Connection,
+        dbapi_connection: PoolProxiedConnection,
+        execution_options: _ExecuteOptions,
+        compiled_ddl: DDLCompiler,
+    ) -> ExecutionContext:
+        raise NotImplementedError()
+
+    @classmethod
+    def _init_compiled(
+        cls,
+        dialect: Dialect,
+        connection: Connection,
+        dbapi_connection: PoolProxiedConnection,
+        execution_options: _ExecuteOptions,
+        compiled: SQLCompiler,
+        parameters: _CoreMultiExecuteParams,
+        invoked_statement: Executable,
+        extracted_parameters: Optional[Sequence[BindParameter[Any]]],
+        cache_hit: CacheStats = CacheStats.CACHING_DISABLED,
+    ) -> ExecutionContext:
+        raise NotImplementedError()
+
+    @classmethod
+    def _init_statement(
+        cls,
+        dialect: Dialect,
+        connection: Connection,
+        dbapi_connection: PoolProxiedConnection,
+        execution_options: _ExecuteOptions,
+        statement: str,
+        parameters: _DBAPIMultiExecuteParams,
+    ) -> ExecutionContext:
+        raise NotImplementedError()
+
+    @classmethod
+    def _init_default(
+        cls,
+        dialect: Dialect,
+        connection: Connection,
+        dbapi_connection: PoolProxiedConnection,
+        execution_options: _ExecuteOptions,
+    ) -> ExecutionContext:
+        raise NotImplementedError()
+
+    def _exec_default(
+        self,
+        column: Optional[Column[Any]],
+        default: DefaultGenerator,
+        type_: Optional[TypeEngine[Any]],
+    ) -> Any:
+        raise NotImplementedError()
+
+    def _prepare_set_input_sizes(
+        self,
+    ) -> Optional[List[Tuple[str, Any, TypeEngine[Any]]]]:
+        raise NotImplementedError()
+
+    def _get_cache_stats(self) -> str:
+        raise NotImplementedError()
+
+    def _setup_result_proxy(self) -> CursorResult[Any]:
+        raise NotImplementedError()
+
+    def fire_sequence(self, seq: Sequence_SchemaItem, type_: Integer) -> int:
+        """given a :class:`.Sequence`, invoke it and return the next int
+        value"""
+        raise NotImplementedError()
+
+    def create_cursor(self) -> DBAPICursor:
+        """Return a new cursor generated from this ExecutionContext's
+        connection.
+
+        Some dialects may wish to change the behavior of
+        connection.cursor(), such as postgresql which may return a PG
+        "server side" cursor.
+        """
+
+        raise NotImplementedError()
+
+    def pre_exec(self) -> None:
+        """Called before an execution of a compiled statement.
+
+        If a compiled statement was passed to this ExecutionContext,
+        the `statement` and `parameters` datamembers must be
+        initialized after this statement is complete.
+        """
+
+        raise NotImplementedError()
+
+    def get_out_parameter_values(
+        self, out_param_names: Sequence[str]
+    ) -> Sequence[Any]:
+        """Return a sequence of OUT parameter values from a cursor.
+
+        For dialects that support OUT parameters, this method will be called
+        when there is a :class:`.SQLCompiler` object which has the
+        :attr:`.SQLCompiler.has_out_parameters` flag set.  This flag in turn
+        will be set to True if the statement itself has :class:`.BindParameter`
+        objects that have the ``.isoutparam`` flag set which are consumed by
+        the :meth:`.SQLCompiler.visit_bindparam` method.  If the dialect
+        compiler produces :class:`.BindParameter` objects with ``.isoutparam``
+        set which are not handled by :meth:`.SQLCompiler.visit_bindparam`, it
+        should set this flag explicitly.
+
+        The list of names that were rendered for each bound parameter
+        is passed to the method.  The method should then return a sequence of
+        values corresponding to the list of parameter objects. Unlike in
+        previous SQLAlchemy versions, the values can be the **raw values** from
+        the DBAPI; the execution context will apply the appropriate type
+        handler based on what's present in self.compiled.binds and update the
+        values.  The processed dictionary will then be made available via the
+        ``.out_parameters`` collection on the result object.  Note that
+        SQLAlchemy 1.4 has multiple kinds of result object as part of the 2.0
+        transition.
+
+        .. versionadded:: 1.4 - added
+           :meth:`.ExecutionContext.get_out_parameter_values`, which is invoked
+           automatically by the :class:`.DefaultExecutionContext` when there
+           are :class:`.BindParameter` objects with the ``.isoutparam`` flag
+           set.  This replaces the practice of setting out parameters within
+           the now-removed ``get_result_proxy()`` method.
+
+        """
+        raise NotImplementedError()
+
+    def post_exec(self) -> None:
+        """Called after the execution of a compiled statement.
+
+        If a compiled statement was passed to this ExecutionContext,
+        the `last_insert_ids`, `last_inserted_params`, etc.
+        datamembers should be available after this method completes.
+        """
+
+        raise NotImplementedError()
+
+    def handle_dbapi_exception(self, e: BaseException) -> None:
+        """Receive a DBAPI exception which occurred upon execute, result
+        fetch, etc."""
+
+        raise NotImplementedError()
+
+    def lastrow_has_defaults(self) -> bool:
+        """Return True if the last INSERT or UPDATE row contained
+        inlined or database-side defaults.
+        """
+
+        raise NotImplementedError()
+
+    def get_rowcount(self) -> Optional[int]:
+        """Return the DBAPI ``cursor.rowcount`` value, or in some
+        cases an interpreted value.
+
+        See :attr:`_engine.CursorResult.rowcount` for details on this.
+
+        """
+
+        raise NotImplementedError()
+
+    def fetchall_for_returning(self, cursor: DBAPICursor) -> Sequence[Any]:
+        """For a RETURNING result, deliver cursor.fetchall() from the
+        DBAPI cursor.
+
+        This is a dialect-specific hook for dialects that have special
+        considerations when calling upon the rows delivered for a
+        "RETURNING" statement.   Default implementation is
+        ``cursor.fetchall()``.
+
+        This hook is currently used only by the :term:`insertmanyvalues`
+        feature.   Dialects that don't set ``use_insertmanyvalues=True``
+        don't need to consider this hook.
+
+        .. versionadded:: 2.0.10
+
+        """
+        raise NotImplementedError()
+
+
+class ConnectionEventsTarget(EventTarget):
+    """An object which can accept events from :class:`.ConnectionEvents`.
+
+    Includes :class:`_engine.Connection` and :class:`_engine.Engine`.
+
+    .. versionadded:: 2.0
+
+    """
+
+    dispatch: dispatcher[ConnectionEventsTarget]
+
+
+Connectable = ConnectionEventsTarget
+
+
+class ExceptionContext:
+    """Encapsulate information about an error condition in progress.
+
+    This object exists solely to be passed to the
+    :meth:`_events.DialectEvents.handle_error` event,
+    supporting an interface that
+    can be extended without backwards-incompatibility.
+
+
+    """
+
+    __slots__ = ()
+
+    dialect: Dialect
+    """The :class:`_engine.Dialect` in use.
+
+    This member is present for all invocations of the event hook.
+
+    .. versionadded:: 2.0
+
+    """
+
+    connection: Optional[Connection]
+    """The :class:`_engine.Connection` in use during the exception.
+
+    This member is present, except in the case of a failure when
+    first connecting.
+
+    .. seealso::
+
+        :attr:`.ExceptionContext.engine`
+
+
+    """
+
+    engine: Optional[Engine]
+    """The :class:`_engine.Engine` in use during the exception.
+
+    This member is present in all cases except for when handling an error
+    within the connection pool "pre-ping" process.
+
+    """
+
+    cursor: Optional[DBAPICursor]
+    """The DBAPI cursor object.
+
+    May be None.
+
+    """
+
+    statement: Optional[str]
+    """String SQL statement that was emitted directly to the DBAPI.
+
+    May be None.
+
+    """
+
+    parameters: Optional[_DBAPIAnyExecuteParams]
+    """Parameter collection that was emitted directly to the DBAPI.
+
+    May be None.
+
+    """
+
+    original_exception: BaseException
+    """The exception object which was caught.
+
+    This member is always present.
+
+    """
+
+    sqlalchemy_exception: Optional[StatementError]
+    """The :class:`sqlalchemy.exc.StatementError` which wraps the original,
+    and will be raised if exception handling is not circumvented by the event.
+
+    May be None, as not all exception types are wrapped by SQLAlchemy.
+    For DBAPI-level exceptions that subclass the dbapi's Error class, this
+    field will always be present.
+
+    """
+
+    chained_exception: Optional[BaseException]
+    """The exception that was returned by the previous handler in the
+    exception chain, if any.
+
+    If present, this exception will be the one ultimately raised by
+    SQLAlchemy unless a subsequent handler replaces it.
+
+    May be None.
+
+    """
+
+    execution_context: Optional[ExecutionContext]
+    """The :class:`.ExecutionContext` corresponding to the execution
+    operation in progress.
+
+    This is present for statement execution operations, but not for
+    operations such as transaction begin/end.  It also is not present when
+    the exception was raised before the :class:`.ExecutionContext`
+    could be constructed.
+
+    Note that the :attr:`.ExceptionContext.statement` and
+    :attr:`.ExceptionContext.parameters` members may represent a
+    different value than that of the :class:`.ExecutionContext`,
+    potentially in the case where a
+    :meth:`_events.ConnectionEvents.before_cursor_execute` event or similar
+    modified the statement/parameters to be sent.
+
+    May be None.
+
+    """
+
+    is_disconnect: bool
+    """Represent whether the exception as occurred represents a "disconnect"
+    condition.
+
+    This flag will always be True or False within the scope of the
+    :meth:`_events.DialectEvents.handle_error` handler.
+
+    SQLAlchemy will defer to this flag in order to determine whether or not
+    the connection should be invalidated subsequently.    That is, by
+    assigning to this flag, a "disconnect" event which then results in
+    a connection and pool invalidation can be invoked or prevented by
+    changing this flag.
+
+
+    .. note:: The pool "pre_ping" handler enabled using the
+        :paramref:`_sa.create_engine.pool_pre_ping` parameter does **not**
+        consult this event before deciding if the "ping" returned false,
+        as opposed to receiving an unhandled error.   For this use case, the
+        :ref:`legacy recipe based on engine_connect() may be used
+        <pool_disconnects_pessimistic_custom>`.  A future API will allow
+        more comprehensive customization of the "disconnect" detection
+        mechanism across all functions.
+
+    """
+
+    invalidate_pool_on_disconnect: bool
+    """Represent whether all connections in the pool should be invalidated
+    when a "disconnect" condition is in effect.
+
+    Setting this flag to False within the scope of the
+    :meth:`_events.DialectEvents.handle_error`
+    event will have the effect such
+    that the full collection of connections in the pool will not be
+    invalidated during a disconnect; only the current connection that is the
+    subject of the error will actually be invalidated.
+
+    The purpose of this flag is for custom disconnect-handling schemes where
+    the invalidation of other connections in the pool is to be performed
+    based on other conditions, or even on a per-connection basis.
+
+    """
+
+    is_pre_ping: bool
+    """Indicates if this error is occurring within the "pre-ping" step
+    performed when :paramref:`_sa.create_engine.pool_pre_ping` is set to
+    ``True``.  In this mode, the :attr:`.ExceptionContext.engine` attribute
+    will be ``None``.  The dialect in use is accessible via the
+    :attr:`.ExceptionContext.dialect` attribute.
+
+    .. versionadded:: 2.0.5
+
+    """
+
+
+class AdaptedConnection:
+    """Interface of an adapted connection object to support the DBAPI protocol.
+
+    Used by asyncio dialects to provide a sync-style pep-249 facade on top
+    of the asyncio connection/cursor API provided by the driver.
+
+    .. versionadded:: 1.4.24
+
+    """
+
+    __slots__ = ("_connection",)
+
+    _connection: Any
+
+    @property
+    def driver_connection(self) -> Any:
+        """The connection object as returned by the driver after a connect."""
+        return self._connection
+
+    def run_async(self, fn: Callable[[Any], Awaitable[_T]]) -> _T:
+        """Run the awaitable returned by the given function, which is passed
+        the raw asyncio driver connection.
+
+        This is used to invoke awaitable-only methods on the driver connection
+        within the context of a "synchronous" method, like a connection
+        pool event handler.
+
+        E.g.::
+
+            engine = create_async_engine(...)
+
+
+            @event.listens_for(engine.sync_engine, "connect")
+            def register_custom_types(
+                dbapi_connection,  # ...
+            ):
+                dbapi_connection.run_async(
+                    lambda connection: connection.set_type_codec(
+                        "MyCustomType", encoder, decoder, ...
+                    )
+                )
+
+        .. versionadded:: 1.4.30
+
+        .. seealso::
+
+            :ref:`asyncio_events_run_async`
+
+        """
+        return await_only(fn(self._connection))
+
+    def __repr__(self) -> str:
+        return "<AdaptedConnection %s>" % self._connection
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/mock.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/mock.py
new file mode 100644
index 00000000..08dba5a6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/mock.py
@@ -0,0 +1,133 @@
+# engine/mock.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+from operator import attrgetter
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Optional
+from typing import Type
+from typing import Union
+
+from . import url as _url
+from .. import util
+
+
+if typing.TYPE_CHECKING:
+    from .base import Engine
+    from .interfaces import _CoreAnyExecuteParams
+    from .interfaces import CoreExecuteOptionsParameter
+    from .interfaces import Dialect
+    from .url import URL
+    from ..sql.base import Executable
+    from ..sql.ddl import SchemaDropper
+    from ..sql.ddl import SchemaGenerator
+    from ..sql.schema import HasSchemaAttr
+    from ..sql.schema import SchemaItem
+
+
+class MockConnection:
+    def __init__(self, dialect: Dialect, execute: Callable[..., Any]):
+        self._dialect = dialect
+        self._execute_impl = execute
+
+    engine: Engine = cast(Any, property(lambda s: s))
+    dialect: Dialect = cast(Any, property(attrgetter("_dialect")))
+    name: str = cast(Any, property(lambda s: s._dialect.name))
+
+    def connect(self, **kwargs: Any) -> MockConnection:
+        return self
+
+    def schema_for_object(self, obj: HasSchemaAttr) -> Optional[str]:
+        return obj.schema
+
+    def execution_options(self, **kw: Any) -> MockConnection:
+        return self
+
+    def _run_ddl_visitor(
+        self,
+        visitorcallable: Type[Union[SchemaGenerator, SchemaDropper]],
+        element: SchemaItem,
+        **kwargs: Any,
+    ) -> None:
+        kwargs["checkfirst"] = False
+        visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
+
+    def execute(
+        self,
+        obj: Executable,
+        parameters: Optional[_CoreAnyExecuteParams] = None,
+        execution_options: Optional[CoreExecuteOptionsParameter] = None,
+    ) -> Any:
+        return self._execute_impl(obj, parameters)
+
+
+def create_mock_engine(
+    url: Union[str, URL], executor: Any, **kw: Any
+) -> MockConnection:
+    """Create a "mock" engine used for echoing DDL.
+
+    This is a utility function used for debugging or storing the output of DDL
+    sequences as generated by :meth:`_schema.MetaData.create_all`
+    and related methods.
+
+    The function accepts a URL which is used only to determine the kind of
+    dialect to be used, as well as an "executor" callable function which
+    will receive a SQL expression object and parameters, which can then be
+    echoed or otherwise printed.   The executor's return value is not
+    handled, nor does the engine allow regular string statements to be
+    invoked; the mock engine is therefore only useful for DDL that is
+    sent to the database without receiving any results.
+
+    E.g.::
+
+        from sqlalchemy import create_mock_engine
+
+
+        def dump(sql, *multiparams, **params):
+            print(sql.compile(dialect=engine.dialect))
+
+
+        engine = create_mock_engine("postgresql+psycopg2://", dump)
+        metadata.create_all(engine, checkfirst=False)
+
+    :param url: A string URL which typically needs to contain only the
+     database backend name.
+
+    :param executor: a callable which receives the arguments ``sql``,
+     ``*multiparams`` and ``**params``.  The ``sql`` parameter is typically
+     an instance of :class:`.ExecutableDDLElement`, which can then be compiled
+     into a string using :meth:`.ExecutableDDLElement.compile`.
+
+    .. versionadded:: 1.4 - the :func:`.create_mock_engine` function replaces
+       the previous "mock" engine strategy used with
+       :func:`_sa.create_engine`.
+
+    .. seealso::
+
+        :ref:`faq_ddl_as_string`
+
+    """
+
+    # create url.URL object
+    u = _url.make_url(url)
+
+    dialect_cls = u.get_dialect()
+
+    dialect_args = {}
+    # consume dialect arguments from kwargs
+    for k in util.get_cls_kwargs(dialect_cls):
+        if k in kw:
+            dialect_args[k] = kw.pop(k)
+
+    # create dialect
+    dialect = dialect_cls(**dialect_args)
+
+    return MockConnection(dialect, executor)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/processors.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/processors.py
new file mode 100644
index 00000000..b3f93308
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/processors.py
@@ -0,0 +1,61 @@
+# engine/processors.py
+# Copyright (C) 2010-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+# Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""defines generic type conversion functions, as used in bind and result
+processors.
+
+They all share one common characteristic: None is passed through unchanged.
+
+"""
+from __future__ import annotations
+
+import typing
+
+from ._py_processors import str_to_datetime_processor_factory  # noqa
+from ..util._has_cy import HAS_CYEXTENSION
+
+if typing.TYPE_CHECKING or not HAS_CYEXTENSION:
+    from ._py_processors import int_to_boolean as int_to_boolean
+    from ._py_processors import str_to_date as str_to_date
+    from ._py_processors import str_to_datetime as str_to_datetime
+    from ._py_processors import str_to_time as str_to_time
+    from ._py_processors import (
+        to_decimal_processor_factory as to_decimal_processor_factory,
+    )
+    from ._py_processors import to_float as to_float
+    from ._py_processors import to_str as to_str
+else:
+    from sqlalchemy.cyextension.processors import (
+        DecimalResultProcessor,
+    )
+    from sqlalchemy.cyextension.processors import (  # noqa: F401
+        int_to_boolean as int_to_boolean,
+    )
+    from sqlalchemy.cyextension.processors import (  # noqa: F401,E501
+        str_to_date as str_to_date,
+    )
+    from sqlalchemy.cyextension.processors import (  # noqa: F401
+        str_to_datetime as str_to_datetime,
+    )
+    from sqlalchemy.cyextension.processors import (  # noqa: F401,E501
+        str_to_time as str_to_time,
+    )
+    from sqlalchemy.cyextension.processors import (  # noqa: F401,E501
+        to_float as to_float,
+    )
+    from sqlalchemy.cyextension.processors import (  # noqa: F401,E501
+        to_str as to_str,
+    )
+
+    def to_decimal_processor_factory(target_class, scale):
+        # Note that the scale argument is not taken into account for integer
+        # values in the C implementation while it is in the Python one.
+        # For example, the Python implementation might return
+        # Decimal('5.00000') whereas the C implementation will
+        # return Decimal('5'). These are equivalent of course.
+        return DecimalResultProcessor(target_class, "%%.%df" % scale).process
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/reflection.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/reflection.py
new file mode 100644
index 00000000..5d754c67
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/reflection.py
@@ -0,0 +1,2099 @@
+# engine/reflection.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Provides an abstraction for obtaining database schema information.
+
+Usage Notes:
+
+Here are some general conventions when accessing the low-level inspector
+methods such as get_table_names, get_columns, etc.
+
+1. Inspector methods return lists of dicts in most cases for the following
+   reasons:
+
+   * They're both standard types that can be serialized.
+   * Using a dict instead of a tuple allows easy expansion of attributes.
+   * Using a list for the outer structure maintains order and is easy to work
+     with (e.g. list comprehension [d['name'] for d in cols]).
+
+2. Records that contain a name, such as the column name in a column record,
+   use the key 'name'. So for most return values, each record will have a
+   'name' attribute.
+"""
+from __future__ import annotations
+
+import contextlib
+from dataclasses import dataclass
+from enum import auto
+from enum import Flag
+from enum import unique
+from typing import Any
+from typing import Callable
+from typing import Collection
+from typing import Dict
+from typing import Generator
+from typing import Iterable
+from typing import List
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from .base import Connection
+from .base import Engine
+from .. import exc
+from .. import inspection
+from .. import sql
+from .. import util
+from ..sql import operators
+from ..sql import schema as sa_schema
+from ..sql.cache_key import _ad_hoc_cache_key_from_args
+from ..sql.elements import quoted_name
+from ..sql.elements import TextClause
+from ..sql.type_api import TypeEngine
+from ..sql.visitors import InternalTraversal
+from ..util import topological
+from ..util.typing import final
+
+if TYPE_CHECKING:
+    from .interfaces import Dialect
+    from .interfaces import ReflectedCheckConstraint
+    from .interfaces import ReflectedColumn
+    from .interfaces import ReflectedForeignKeyConstraint
+    from .interfaces import ReflectedIndex
+    from .interfaces import ReflectedPrimaryKeyConstraint
+    from .interfaces import ReflectedTableComment
+    from .interfaces import ReflectedUniqueConstraint
+    from .interfaces import TableKey
+
+_R = TypeVar("_R")
+
+
+@util.decorator
+def cache(
+    fn: Callable[..., _R],
+    self: Dialect,
+    con: Connection,
+    *args: Any,
+    **kw: Any,
+) -> _R:
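+    # Serve repeated dialect-level reflection calls from the ``info_cache``
+    # dict that the Inspector passes through ``kw``.  The cache key combines
+    # the method name, all string positional arguments and the non-excluded
+    # keyword arguments; ``quoted_name`` values are reduced to
+    # ``(str, quote)`` pairs so that they hash consistently.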
+    info_cache = kw.get("info_cache", None)
+    if info_cache is None:
+        return fn(self, con, *args, **kw)
+    exclude = {"info_cache", "unreflectable"}
+    key = (
+        fn.__name__,
+        tuple(
+            (str(a), a.quote) if isinstance(a, quoted_name) else a
+            for a in args
+            if isinstance(a, str)
+        ),
+        tuple(
+            (k, (str(v), v.quote) if isinstance(v, quoted_name) else v)
+            for k, v in kw.items()
+            if k not in exclude
+        ),
+    )
+    ret: _R = info_cache.get(key)
+    if ret is None:
+        ret = fn(self, con, *args, **kw)
+        info_cache[key] = ret
+    return ret
+
+
+def flexi_cache(
+    *traverse_args: Tuple[str, InternalTraversal]
+) -> Callable[[Callable[..., _R]], Callable[..., _R]]:
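+    # Variant of ``cache`` for methods that accept non-string arguments:
+    # the caller declares ``(name, InternalTraversal)`` pairs from which an
+    # ad-hoc cache key is generated, so arbitrary SQL elements can take
+    # part in the key.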
+    @util.decorator
+    def go(
+        fn: Callable[..., _R],
+        self: Dialect,
+        con: Connection,
+        *args: Any,
+        **kw: Any,
+    ) -> _R:
+        info_cache = kw.get("info_cache", None)
+        if info_cache is None:
+            return fn(self, con, *args, **kw)
+        key = _ad_hoc_cache_key_from_args((fn.__name__,), traverse_args, args)
+        ret: _R = info_cache.get(key)
+        if ret is None:
+            ret = fn(self, con, *args, **kw)
+            info_cache[key] = ret
+        return ret
+
+    return go
+
+
+@unique
+class ObjectKind(Flag):
+    """Enumerator that indicates which kind of object to return when calling
+    the ``get_multi`` methods.
+
+    This is a Flag enum, so custom combinations can be passed. For example,
+    to reflect tables and plain views ``ObjectKind.TABLE | ObjectKind.VIEW``
+    may be used.
+
+    .. note::
+      Not all dialects support every kind of object. If a dialect does
+      not support a particular object, an empty dict is returned.
+      If a dialect supports an object but the requested method does not
+      apply to the specified kind, the default value is returned for
+      each reflected object. For example, reflecting the check
+      constraints of views returns a dict keyed by all the views, with
+      empty lists as values.
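+
+    For example, a minimal usage sketch (assuming ``insp`` is an existing
+    :class:`.Inspector`)::
+
+        from sqlalchemy.engine.reflection import ObjectKind
+
+        # reflect column information for plain tables and plain views only
+        cols = insp.get_multi_columns(kind=ObjectKind.TABLE | ObjectKind.VIEW)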
+    """
+
+    TABLE = auto()
+    "Reflect table objects"
+    VIEW = auto()
+    "Reflect plain view objects"
+    MATERIALIZED_VIEW = auto()
+    "Reflect materialized view objects"
+
+    ANY_VIEW = VIEW | MATERIALIZED_VIEW
+    "Reflect any kind of view objects"
+    ANY = TABLE | VIEW | MATERIALIZED_VIEW
+    "Reflect all types of objects"
+
+
+@unique
+class ObjectScope(Flag):
+    """Enumerator that indicates which scope to use when calling
+    the ``get_multi`` methods.
+    """
+
+    DEFAULT = auto()
+    "Include default scope"
+    TEMPORARY = auto()
+    "Include only temp scope"
+    ANY = DEFAULT | TEMPORARY
+    "Include both default and temp scope"
+
+
+@inspection._self_inspects
+class Inspector(inspection.Inspectable["Inspector"]):
+    """Performs database schema inspection.
+
+    The Inspector acts as a proxy to the reflection methods of the
+    :class:`~sqlalchemy.engine.interfaces.Dialect`, providing a
+    consistent interface as well as caching support for previously
+    fetched metadata.
+
+    A :class:`_reflection.Inspector` object is usually created via the
+    :func:`_sa.inspect` function, which may be passed an
+    :class:`_engine.Engine`
+    or a :class:`_engine.Connection`::
+
+        from sqlalchemy import inspect, create_engine
+
+        engine = create_engine("...")
+        insp = inspect(engine)
+
+    Where above, the :class:`~sqlalchemy.engine.interfaces.Dialect` associated
+    with the engine may opt to return an :class:`_reflection.Inspector`
+    subclass that
+    provides additional methods specific to the dialect's target database.
+
+    """
+
+    bind: Union[Engine, Connection]
+    engine: Engine
+    _op_context_requires_connect: bool
+    dialect: Dialect
+    info_cache: Dict[Any, Any]
+
+    @util.deprecated(
+        "1.4",
+        "The __init__() method on :class:`_reflection.Inspector` "
+        "is deprecated and "
+        "will be removed in a future release.  Please use the "
+        ":func:`.sqlalchemy.inspect` "
+        "function on an :class:`_engine.Engine` or "
+        ":class:`_engine.Connection` "
+        "in order to "
+        "acquire an :class:`_reflection.Inspector`.",
+    )
+    def __init__(self, bind: Union[Engine, Connection]):
+        """Initialize a new :class:`_reflection.Inspector`.
+
+        :param bind: a :class:`~sqlalchemy.engine.Connection`,
+          which is typically an instance of
+          :class:`~sqlalchemy.engine.Engine` or
+          :class:`~sqlalchemy.engine.Connection`.
+
+        For a dialect-specific instance of :class:`_reflection.Inspector`, see
+        :meth:`_reflection.Inspector.from_engine`
+
+        """
+        self._init_legacy(bind)
+
+    @classmethod
+    def _construct(
+        cls, init: Callable[..., Any], bind: Union[Engine, Connection]
+    ) -> Inspector:
+        if hasattr(bind.dialect, "inspector"):
+            cls = bind.dialect.inspector
+
+        self = cls.__new__(cls)
+        init(self, bind)
+        return self
+
+    def _init_legacy(self, bind: Union[Engine, Connection]) -> None:
+        if hasattr(bind, "exec_driver_sql"):
+            self._init_connection(bind)  # type: ignore[arg-type]
+        else:
+            self._init_engine(bind)
+
+    def _init_engine(self, engine: Engine) -> None:
+        self.bind = self.engine = engine
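+        # connect and close immediately; the first connect verifies the
+        # engine is usable and lets the dialect complete its initialization
+        # before reflection queries are emitted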
+        engine.connect().close()
+        self._op_context_requires_connect = True
+        self.dialect = self.engine.dialect
+        self.info_cache = {}
+
+    def _init_connection(self, connection: Connection) -> None:
+        self.bind = connection
+        self.engine = connection.engine
+        self._op_context_requires_connect = False
+        self.dialect = self.engine.dialect
+        self.info_cache = {}
+
+    def clear_cache(self) -> None:
+        """reset the cache for this :class:`.Inspector`.
+
+        Inspection methods that have data cached will emit SQL queries
+        when next called to get new data.
+
+        .. versionadded:: 2.0
+
+        """
+        self.info_cache.clear()
+
+    @classmethod
+    @util.deprecated(
+        "1.4",
+        "The from_engine() method on :class:`_reflection.Inspector` "
+        "is deprecated and "
+        "will be removed in a future release.  Please use the "
+        ":func:`.sqlalchemy.inspect` "
+        "function on an :class:`_engine.Engine` or "
+        ":class:`_engine.Connection` "
+        "in order to "
+        "acquire an :class:`_reflection.Inspector`.",
+    )
+    def from_engine(cls, bind: Engine) -> Inspector:
+        """Construct a new dialect-specific Inspector object from the given
+        engine or connection.
+
+        :param bind: a :class:`~sqlalchemy.engine.Connection`
+         or :class:`~sqlalchemy.engine.Engine`.
+
+        This method differs from a direct constructor call of
+        :class:`_reflection.Inspector` in that the
+        :class:`~sqlalchemy.engine.interfaces.Dialect` is given a chance to
+        provide a dialect-specific :class:`_reflection.Inspector` instance,
+        which may
+        provide additional methods.
+
+        See the example at :class:`_reflection.Inspector`.
+
+        """
+        return cls._construct(cls._init_legacy, bind)
+
+    @inspection._inspects(Engine)
+    def _engine_insp(bind: Engine) -> Inspector:  # type: ignore[misc]
+        return Inspector._construct(Inspector._init_engine, bind)
+
+    @inspection._inspects(Connection)
+    def _connection_insp(bind: Connection) -> Inspector:  # type: ignore[misc]
+        return Inspector._construct(Inspector._init_connection, bind)
+
+    @contextlib.contextmanager
+    def _operation_context(self) -> Generator[Connection, None, None]:
+        """Return a context that optimizes for multiple operations on a single
+        transaction.
+
+        This essentially allows connect()/close() to be called if we detected
+        that we're against an :class:`_engine.Engine` and not a
+        :class:`_engine.Connection`.
+
+        """
+        conn: Connection
+        if self._op_context_requires_connect:
+            conn = self.bind.connect()  # type: ignore[union-attr]
+        else:
+            conn = self.bind  # type: ignore[assignment]
+        try:
+            yield conn
+        finally:
+            if self._op_context_requires_connect:
+                conn.close()
+
+    @contextlib.contextmanager
+    def _inspection_context(self) -> Generator[Inspector, None, None]:
+        """Return an :class:`_reflection.Inspector`
+        from this one that will run all
+        operations on a single connection.
+
+        """
+
+        with self._operation_context() as conn:
+            sub_insp = self._construct(self.__class__._init_connection, conn)
+            sub_insp.info_cache = self.info_cache
+            yield sub_insp
+
+    @property
+    def default_schema_name(self) -> Optional[str]:
+        """Return the default schema name presented by the dialect
+        for the current engine's database user.
+
+        E.g. this is typically ``public`` for PostgreSQL and ``dbo``
+        for SQL Server.
+
+        """
+        return self.dialect.default_schema_name
+
+    def get_schema_names(self, **kw: Any) -> List[str]:
+        r"""Return all schema names.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+        """
+
+        with self._operation_context() as conn:
+            return self.dialect.get_schema_names(
+                conn, info_cache=self.info_cache, **kw
+            )
+
+    def get_table_names(
+        self, schema: Optional[str] = None, **kw: Any
+    ) -> List[str]:
+        r"""Return all table names within a particular schema.
+
+        The names are expected to be real tables only, not views.
+        Views are instead returned using the
+        :meth:`_reflection.Inspector.get_view_names` and/or
+        :meth:`_reflection.Inspector.get_materialized_view_names`
+        methods.
+
+        :param schema: Schema name. If ``schema`` is left at ``None``, the
+         database's default schema is
+         used, else the named schema is searched.  If the database does not
+         support named schemas, behavior is undefined if ``schema`` is not
+         passed as ``None``.  For special quoting, use :class:`.quoted_name`.
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        .. seealso::
+
+            :meth:`_reflection.Inspector.get_sorted_table_and_fkc_names`
+
+            :attr:`_schema.MetaData.sorted_tables`
+
+        """
+
+        with self._operation_context() as conn:
+            return self.dialect.get_table_names(
+                conn, schema, info_cache=self.info_cache, **kw
+            )
+
+    def has_table(
+        self, table_name: str, schema: Optional[str] = None, **kw: Any
+    ) -> bool:
+        r"""Return True if the backend has a table, view, or temporary
+        table of the given name.
+
+        :param table_name: name of the table to check
+        :param schema: schema name to query, if not the default schema.
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
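+        For example, a minimal usage sketch (assuming ``insp`` is an
+        existing :class:`.Inspector`)::
+
+            if insp.has_table("user"):
+                print("user table is present")
+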
+        .. versionadded:: 1.4 - the :meth:`.Inspector.has_table` method
+           replaces the :meth:`_engine.Engine.has_table` method.
+
+        .. versionchanged:: 2.0 :meth:`.Inspector.has_table` now formally
+           supports checking for additional table-like objects:
+
+           * any type of views (plain or materialized)
+           * temporary tables of any kind
+
+           Previously, these two checks were not formally specified and
+           different dialects would vary in their behavior.   The dialect
+           testing suite now includes tests for all of these object types
+           and should be supported by all SQLAlchemy-included dialects.
+           Support among third party dialects may be lagging, however.
+
+        """
+        with self._operation_context() as conn:
+            return self.dialect.has_table(
+                conn, table_name, schema, info_cache=self.info_cache, **kw
+            )
+
+    def has_sequence(
+        self, sequence_name: str, schema: Optional[str] = None, **kw: Any
+    ) -> bool:
+        r"""Return True if the backend has a sequence with the given name.
+
+        :param sequence_name: name of the sequence to check
+        :param schema: schema name to query, if not the default schema.
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        .. versionadded:: 1.4
+
+        """
+        with self._operation_context() as conn:
+            return self.dialect.has_sequence(
+                conn, sequence_name, schema, info_cache=self.info_cache, **kw
+            )
+
+    def has_index(
+        self,
+        table_name: str,
+        index_name: str,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> bool:
+        r"""Check the existence of a particular index name in the database.
+
+        :param table_name: the name of the table the index belongs to
+        :param index_name: the name of the index to check
+        :param schema: schema name to query, if not the default schema.
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        .. versionadded:: 2.0
+
+        """
+        with self._operation_context() as conn:
+            return self.dialect.has_index(
+                conn,
+                table_name,
+                index_name,
+                schema,
+                info_cache=self.info_cache,
+                **kw,
+            )
+
+    def has_schema(self, schema_name: str, **kw: Any) -> bool:
+        r"""Return True if the backend has a schema with the given name.
+
+        :param schema_name: name of the schema to check
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        .. versionadded:: 2.0
+
+        """
+        with self._operation_context() as conn:
+            return self.dialect.has_schema(
+                conn, schema_name, info_cache=self.info_cache, **kw
+            )
+
+    def get_sorted_table_and_fkc_names(
+        self,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> List[Tuple[Optional[str], List[Tuple[str, Optional[str]]]]]:
+        r"""Return dependency-sorted table and foreign key constraint names
+        referred to within a particular schema.
+
+        This will yield 2-tuples of
+        ``(tablename, [(tname, fkname), (tname, fkname), ...])``
+        consisting of table names in CREATE order grouped with the foreign key
+        constraint names that are not detected as belonging to a cycle.
+        The final element
+        will be ``(None, [(tname, fkname), (tname, fkname), ..])``
+        which will consist of remaining
+        foreign key constraint names that would require a separate CREATE
+        step after-the-fact, based on dependencies between tables.
+
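+        For example, a minimal sketch of the return structure (assuming a
+        table ``b`` with a foreign key constraint named ``fk_a`` referring
+        to a table ``a``; names are illustrative)::
+
+            [
+                ("a", []),
+                ("b", [("b", "fk_a")]),
+                (None, []),
+            ]
+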
+        :param schema: schema name to query, if not the default schema.
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        .. seealso::
+
+            :meth:`_reflection.Inspector.get_table_names`
+
+            :func:`.sort_tables_and_constraints` - similar method which works
+            with an already-given :class:`_schema.MetaData`.
+
+        """
+
+        return [
+            (
+                table_key[1] if table_key else None,
+                [(tname, fks) for (_, tname), fks in fk_collection],
+            )
+            for (
+                table_key,
+                fk_collection,
+            ) in self.sort_tables_on_foreign_key_dependency(
+                consider_schemas=(schema,)
+            )
+        ]
+
+    def sort_tables_on_foreign_key_dependency(
+        self,
+        consider_schemas: Collection[Optional[str]] = (None,),
+        **kw: Any,
+    ) -> List[
+        Tuple[
+            Optional[Tuple[Optional[str], str]],
+            List[Tuple[Tuple[Optional[str], str], Optional[str]]],
+        ]
+    ]:
+        r"""Return dependency-sorted table and foreign key constraint names
+        referred to within multiple schemas.
+
+        This method may be compared to
+        :meth:`.Inspector.get_sorted_table_and_fkc_names`, which
+        works on one schema at a time; here, the method is a generalization
+        that considers multiple schemas at once, including resolution of
+        cross-schema foreign keys.
+
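+        For example, a minimal usage sketch (assuming ``insp`` is an
+        existing :class:`.Inspector`; the schema names are illustrative)::
+
+            ordered = insp.sort_tables_on_foreign_key_dependency(
+                consider_schemas=("main", None)
+            )
+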
+        .. versionadded:: 2.0
+
+        """
+        SchemaTab = Tuple[Optional[str], str]
+
+        tuples: Set[Tuple[SchemaTab, SchemaTab]] = set()
+        remaining_fkcs: Set[Tuple[SchemaTab, Optional[str]]] = set()
+        fknames_for_table: Dict[SchemaTab, Set[Optional[str]]] = {}
+        tnames: List[SchemaTab] = []
+
+        for schname in consider_schemas:
+            schema_fkeys = self.get_multi_foreign_keys(schname, **kw)
+            tnames.extend(schema_fkeys)
+            for (_, tname), fkeys in schema_fkeys.items():
+                fknames_for_table[(schname, tname)] = {
+                    fk["name"] for fk in fkeys
+                }
+                for fkey in fkeys:
+                    if (
+                        tname != fkey["referred_table"]
+                        or schname != fkey["referred_schema"]
+                    ):
+                        tuples.add(
+                            (
+                                (
+                                    fkey["referred_schema"],
+                                    fkey["referred_table"],
+                                ),
+                                (schname, tname),
+                            )
+                        )
+        try:
+            candidate_sort = list(topological.sort(tuples, tnames))
+        except exc.CircularDependencyError as err:
+            edge: Tuple[SchemaTab, SchemaTab]
+            for edge in err.edges:
+                tuples.remove(edge)
+                remaining_fkcs.update(
+                    (edge[1], fkc) for fkc in fknames_for_table[edge[1]]
+                )
+
+            candidate_sort = list(topological.sort(tuples, tnames))
+        ret: List[
+            Tuple[Optional[SchemaTab], List[Tuple[SchemaTab, Optional[str]]]]
+        ]
+        ret = [
+            (
+                (schname, tname),
+                [
+                    ((schname, tname), fk)
+                    for fk in fknames_for_table[(schname, tname)].difference(
+                        name for _, name in remaining_fkcs
+                    )
+                ],
+            )
+            for (schname, tname) in candidate_sort
+        ]
+        return ret + [(None, list(remaining_fkcs))]
+
+    def get_temp_table_names(self, **kw: Any) -> List[str]:
+        r"""Return a list of temporary table names for the current bind.
+
+        This method is unsupported by most dialects; currently
+        only Oracle Database, PostgreSQL and SQLite implement it.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        """
+
+        with self._operation_context() as conn:
+            return self.dialect.get_temp_table_names(
+                conn, info_cache=self.info_cache, **kw
+            )
+
+    def get_temp_view_names(self, **kw: Any) -> List[str]:
+        r"""Return a list of temporary view names for the current bind.
+
+        This method is unsupported by most dialects; currently
+        only PostgreSQL and SQLite implement it.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        """
+        with self._operation_context() as conn:
+            return self.dialect.get_temp_view_names(
+                conn, info_cache=self.info_cache, **kw
+            )
+
+    def get_table_options(
+        self, table_name: str, schema: Optional[str] = None, **kw: Any
+    ) -> Dict[str, Any]:
+        r"""Return a dictionary of options specified when the table of the
+        given name was created.
+
+        This currently includes some options that apply to MySQL and Oracle
+        Database tables.
+
+        :param table_name: string name of the table.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a dict with the table options. The returned keys depend on the
+         dialect in use. Each one is prefixed with the dialect name.
+
+        .. seealso:: :meth:`Inspector.get_multi_table_options`
+
+        """
+        with self._operation_context() as conn:
+            return self.dialect.get_table_options(
+                conn, table_name, schema, info_cache=self.info_cache, **kw
+            )
+
+    def get_multi_table_options(
+        self,
+        schema: Optional[str] = None,
+        filter_names: Optional[Sequence[str]] = None,
+        kind: ObjectKind = ObjectKind.TABLE,
+        scope: ObjectScope = ObjectScope.DEFAULT,
+        **kw: Any,
+    ) -> Dict[TableKey, Dict[str, Any]]:
+        r"""Return a dictionary of options specified when the tables in the
+        given schema were created.
+
+        The tables can be filtered by passing the names to use to
+        ``filter_names``.
+
+        This currently includes some options that apply to MySQL and Oracle
+        tables.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param filter_names: optionally return information only for the
+         objects listed here.
+
+        :param kind: an :class:`.ObjectKind` that specifies the type of
+         objects to reflect. Defaults to ``ObjectKind.TABLE``.
+
+        :param scope: an :class:`.ObjectScope` that specifies whether
+         options of default, temporary or any tables should be reflected.
+         Defaults to ``ObjectScope.DEFAULT``.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a dictionary where the keys are two-tuples
+         ``(schema, table_name)``
+         and the values are dictionaries with the table options.
+         The returned keys in each dict depend on the
+         dialect in use. Each one is prefixed with the dialect name.
+         The schema is ``None`` if no schema is provided.
+
+        .. versionadded:: 2.0
+
+        .. seealso:: :meth:`Inspector.get_table_options`
+        """
+        with self._operation_context() as conn:
+            res = self.dialect.get_multi_table_options(
+                conn,
+                schema=schema,
+                filter_names=filter_names,
+                kind=kind,
+                scope=scope,
+                info_cache=self.info_cache,
+                **kw,
+            )
+            return dict(res)
+
+    def get_view_names(
+        self, schema: Optional[str] = None, **kw: Any
+    ) -> List[str]:
+        r"""Return all non-materialized view names in ``schema``.
+
+        :param schema: Optional, retrieve names from a non-default schema.
+         For special quoting, use :class:`.quoted_name`.
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+
+        .. versionchanged:: 2.0  For those dialects that previously included
+           the names of materialized views in this list (currently PostgreSQL),
+           this method no longer returns the names of materialized views.
+           The :meth:`.Inspector.get_materialized_view_names` method should
+           be used instead.
+
+        .. seealso::
+
+            :meth:`.Inspector.get_materialized_view_names`
+
+        """
+
+        with self._operation_context() as conn:
+            return self.dialect.get_view_names(
+                conn, schema, info_cache=self.info_cache, **kw
+            )
+
+    def get_materialized_view_names(
+        self, schema: Optional[str] = None, **kw: Any
+    ) -> List[str]:
+        r"""Return all materialized view names in ``schema``.
+
+        :param schema: Optional, retrieve names from a non-default schema.
+         For special quoting, use :class:`.quoted_name`.
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        .. versionadded:: 2.0
+
+        .. seealso::
+
+            :meth:`.Inspector.get_view_names`
+
+        """
+
+        with self._operation_context() as conn:
+            return self.dialect.get_materialized_view_names(
+                conn, schema, info_cache=self.info_cache, **kw
+            )
+
+    def get_sequence_names(
+        self, schema: Optional[str] = None, **kw: Any
+    ) -> List[str]:
+        r"""Return all sequence names in ``schema``.
+
+        :param schema: Optional, retrieve names from a non-default schema.
+         For special quoting, use :class:`.quoted_name`.
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        """
+
+        with self._operation_context() as conn:
+            return self.dialect.get_sequence_names(
+                conn, schema, info_cache=self.info_cache, **kw
+            )
+
+    def get_view_definition(
+        self, view_name: str, schema: Optional[str] = None, **kw: Any
+    ) -> str:
+        r"""Return definition for the plain or materialized view called
+        ``view_name``.
+
+        :param view_name: Name of the view.
+        :param schema: Optional, retrieve names from a non-default schema.
+         For special quoting, use :class:`.quoted_name`.
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        """
+
+        with self._operation_context() as conn:
+            return self.dialect.get_view_definition(
+                conn, view_name, schema, info_cache=self.info_cache, **kw
+            )
+
+    def get_columns(
+        self, table_name: str, schema: Optional[str] = None, **kw: Any
+    ) -> List[ReflectedColumn]:
+        r"""Return information about columns in ``table_name``.
+
+        Given a string ``table_name`` and an optional string ``schema``,
+        return column information as a list of :class:`.ReflectedColumn`.
+
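+        For example, a minimal usage sketch (assuming ``insp`` is an
+        existing :class:`.Inspector` and a table named ``user`` exists)::
+
+            for col in insp.get_columns("user"):
+                print(col["name"], col["type"], col["nullable"])
+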
+        :param table_name: string name of the table.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: list of dictionaries, each representing the definition of
+         a database column.
+
+        .. seealso:: :meth:`Inspector.get_multi_columns`.
+
+        """
+
+        with self._operation_context() as conn:
+            col_defs = self.dialect.get_columns(
+                conn, table_name, schema, info_cache=self.info_cache, **kw
+            )
+        if col_defs:
+            self._instantiate_types([col_defs])
+        return col_defs
+
+    def _instantiate_types(
+        self, data: Iterable[List[ReflectedColumn]]
+    ) -> None:
+        # make this easy and only return instances for coltype
+        for col_defs in data:
+            for col_def in col_defs:
+                coltype = col_def["type"]
+                if not isinstance(coltype, TypeEngine):
+                    col_def["type"] = coltype()
+
+    def get_multi_columns(
+        self,
+        schema: Optional[str] = None,
+        filter_names: Optional[Sequence[str]] = None,
+        kind: ObjectKind = ObjectKind.TABLE,
+        scope: ObjectScope = ObjectScope.DEFAULT,
+        **kw: Any,
+    ) -> Dict[TableKey, List[ReflectedColumn]]:
+        r"""Return information about columns in all objects in the given
+        schema.
+
+        The objects can be filtered by passing the names to use to
+        ``filter_names``.
+
+        For each table the value is a list of :class:`.ReflectedColumn`.
+
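+        For example, a minimal usage sketch (assuming ``insp`` is an
+        existing :class:`.Inspector`)::
+
+            for (schema, table), cols in insp.get_multi_columns().items():
+                print(schema, table, [col["name"] for col in cols])
+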
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param filter_names: optionally return information only for the
+         objects listed here.
+
+        :param kind: an :class:`.ObjectKind` that specifies the type of
+         objects to reflect. Defaults to ``ObjectKind.TABLE``.
+
+        :param scope: an :class:`.ObjectScope` that specifies whether
+         columns of default, temporary or any tables should be reflected.
+         Defaults to ``ObjectScope.DEFAULT``.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a dictionary where the keys are two-tuples
+         ``(schema, table_name)`` and the values are lists of
+         dictionaries, each representing the
+         definition of a database column.
+         The schema is ``None`` if no schema is provided.
+
+        .. versionadded:: 2.0
+
+        .. seealso:: :meth:`Inspector.get_columns`
+        """
+
+        with self._operation_context() as conn:
+            table_col_defs = dict(
+                self.dialect.get_multi_columns(
+                    conn,
+                    schema=schema,
+                    filter_names=filter_names,
+                    kind=kind,
+                    scope=scope,
+                    info_cache=self.info_cache,
+                    **kw,
+                )
+            )
+        self._instantiate_types(table_col_defs.values())
+        return table_col_defs
+
+    def get_pk_constraint(
+        self, table_name: str, schema: Optional[str] = None, **kw: Any
+    ) -> ReflectedPrimaryKeyConstraint:
+        r"""Return information about primary key constraint in ``table_name``.
+
+        Given a string ``table_name`` and an optional string ``schema``, return
+        primary key information as a :class:`.ReflectedPrimaryKeyConstraint`.
+
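+        For example, a minimal usage sketch (assuming ``insp`` is an
+        existing :class:`.Inspector` and a table named ``user`` exists)::
+
+            pk = insp.get_pk_constraint("user")
+            print(pk["name"], pk["constrained_columns"])
+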
+        :param table_name: string name of the table.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a dictionary representing the definition of
+         a primary key constraint.
+
+        .. seealso:: :meth:`Inspector.get_multi_pk_constraint`
+        """
+        with self._operation_context() as conn:
+            return self.dialect.get_pk_constraint(
+                conn, table_name, schema, info_cache=self.info_cache, **kw
+            )
+
+    def get_multi_pk_constraint(
+        self,
+        schema: Optional[str] = None,
+        filter_names: Optional[Sequence[str]] = None,
+        kind: ObjectKind = ObjectKind.TABLE,
+        scope: ObjectScope = ObjectScope.DEFAULT,
+        **kw: Any,
+    ) -> Dict[TableKey, ReflectedPrimaryKeyConstraint]:
+        r"""Return information about primary key constraints in
+        all tables in the given schema.
+
+        The tables can be filtered by passing the names to use to
+        ``filter_names``.
+
+        For each table the value is a :class:`.ReflectedPrimaryKeyConstraint`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param filter_names: optionally return information only for the
+         objects listed here.
+
+        :param kind: an :class:`.ObjectKind` that specifies the type of
+         objects to reflect. Defaults to ``ObjectKind.TABLE``.
+
+        :param scope: an :class:`.ObjectScope` that specifies whether
+         primary keys of default, temporary or any tables should be
+         reflected. Defaults to ``ObjectScope.DEFAULT``.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a dictionary where the keys are two-tuples
+         ``(schema, table_name)``
+         and the values are dictionaries, each representing the
+         definition of a primary key constraint.
+         The schema is ``None`` if no schema is provided.
+
+        .. versionadded:: 2.0
+
+        .. seealso:: :meth:`Inspector.get_pk_constraint`
+        """
+        with self._operation_context() as conn:
+            return dict(
+                self.dialect.get_multi_pk_constraint(
+                    conn,
+                    schema=schema,
+                    filter_names=filter_names,
+                    kind=kind,
+                    scope=scope,
+                    info_cache=self.info_cache,
+                    **kw,
+                )
+            )
+
+    def get_foreign_keys(
+        self, table_name: str, schema: Optional[str] = None, **kw: Any
+    ) -> List[ReflectedForeignKeyConstraint]:
+        r"""Return information about foreign keys in ``table_name``.
+
+        Given a string ``table_name`` and an optional string ``schema``, return
+        foreign key information as a list of
+        :class:`.ReflectedForeignKeyConstraint`.
+
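+        For example, a minimal usage sketch (assuming ``insp`` is an
+        existing :class:`.Inspector` and a table named ``address`` exists)::
+
+            for fk in insp.get_foreign_keys("address"):
+                print(fk["name"], fk["referred_table"])
+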
+        :param table_name: string name of the table.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a list of dictionaries, each representing
+         a foreign key definition.
+
+        .. seealso:: :meth:`Inspector.get_multi_foreign_keys`
+        """
+
+        with self._operation_context() as conn:
+            return self.dialect.get_foreign_keys(
+                conn, table_name, schema, info_cache=self.info_cache, **kw
+            )
+
+    def get_multi_foreign_keys(
+        self,
+        schema: Optional[str] = None,
+        filter_names: Optional[Sequence[str]] = None,
+        kind: ObjectKind = ObjectKind.TABLE,
+        scope: ObjectScope = ObjectScope.DEFAULT,
+        **kw: Any,
+    ) -> Dict[TableKey, List[ReflectedForeignKeyConstraint]]:
+        r"""Return information about foreign keys in all tables
+        in the given schema.
+
+        The tables can be filtered by passing the names to use to
+        ``filter_names``.
+
+        For each table the value is a list of
+        :class:`.ReflectedForeignKeyConstraint`.
+
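+        For example, a minimal usage sketch (assuming ``insp`` is an
+        existing :class:`.Inspector` and a table named ``address`` exists)::
+
+            fks = insp.get_multi_foreign_keys(filter_names=["address"])
+            for (schema, table), fk_list in fks.items():
+                print(schema, table, [fk["name"] for fk in fk_list])
+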
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param filter_names: optionally return information only for the
+         objects listed here.
+
+        :param kind: an :class:`.ObjectKind` that specifies the type of
+         objects to reflect. Defaults to ``ObjectKind.TABLE``.
+
+        :param scope: an :class:`.ObjectScope` that specifies whether
+         foreign keys of default, temporary or any tables should be
+         reflected. Defaults to ``ObjectScope.DEFAULT``.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a dictionary where the keys are two-tuples
+         ``(schema, table_name)`` and the values are lists of
+         dictionaries, each representing
+         a foreign key definition.
+         The schema is ``None`` if no schema is provided.
+
+        .. versionadded:: 2.0
+
+        .. seealso:: :meth:`Inspector.get_foreign_keys`
+        """
+
+        with self._operation_context() as conn:
+            return dict(
+                self.dialect.get_multi_foreign_keys(
+                    conn,
+                    schema=schema,
+                    filter_names=filter_names,
+                    kind=kind,
+                    scope=scope,
+                    info_cache=self.info_cache,
+                    **kw,
+                )
+            )
+
+    def get_indexes(
+        self, table_name: str, schema: Optional[str] = None, **kw: Any
+    ) -> List[ReflectedIndex]:
+        r"""Return information about indexes in ``table_name``.
+
+        Given a string ``table_name`` and an optional string ``schema``, return
+        index information as a list of :class:`.ReflectedIndex`.
+
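+        For example, a minimal usage sketch (assuming ``insp`` is an
+        existing :class:`.Inspector` and a table named ``user`` exists)::
+
+            for idx in insp.get_indexes("user"):
+                print(idx["name"], idx["column_names"], idx["unique"])
+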
+        :param table_name: string name of the table.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a list of dictionaries, each representing the
+         definition of an index.
+
+        .. seealso:: :meth:`Inspector.get_multi_indexes`
+        """
+
+        with self._operation_context() as conn:
+            return self.dialect.get_indexes(
+                conn, table_name, schema, info_cache=self.info_cache, **kw
+            )
+
+    def get_multi_indexes(
+        self,
+        schema: Optional[str] = None,
+        filter_names: Optional[Sequence[str]] = None,
+        kind: ObjectKind = ObjectKind.TABLE,
+        scope: ObjectScope = ObjectScope.DEFAULT,
+        **kw: Any,
+    ) -> Dict[TableKey, List[ReflectedIndex]]:
+        r"""Return information about indexes in all objects
+        in the given schema.
+
+        The objects can be filtered by passing the names to use to
+        ``filter_names``.
+
+        For each table the value is a list of :class:`.ReflectedIndex`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param filter_names: optionally return information only for the
+         objects listed here.
+
+        :param kind: an :class:`.ObjectKind` that specifies the type of
+         objects to reflect. Defaults to ``ObjectKind.TABLE``.
+
+        :param scope: an :class:`.ObjectScope` that specifies whether
+         indexes of default, temporary or any tables should be reflected.
+         Defaults to ``ObjectScope.DEFAULT``.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a dictionary where the keys are two-tuples
+         ``(schema, table_name)`` and the values are lists of
+         dictionaries, each representing the
+         definition of an index.
+         The schema is ``None`` if no schema is provided.
+
+        .. versionadded:: 2.0
+
+        .. seealso:: :meth:`Inspector.get_indexes`
+        """
+
+        with self._operation_context() as conn:
+            return dict(
+                self.dialect.get_multi_indexes(
+                    conn,
+                    schema=schema,
+                    filter_names=filter_names,
+                    kind=kind,
+                    scope=scope,
+                    info_cache=self.info_cache,
+                    **kw,
+                )
+            )
+
+    def get_unique_constraints(
+        self, table_name: str, schema: Optional[str] = None, **kw: Any
+    ) -> List[ReflectedUniqueConstraint]:
+        r"""Return information about unique constraints in ``table_name``.
+
+        Given a string ``table_name`` and an optional string ``schema``, return
+        unique constraint information as a list of
+        :class:`.ReflectedUniqueConstraint`.
+
+        :param table_name: string name of the table.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a list of dictionaries, each representing the
+         definition of a unique constraint.
+
+        .. seealso:: :meth:`Inspector.get_multi_unique_constraints`
+        """
+
+        with self._operation_context() as conn:
+            return self.dialect.get_unique_constraints(
+                conn, table_name, schema, info_cache=self.info_cache, **kw
+            )
+
+    def get_multi_unique_constraints(
+        self,
+        schema: Optional[str] = None,
+        filter_names: Optional[Sequence[str]] = None,
+        kind: ObjectKind = ObjectKind.TABLE,
+        scope: ObjectScope = ObjectScope.DEFAULT,
+        **kw: Any,
+    ) -> Dict[TableKey, List[ReflectedUniqueConstraint]]:
+        r"""Return information about unique constraints in all tables
+        in the given schema.
+
+        The tables can be filtered by passing the names to use to
+        ``filter_names``.
+
+        For each table the value is a list of
+        :class:`.ReflectedUniqueConstraint`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param filter_names: optionally return information only for the
+         objects listed here.
+
+        :param kind: an :class:`.ObjectKind` that specifies the type of
+         objects to reflect. Defaults to ``ObjectKind.TABLE``.
+
+        :param scope: an :class:`.ObjectScope` that specifies whether
+         constraints of default, temporary or any tables should be
+         reflected. Defaults to ``ObjectScope.DEFAULT``.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a dictionary where the keys are two-tuples
+         ``(schema, table_name)`` and the values are lists of
+         dictionaries, each representing the definition of a unique
+         constraint.
+         The schema is ``None`` if no schema is provided.
+
+        .. versionadded:: 2.0
+
+        .. seealso:: :meth:`Inspector.get_unique_constraints`
+        """
+
+        with self._operation_context() as conn:
+            return dict(
+                self.dialect.get_multi_unique_constraints(
+                    conn,
+                    schema=schema,
+                    filter_names=filter_names,
+                    kind=kind,
+                    scope=scope,
+                    info_cache=self.info_cache,
+                    **kw,
+                )
+            )
+
+    def get_table_comment(
+        self, table_name: str, schema: Optional[str] = None, **kw: Any
+    ) -> ReflectedTableComment:
+        r"""Return information about the table comment for ``table_name``.
+
+        Given a string ``table_name`` and an optional string ``schema``,
+        return table comment information as a :class:`.ReflectedTableComment`.
+
+        Raises ``NotImplementedError`` for a dialect that does not support
+        comments.
+
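+        For example, a minimal usage sketch (assuming ``insp`` is an
+        existing :class:`.Inspector` and a commented table named ``user``
+        exists)::
+
+            comment = insp.get_table_comment("user")
+            print(comment["text"])
+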
+        :param table_name: string name of the table.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a dictionary with the table comment.
+
+        .. versionadded:: 1.2
+
+        .. seealso:: :meth:`Inspector.get_multi_table_comment`
+        """
+
+        with self._operation_context() as conn:
+            return self.dialect.get_table_comment(
+                conn, table_name, schema, info_cache=self.info_cache, **kw
+            )
+
+    def get_multi_table_comment(
+        self,
+        schema: Optional[str] = None,
+        filter_names: Optional[Sequence[str]] = None,
+        kind: ObjectKind = ObjectKind.TABLE,
+        scope: ObjectScope = ObjectScope.DEFAULT,
+        **kw: Any,
+    ) -> Dict[TableKey, ReflectedTableComment]:
+        r"""Return information about the table comment in all objects
+        in the given schema.
+
+        The objects can be filtered by passing the names to use to
+        ``filter_names``.
+
+        For each table the value is a :class:`.ReflectedTableComment`.
+
+        Raises ``NotImplementedError`` for a dialect that does not support
+        comments.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param filter_names: optionally return information only for the
+         objects listed here.
+
+        :param kind: an :class:`.ObjectKind` that specifies the type of
+         objects to reflect. Defaults to ``ObjectKind.TABLE``.
+
+        :param scope: an :class:`.ObjectScope` that specifies whether
+         comments of default, temporary or any tables should be reflected.
+         Defaults to ``ObjectScope.DEFAULT``.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a dictionary where the keys are two-tuples
+         ``(schema, table_name)`` and the values are dictionaries
+         representing the table comments.
+         The schema is ``None`` if no schema is provided.
+
+        .. versionadded:: 2.0
+
+        .. seealso:: :meth:`Inspector.get_table_comment`
+        """
+
+        with self._operation_context() as conn:
+            return dict(
+                self.dialect.get_multi_table_comment(
+                    conn,
+                    schema=schema,
+                    filter_names=filter_names,
+                    kind=kind,
+                    scope=scope,
+                    info_cache=self.info_cache,
+                    **kw,
+                )
+            )
+
+    def get_check_constraints(
+        self, table_name: str, schema: Optional[str] = None, **kw: Any
+    ) -> List[ReflectedCheckConstraint]:
+        r"""Return information about check constraints in ``table_name``.
+
+        Given a string ``table_name`` and an optional string ``schema``, return
+        check constraint information as a list of
+        :class:`.ReflectedCheckConstraint`.
+
+        :param table_name: string name of the table.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a list of dictionaries, each representing the
+         definition of a check constraint.
+
+        .. seealso:: :meth:`Inspector.get_multi_check_constraints`
+        """
+
+        with self._operation_context() as conn:
+            return self.dialect.get_check_constraints(
+                conn, table_name, schema, info_cache=self.info_cache, **kw
+            )
+
+    def get_multi_check_constraints(
+        self,
+        schema: Optional[str] = None,
+        filter_names: Optional[Sequence[str]] = None,
+        kind: ObjectKind = ObjectKind.TABLE,
+        scope: ObjectScope = ObjectScope.DEFAULT,
+        **kw: Any,
+    ) -> Dict[TableKey, List[ReflectedCheckConstraint]]:
+        r"""Return information about check constraints in all tables
+        in the given schema.
+
+        The tables can be filtered by passing the names to use to
+        ``filter_names``.
+
+        For each table the value is a list of
+        :class:`.ReflectedCheckConstraint`.
+
+        :param schema: string schema name; if omitted, uses the default schema
+         of the database connection.  For special quoting,
+         use :class:`.quoted_name`.
+
+        :param filter_names: optionally return information only for the
+         objects listed here.
+
+        :param kind: an :class:`.ObjectKind` that specifies the type of
+         objects to reflect. Defaults to ``ObjectKind.TABLE``.
+
+        :param scope: an :class:`.ObjectScope` that specifies whether
+         constraints of default, temporary or any tables should be
+         reflected. Defaults to ``ObjectScope.DEFAULT``.
+
+        :param \**kw: Additional keyword arguments to pass to the dialect
+         specific implementation. See the documentation of the dialect
+         in use for more information.
+
+        :return: a dictionary where the keys are two-tuples
+         ``(schema, table_name)`` and the values are lists of
+         dictionaries, each representing the definition of a check
+         constraint.
+         The schema is ``None`` if no schema is provided.
+
+        .. versionadded:: 2.0
+
+        .. seealso:: :meth:`Inspector.get_check_constraints`
+        """
+
+        with self._operation_context() as conn:
+            return dict(
+                self.dialect.get_multi_check_constraints(
+                    conn,
+                    schema=schema,
+                    filter_names=filter_names,
+                    kind=kind,
+                    scope=scope,
+                    info_cache=self.info_cache,
+                    **kw,
+                )
+            )
+
+    def reflect_table(
+        self,
+        table: sa_schema.Table,
+        include_columns: Optional[Collection[str]],
+        exclude_columns: Collection[str] = (),
+        resolve_fks: bool = True,
+        _extend_on: Optional[Set[sa_schema.Table]] = None,
+        _reflect_info: Optional[_ReflectionInfo] = None,
+    ) -> None:
+        """Given a :class:`_schema.Table` object, load its internal
+        constructs based on introspection.
+
+        This is the underlying method used by most dialects to produce
+        table reflection.  Direct usage is like::
+
+            from sqlalchemy import create_engine, MetaData, Table
+            from sqlalchemy import inspect
+
+            engine = create_engine("...")
+            meta = MetaData()
+            user_table = Table("user", meta)
+            insp = inspect(engine)
+            insp.reflect_table(user_table, None)
+
+        .. versionchanged:: 1.4 Renamed from ``reflecttable`` to
+           ``reflect_table``
+
+        :param table: a :class:`~sqlalchemy.schema.Table` instance.
+        :param include_columns: a list of string column names to include
+          in the reflection process.  If ``None``, all columns are reflected.
+
+        """
+
+        if _extend_on is not None:
+            if table in _extend_on:
+                return
+            else:
+                _extend_on.add(table)
+
+        dialect = self.bind.dialect
+
+        with self._operation_context() as conn:
+            schema = conn.schema_for_object(table)
+
+        table_name = table.name
+
+        # get table-level arguments that are specifically
+        # intended for reflection, e.g. oracle_resolve_synonyms.
+        # these are unconditionally passed to related Table
+        # objects
+        reflection_options = {
+            k: table.dialect_kwargs.get(k)
+            for k in dialect.reflection_options
+            if k in table.dialect_kwargs
+        }
+
+        table_key = (schema, table_name)
+        if _reflect_info is None or table_key not in _reflect_info.columns:
+            _reflect_info = self._get_reflection_info(
+                schema,
+                filter_names=[table_name],
+                kind=ObjectKind.ANY,
+                scope=ObjectScope.ANY,
+                _reflect_info=_reflect_info,
+                **table.dialect_kwargs,
+            )
+        if table_key in _reflect_info.unreflectable:
+            raise _reflect_info.unreflectable[table_key]
+
+        if table_key not in _reflect_info.columns:
+            raise exc.NoSuchTableError(table_name)
+
+        # reflect table options, like mysql_engine
+        if _reflect_info.table_options:
+            tbl_opts = _reflect_info.table_options.get(table_key)
+            if tbl_opts:
+                # add additional kwargs to the Table if the dialect
+                # returned them
+                table._validate_dialect_kwargs(tbl_opts)
+
+        found_table = False
+        cols_by_orig_name: Dict[str, sa_schema.Column[Any]] = {}
+
+        for col_d in _reflect_info.columns[table_key]:
+            found_table = True
+
+            self._reflect_column(
+                table,
+                col_d,
+                include_columns,
+                exclude_columns,
+                cols_by_orig_name,
+            )
+
+        # NOTE: support tables/views with no columns
+        if not found_table and not self.has_table(table_name, schema):
+            raise exc.NoSuchTableError(table_name)
+
+        self._reflect_pk(
+            _reflect_info, table_key, table, cols_by_orig_name, exclude_columns
+        )
+
+        self._reflect_fk(
+            _reflect_info,
+            table_key,
+            table,
+            cols_by_orig_name,
+            include_columns,
+            exclude_columns,
+            resolve_fks,
+            _extend_on,
+            reflection_options,
+        )
+
+        self._reflect_indexes(
+            _reflect_info,
+            table_key,
+            table,
+            cols_by_orig_name,
+            include_columns,
+            exclude_columns,
+            reflection_options,
+        )
+
+        self._reflect_unique_constraints(
+            _reflect_info,
+            table_key,
+            table,
+            cols_by_orig_name,
+            include_columns,
+            exclude_columns,
+            reflection_options,
+        )
+
+        self._reflect_check_constraints(
+            _reflect_info,
+            table_key,
+            table,
+            cols_by_orig_name,
+            include_columns,
+            exclude_columns,
+            reflection_options,
+        )
+
+        self._reflect_table_comment(
+            _reflect_info,
+            table_key,
+            table,
+            reflection_options,
+        )
+
+    def _reflect_column(
+        self,
+        table: sa_schema.Table,
+        col_d: ReflectedColumn,
+        include_columns: Optional[Collection[str]],
+        exclude_columns: Collection[str],
+        cols_by_orig_name: Dict[str, sa_schema.Column[Any]],
+    ) -> None:
+        orig_name = col_d["name"]
+
+        table.metadata.dispatch.column_reflect(self, table, col_d)
+        table.dispatch.column_reflect(self, table, col_d)
+
+        # fetch name again as column_reflect is allowed to
+        # change it
+        name = col_d["name"]
+        if (include_columns and name not in include_columns) or (
+            exclude_columns and name in exclude_columns
+        ):
+            return
+
+        coltype = col_d["type"]
+
+        col_kw = {
+            k: col_d[k]  # type: ignore[literal-required]
+            for k in [
+                "nullable",
+                "autoincrement",
+                "quote",
+                "info",
+                "key",
+                "comment",
+            ]
+            if k in col_d
+        }
+
+        if "dialect_options" in col_d:
+            col_kw.update(col_d["dialect_options"])
+
+        colargs = []
+        default: Any
+        if col_d.get("default") is not None:
+            default_text = col_d["default"]
+            assert default_text is not None
+            if isinstance(default_text, TextClause):
+                default = sa_schema.DefaultClause(
+                    default_text, _reflected=True
+                )
+            elif not isinstance(default_text, sa_schema.FetchedValue):
+                default = sa_schema.DefaultClause(
+                    sql.text(default_text), _reflected=True
+                )
+            else:
+                default = default_text
+            colargs.append(default)
+
+        if "computed" in col_d:
+            computed = sa_schema.Computed(**col_d["computed"])
+            colargs.append(computed)
+
+        if "identity" in col_d:
+            identity = sa_schema.Identity(**col_d["identity"])
+            colargs.append(identity)
+
+        cols_by_orig_name[orig_name] = col = sa_schema.Column(
+            name, coltype, *colargs, **col_kw
+        )
+
+        if col.key in table.primary_key:
+            col.primary_key = True
+        table.append_column(col, replace_existing=True)
+
+    def _reflect_pk(
+        self,
+        _reflect_info: _ReflectionInfo,
+        table_key: TableKey,
+        table: sa_schema.Table,
+        cols_by_orig_name: Dict[str, sa_schema.Column[Any]],
+        exclude_columns: Collection[str],
+    ) -> None:
+        pk_cons = _reflect_info.pk_constraint.get(table_key)
+        if pk_cons:
+            pk_cols = [
+                cols_by_orig_name[pk]
+                for pk in pk_cons["constrained_columns"]
+                if pk in cols_by_orig_name and pk not in exclude_columns
+            ]
+
+            # update pk constraint name and comment
+            table.primary_key.name = pk_cons.get("name")
+            table.primary_key.comment = pk_cons.get("comment", None)
+
+            # tell the PKConstraint to re-initialize
+            # its column collection
+            table.primary_key._reload(pk_cols)
+
+    def _reflect_fk(
+        self,
+        _reflect_info: _ReflectionInfo,
+        table_key: TableKey,
+        table: sa_schema.Table,
+        cols_by_orig_name: Dict[str, sa_schema.Column[Any]],
+        include_columns: Optional[Collection[str]],
+        exclude_columns: Collection[str],
+        resolve_fks: bool,
+        _extend_on: Optional[Set[sa_schema.Table]],
+        reflection_options: Dict[str, Any],
+    ) -> None:
+        fkeys = _reflect_info.foreign_keys.get(table_key, [])
+        for fkey_d in fkeys:
+            conname = fkey_d["name"]
+            # look for columns by orig name in cols_by_orig_name,
+            # but support columns that are in-Python only as fallback
+            constrained_columns = [
+                cols_by_orig_name[c].key if c in cols_by_orig_name else c
+                for c in fkey_d["constrained_columns"]
+            ]
+
+            if (
+                exclude_columns
+                and set(constrained_columns).intersection(exclude_columns)
+                or (
+                    include_columns
+                    and set(constrained_columns).difference(include_columns)
+                )
+            ):
+                continue
+
+            referred_schema = fkey_d["referred_schema"]
+            referred_table = fkey_d["referred_table"]
+            referred_columns = fkey_d["referred_columns"]
+            refspec = []
+            if referred_schema is not None:
+                if resolve_fks:
+                    sa_schema.Table(
+                        referred_table,
+                        table.metadata,
+                        schema=referred_schema,
+                        autoload_with=self.bind,
+                        _extend_on=_extend_on,
+                        _reflect_info=_reflect_info,
+                        **reflection_options,
+                    )
+                for column in referred_columns:
+                    refspec.append(
+                        ".".join([referred_schema, referred_table, column])
+                    )
+            else:
+                if resolve_fks:
+                    sa_schema.Table(
+                        referred_table,
+                        table.metadata,
+                        autoload_with=self.bind,
+                        schema=sa_schema.BLANK_SCHEMA,
+                        _extend_on=_extend_on,
+                        _reflect_info=_reflect_info,
+                        **reflection_options,
+                    )
+                for column in referred_columns:
+                    refspec.append(".".join([referred_table, column]))
+            if "options" in fkey_d:
+                options = fkey_d["options"]
+            else:
+                options = {}
+
+            try:
+                table.append_constraint(
+                    sa_schema.ForeignKeyConstraint(
+                        constrained_columns,
+                        refspec,
+                        conname,
+                        link_to_name=True,
+                        comment=fkey_d.get("comment"),
+                        **options,
+                    )
+                )
+            except exc.ConstraintColumnNotFoundError:
+                util.warn(
+                    f"On reflected table {table.name}, skipping reflection of "
+                    "foreign key constraint "
+                    f"{conname}; one or more subject columns within "
+                    f"name(s) {', '.join(constrained_columns)} are not "
+                    "present in the table"
+                )
+
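+    # maps reflected "column_sorting" flags to the operator that applies
+    # the corresponding sort modifier to an index expression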
+    _index_sort_exprs = {
+        "asc": operators.asc_op,
+        "desc": operators.desc_op,
+        "nulls_first": operators.nulls_first_op,
+        "nulls_last": operators.nulls_last_op,
+    }
+
+    def _reflect_indexes(
+        self,
+        _reflect_info: _ReflectionInfo,
+        table_key: TableKey,
+        table: sa_schema.Table,
+        cols_by_orig_name: Dict[str, sa_schema.Column[Any]],
+        include_columns: Optional[Collection[str]],
+        exclude_columns: Collection[str],
+        reflection_options: Dict[str, Any],
+    ) -> None:
+        # Indexes
+        indexes = _reflect_info.indexes.get(table_key, [])
+        for index_d in indexes:
+            name = index_d["name"]
+            columns = index_d["column_names"]
+            expressions = index_d.get("expressions")
+            column_sorting = index_d.get("column_sorting", {})
+            unique = index_d["unique"]
+            flavor = index_d.get("type", "index")
+            dialect_options = index_d.get("dialect_options", {})
+
+            duplicates = index_d.get("duplicates_constraint")
+            if include_columns and not set(columns).issubset(include_columns):
+                continue
+            if duplicates:
+                continue
+            # look for columns by orig name in cols_by_orig_name,
+            # but support columns that are in-Python only as fallback
+            idx_element: Any
+            idx_elements = []
+            for index, c in enumerate(columns):
+                if c is None:
+                    if not expressions:
+                        util.warn(
+                            f"Skipping {flavor} {name!r} because key "
+                            f"{index + 1} reflected as None but no "
+                            "'expressions' were returned"
+                        )
+                        break
+                    idx_element = sql.text(expressions[index])
+                else:
+                    try:
+                        if c in cols_by_orig_name:
+                            idx_element = cols_by_orig_name[c]
+                        else:
+                            idx_element = table.c[c]
+                    except KeyError:
+                        util.warn(
+                            f"{flavor} key {c!r} was not located in "
+                            f"columns for table {table.name!r}"
+                        )
+                        continue
+                    for option in column_sorting.get(c, ()):
+                        if option in self._index_sort_exprs:
+                            op = self._index_sort_exprs[option]
+                            idx_element = op(idx_element)
+                idx_elements.append(idx_element)
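+            # note the for/else: the Index is constructed only if the
+            # loop above completed without ``break``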
+            else:
+                sa_schema.Index(
+                    name,
+                    *idx_elements,
+                    _table=table,
+                    unique=unique,
+                    **dialect_options,
+                )
+
+    def _reflect_unique_constraints(
+        self,
+        _reflect_info: _ReflectionInfo,
+        table_key: TableKey,
+        table: sa_schema.Table,
+        cols_by_orig_name: Dict[str, sa_schema.Column[Any]],
+        include_columns: Optional[Collection[str]],
+        exclude_columns: Collection[str],
+        reflection_options: Dict[str, Any],
+    ) -> None:
+        constraints = _reflect_info.unique_constraints.get(table_key, [])
+        # Unique Constraints
+        for const_d in constraints:
+            conname = const_d["name"]
+            columns = const_d["column_names"]
+            comment = const_d.get("comment")
+            duplicates = const_d.get("duplicates_index")
+            dialect_options = const_d.get("dialect_options", {})
+            if include_columns and not set(columns).issubset(include_columns):
+                continue
+            if duplicates:
+                continue
+            # look for columns by orig name in cols_by_orig_name,
+            # but support columns that are in-Python only as fallback
+            constrained_cols = []
+            for c in columns:
+                try:
+                    constrained_col = (
+                        cols_by_orig_name[c]
+                        if c in cols_by_orig_name
+                        else table.c[c]
+                    )
+                except KeyError:
+                    util.warn(
+                        "unique constraint key '%s' was not located in "
+                        "columns for table '%s'" % (c, table.name)
+                    )
+                else:
+                    constrained_cols.append(constrained_col)
+            table.append_constraint(
+                sa_schema.UniqueConstraint(
+                    *constrained_cols,
+                    name=conname,
+                    comment=comment,
+                    **dialect_options,
+                )
+            )
+
+    def _reflect_check_constraints(
+        self,
+        _reflect_info: _ReflectionInfo,
+        table_key: TableKey,
+        table: sa_schema.Table,
+        cols_by_orig_name: Dict[str, sa_schema.Column[Any]],
+        include_columns: Optional[Collection[str]],
+        exclude_columns: Collection[str],
+        reflection_options: Dict[str, Any],
+    ) -> None:
+        constraints = _reflect_info.check_constraints.get(table_key, [])
+        for const_d in constraints:
+            table.append_constraint(sa_schema.CheckConstraint(**const_d))
+
+    def _reflect_table_comment(
+        self,
+        _reflect_info: _ReflectionInfo,
+        table_key: TableKey,
+        table: sa_schema.Table,
+        reflection_options: Dict[str, Any],
+    ) -> None:
+        comment_dict = _reflect_info.table_comment.get(table_key)
+        if comment_dict:
+            table.comment = comment_dict["text"]
+
+    def _get_reflection_info(
+        self,
+        schema: Optional[str] = None,
+        filter_names: Optional[Collection[str]] = None,
+        available: Optional[Collection[str]] = None,
+        _reflect_info: Optional[_ReflectionInfo] = None,
+        **kw: Any,
+    ) -> _ReflectionInfo:
+        kw["schema"] = schema
+
+        if filter_names and available and len(filter_names) > 100:
+            fraction = len(filter_names) / len(available)
+        else:
+            fraction = None
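+        # ``fraction`` is the share of all available tables being requested;
+        # it drives the filter_names heuristic inside run() below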
+
+        unreflectable: Dict[TableKey, exc.UnreflectableTableError]
+        kw["unreflectable"] = unreflectable = {}
+
+        has_result: bool = True
+
+        def run(
+            meth: Any,
+            *,
+            optional: bool = False,
+            check_filter_names_from_meth: bool = False,
+        ) -> Any:
+            nonlocal has_result
+            # simple heuristic to improve reflection performance if a
+            # dialect implements multi_reflection:
+            # if more than 50% of the tables in the db are in filter_names
+            # load all the tables, since it's most likely faster to avoid
+            # a filter on that many tables.
+            if (
+                fraction is None
+                or fraction <= 0.5
+                or not self.dialect._overrides_default(meth.__name__)
+            ):
+                _fn = filter_names
+            else:
+                _fn = None
+            try:
+                if has_result:
+                    res = meth(filter_names=_fn, **kw)
+                    if check_filter_names_from_meth and not res:
+                        # method returned no result data;
+                        # skip calls to the remaining methods
+                        has_result = False
+                else:
+                    res = {}
+            except NotImplementedError:
+                if not optional:
+                    raise
+                res = {}
+            return res
+
+        info = _ReflectionInfo(
+            columns=run(
+                self.get_multi_columns, check_filter_names_from_meth=True
+            ),
+            pk_constraint=run(self.get_multi_pk_constraint),
+            foreign_keys=run(self.get_multi_foreign_keys),
+            indexes=run(self.get_multi_indexes),
+            unique_constraints=run(
+                self.get_multi_unique_constraints, optional=True
+            ),
+            table_comment=run(self.get_multi_table_comment, optional=True),
+            check_constraints=run(
+                self.get_multi_check_constraints, optional=True
+            ),
+            table_options=run(self.get_multi_table_options, optional=True),
+            unreflectable=unreflectable,
+        )
+        if _reflect_info:
+            _reflect_info.update(info)
+            return _reflect_info
+        else:
+            return info
+
+
+@final
+class ReflectionDefaults:
+    """provides blank default values for reflection methods."""
+
+    @classmethod
+    def columns(cls) -> List[ReflectedColumn]:
+        return []
+
+    @classmethod
+    def pk_constraint(cls) -> ReflectedPrimaryKeyConstraint:
+        return {
+            "name": None,
+            "constrained_columns": [],
+        }
+
+    @classmethod
+    def foreign_keys(cls) -> List[ReflectedForeignKeyConstraint]:
+        return []
+
+    @classmethod
+    def indexes(cls) -> List[ReflectedIndex]:
+        return []
+
+    @classmethod
+    def unique_constraints(cls) -> List[ReflectedUniqueConstraint]:
+        return []
+
+    @classmethod
+    def check_constraints(cls) -> List[ReflectedCheckConstraint]:
+        return []
+
+    @classmethod
+    def table_options(cls) -> Dict[str, Any]:
+        return {}
+
+    @classmethod
+    def table_comment(cls) -> ReflectedTableComment:
+        return {"text": None}
+
+
+@dataclass
+class _ReflectionInfo:
+    columns: Dict[TableKey, List[ReflectedColumn]]
+    pk_constraint: Dict[TableKey, Optional[ReflectedPrimaryKeyConstraint]]
+    foreign_keys: Dict[TableKey, List[ReflectedForeignKeyConstraint]]
+    indexes: Dict[TableKey, List[ReflectedIndex]]
+    # optionals
+    unique_constraints: Dict[TableKey, List[ReflectedUniqueConstraint]]
+    table_comment: Dict[TableKey, Optional[ReflectedTableComment]]
+    check_constraints: Dict[TableKey, List[ReflectedCheckConstraint]]
+    table_options: Dict[TableKey, Dict[str, Any]]
+    unreflectable: Dict[TableKey, exc.UnreflectableTableError]
+
+    def update(self, other: _ReflectionInfo) -> None:
+        for k, v in self.__dict__.items():
+            ov = getattr(other, k)
+            if ov is not None:
+                if v is None:
+                    setattr(self, k, ov)
+                else:
+                    v.update(ov)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/result.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/result.py
new file mode 100644
index 00000000..3c81fc60
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/result.py
@@ -0,0 +1,2380 @@
+# engine/result.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Define generic result set constructs."""
+
+from __future__ import annotations
+
+from enum import Enum
+import functools
+import itertools
+import operator
+import typing
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import Generic
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import NoReturn
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from .row import Row
+from .row import RowMapping
+from .. import exc
+from .. import util
+from ..sql.base import _generative
+from ..sql.base import HasMemoized
+from ..sql.base import InPlaceGenerative
+from ..util import HasMemoized_ro_memoized_attribute
+from ..util import NONE_SET
+from ..util._has_cy import HAS_CYEXTENSION
+from ..util.typing import Literal
+from ..util.typing import Self
+
+if typing.TYPE_CHECKING or not HAS_CYEXTENSION:
+    from ._py_row import tuplegetter as tuplegetter
+else:
+    from sqlalchemy.cyextension.resultproxy import tuplegetter as tuplegetter
+
+if typing.TYPE_CHECKING:
+    from ..sql.elements import SQLCoreOperations
+    from ..sql.type_api import _ResultProcessorType
+
+_KeyType = Union[str, "SQLCoreOperations[Any]"]
+_KeyIndexType = Union[_KeyType, int]
+
+# is overridden in cursor using _CursorKeyMapRecType
+_KeyMapRecType = Any
+
+_KeyMapType = Mapping[_KeyType, _KeyMapRecType]
+
+
+_RowData = Union[Row[Any], RowMapping, Any]
+"""A generic form of "row" that accommodates for the different kinds of
+"rows" that different result objects return, including row, row mapping, and
+scalar values"""
+
+_RawRowType = Tuple[Any, ...]
+"""represents the kind of row we get from a DBAPI cursor"""
+
+_R = TypeVar("_R", bound=_RowData)
+_T = TypeVar("_T", bound=Any)
+_TP = TypeVar("_TP", bound=Tuple[Any, ...])
+
+_InterimRowType = Union[_R, _RawRowType]
+"""a catchall "anything" kind of return type that can be applied
+across all the result types
+
+"""
+
+_InterimSupportsScalarsRowType = Union[Row[Any], Any]
+
+_ProcessorsType = Sequence[Optional["_ResultProcessorType[Any]"]]
+_TupleGetterType = Callable[[Sequence[Any]], Sequence[Any]]
+_UniqueFilterType = Callable[[Any], Any]
+_UniqueFilterStateType = Tuple[Set[Any], Optional[_UniqueFilterType]]
+
+
+class ResultMetaData:
+    """Base for metadata about result rows."""
+
+    __slots__ = ()
+
+    _tuplefilter: Optional[_TupleGetterType] = None
+    _translated_indexes: Optional[Sequence[int]] = None
+    _unique_filters: Optional[Sequence[Callable[[Any], Any]]] = None
+    _keymap: _KeyMapType
+    _keys: Sequence[str]
+    _processors: Optional[_ProcessorsType]
+    _key_to_index: Mapping[_KeyType, int]
+
+    @property
+    def keys(self) -> RMKeyView:
+        return RMKeyView(self)
+
+    def _has_key(self, key: object) -> bool:
+        raise NotImplementedError()
+
+    def _for_freeze(self) -> ResultMetaData:
+        raise NotImplementedError()
+
+    @overload
+    def _key_fallback(
+        self, key: Any, err: Optional[Exception], raiseerr: Literal[True] = ...
+    ) -> NoReturn: ...
+
+    @overload
+    def _key_fallback(
+        self,
+        key: Any,
+        err: Optional[Exception],
+        raiseerr: Literal[False] = ...,
+    ) -> None: ...
+
+    @overload
+    def _key_fallback(
+        self, key: Any, err: Optional[Exception], raiseerr: bool = ...
+    ) -> Optional[NoReturn]: ...
+
+    def _key_fallback(
+        self, key: Any, err: Optional[Exception], raiseerr: bool = True
+    ) -> Optional[NoReturn]:
+        assert raiseerr
+        raise KeyError(key) from err
+
+    def _raise_for_ambiguous_column_name(
+        self, rec: _KeyMapRecType
+    ) -> NoReturn:
+        raise NotImplementedError(
+            "ambiguous column name logic is implemented for "
+            "CursorResultMetaData"
+        )
+
+    def _index_for_key(
+        self, key: _KeyIndexType, raiseerr: bool
+    ) -> Optional[int]:
+        raise NotImplementedError()
+
+    def _indexes_for_keys(
+        self, keys: Sequence[_KeyIndexType]
+    ) -> Sequence[int]:
+        raise NotImplementedError()
+
+    def _metadata_for_keys(
+        self, keys: Sequence[_KeyIndexType]
+    ) -> Iterator[_KeyMapRecType]:
+        raise NotImplementedError()
+
+    def _reduce(self, keys: Sequence[_KeyIndexType]) -> ResultMetaData:
+        raise NotImplementedError()
+
+    def _getter(
+        self, key: Any, raiseerr: bool = True
+    ) -> Optional[Callable[[Row[Any]], Any]]:
+        index = self._index_for_key(key, raiseerr)
+
+        if index is not None:
+            return operator.itemgetter(index)
+        else:
+            return None
+
+    def _row_as_tuple_getter(
+        self, keys: Sequence[_KeyIndexType]
+    ) -> _TupleGetterType:
+        indexes = self._indexes_for_keys(keys)
+        return tuplegetter(*indexes)
+
+    def _make_key_to_index(
+        self, keymap: Mapping[_KeyType, Sequence[Any]], index: int
+    ) -> Mapping[_KeyType, int]:
+        return {
+            key: rec[index]
+            for key, rec in keymap.items()
+            if rec[index] is not None
+        }
+
+    def _key_not_found(self, key: Any, attr_error: bool) -> NoReturn:
+        if key in self._keymap:
+            # the index must be None in this case
+            self._raise_for_ambiguous_column_name(self._keymap[key])
+        else:
+            # unknown key
+            if attr_error:
+                try:
+                    self._key_fallback(key, None)
+                except KeyError as ke:
+                    raise AttributeError(ke.args[0]) from ke
+            else:
+                self._key_fallback(key, None)
+
+    @property
+    def _effective_processors(self) -> Optional[_ProcessorsType]:
+        if not self._processors or NONE_SET.issuperset(self._processors):
+            return None
+        else:
+            return self._processors
+
+
+class RMKeyView(typing.KeysView[Any]):
+    __slots__ = ("_parent", "_keys")
+
+    _parent: ResultMetaData
+    _keys: Sequence[str]
+
+    def __init__(self, parent: ResultMetaData):
+        self._parent = parent
+        self._keys = [k for k in parent._keys if k is not None]
+
+    def __len__(self) -> int:
+        return len(self._keys)
+
+    def __repr__(self) -> str:
+        return "{0.__class__.__name__}({0._keys!r})".format(self)
+
+    def __iter__(self) -> Iterator[str]:
+        return iter(self._keys)
+
+    def __contains__(self, item: Any) -> bool:
+        if isinstance(item, int):
+            return False
+
+        # note this also includes special key fallback behaviors
+        # which also don't seem to be tested in test_resultset right now
+        return self._parent._has_key(item)
+
+    def __eq__(self, other: Any) -> bool:
+        return list(other) == list(self)
+
+    def __ne__(self, other: Any) -> bool:
+        return list(other) != list(self)
+
+
+class SimpleResultMetaData(ResultMetaData):
+    """result metadata for in-memory collections."""
+
+    __slots__ = (
+        "_keys",
+        "_keymap",
+        "_processors",
+        "_tuplefilter",
+        "_translated_indexes",
+        "_unique_filters",
+        "_key_to_index",
+    )
+
+    _keys: Sequence[str]
+
+    def __init__(
+        self,
+        keys: Sequence[str],
+        extra: Optional[Sequence[Any]] = None,
+        _processors: Optional[_ProcessorsType] = None,
+        _tuplefilter: Optional[_TupleGetterType] = None,
+        _translated_indexes: Optional[Sequence[int]] = None,
+        _unique_filters: Optional[Sequence[Callable[[Any], Any]]] = None,
+    ):
+        self._keys = list(keys)
+        self._tuplefilter = _tuplefilter
+        self._translated_indexes = _translated_indexes
+        self._unique_filters = _unique_filters
+        if extra:
+            recs_names = [
+                (
+                    (name,) + (extras if extras else ()),
+                    (index, name, extras),
+                )
+                for index, (name, extras) in enumerate(zip(self._keys, extra))
+            ]
+        else:
+            recs_names = [
+                ((name,), (index, name, ()))
+                for index, name in enumerate(self._keys)
+            ]
+
+        self._keymap = {key: rec for keys, rec in recs_names for key in keys}
+
+        self._processors = _processors
+
+        self._key_to_index = self._make_key_to_index(self._keymap, 0)
+
+    def _has_key(self, key: object) -> bool:
+        return key in self._keymap
+
+    def _for_freeze(self) -> ResultMetaData:
+        unique_filters = self._unique_filters
+        if unique_filters and self._tuplefilter:
+            unique_filters = self._tuplefilter(unique_filters)
+
+        # TODO: are we freezing the result with or without uniqueness
+        # applied?
+        return SimpleResultMetaData(
+            self._keys,
+            extra=[self._keymap[key][2] for key in self._keys],
+            _unique_filters=unique_filters,
+        )
+
+    def __getstate__(self) -> Dict[str, Any]:
+        return {
+            "_keys": self._keys,
+            "_translated_indexes": self._translated_indexes,
+        }
+
+    def __setstate__(self, state: Dict[str, Any]) -> None:
+        if state["_translated_indexes"]:
+            _translated_indexes = state["_translated_indexes"]
+            _tuplefilter = tuplegetter(*_translated_indexes)
+        else:
+            _translated_indexes = _tuplefilter = None
+        self.__init__(  # type: ignore
+            state["_keys"],
+            _translated_indexes=_translated_indexes,
+            _tuplefilter=_tuplefilter,
+        )
+
+    def _index_for_key(self, key: Any, raiseerr: bool = True) -> int:
+        if int in key.__class__.__mro__:
+            key = self._keys[key]
+        try:
+            rec = self._keymap[key]
+        except KeyError as ke:
+            rec = self._key_fallback(key, ke, raiseerr)
+
+        return rec[0]  # type: ignore[no-any-return]
+
+    def _indexes_for_keys(self, keys: Sequence[Any]) -> Sequence[int]:
+        return [self._keymap[key][0] for key in keys]
+
+    def _metadata_for_keys(
+        self, keys: Sequence[Any]
+    ) -> Iterator[_KeyMapRecType]:
+        for key in keys:
+            if int in key.__class__.__mro__:
+                key = self._keys[key]
+
+            try:
+                rec = self._keymap[key]
+            except KeyError as ke:
+                rec = self._key_fallback(key, ke, True)
+
+            yield rec
+
+    def _reduce(self, keys: Sequence[Any]) -> ResultMetaData:
+        try:
+            metadata_for_keys = [
+                self._keymap[
+                    self._keys[key] if int in key.__class__.__mro__ else key
+                ]
+                for key in keys
+            ]
+        except KeyError as ke:
+            self._key_fallback(ke.args[0], ke, True)
+
+        indexes: Sequence[int]
+        new_keys: Sequence[str]
+        extra: Sequence[Any]
+        indexes, new_keys, extra = zip(*metadata_for_keys)
+
+        if self._translated_indexes:
+            indexes = [self._translated_indexes[idx] for idx in indexes]
+
+        tup = tuplegetter(*indexes)
+
+        new_metadata = SimpleResultMetaData(
+            new_keys,
+            extra=extra,
+            _tuplefilter=tup,
+            _translated_indexes=indexes,
+            _processors=self._processors,
+            _unique_filters=self._unique_filters,
+        )
+
+        return new_metadata
+
+
+def result_tuple(
+    fields: Sequence[str], extra: Optional[Any] = None
+) -> Callable[[Iterable[Any]], Row[Any]]:
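+    # illustrative sketch: the returned callable builds Row objects from
+    # plain iterables, e.g.
+    #
+    #   make_row = result_tuple(["a", "b"])
+    #   row = make_row((1, 2))
+    #   assert row.a == 1 and row[1] == 2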
+    parent = SimpleResultMetaData(fields, extra)
+    return functools.partial(
+        Row, parent, parent._effective_processors, parent._key_to_index
+    )
+
+
+# a symbol that indicates to internal Result methods that
+# "no row is returned".  We can't use None for those cases where a scalar
+# filter is applied to rows.
+class _NoRow(Enum):
+    _NO_ROW = 0
+
+
+_NO_ROW = _NoRow._NO_ROW
+
+
+class ResultInternal(InPlaceGenerative, Generic[_R]):
+    __slots__ = ()
+
+    _real_result: Optional[Result[Any]] = None
+    _generate_rows: bool = True
+    _row_logging_fn: Optional[Callable[[Any], Any]]
+
+    _unique_filter_state: Optional[_UniqueFilterStateType] = None
+    _post_creational_filter: Optional[Callable[[Any], Any]] = None
+    _is_cursor = False
+
+    _metadata: ResultMetaData
+
+    _source_supports_scalars: bool
+
+    def _fetchiter_impl(self) -> Iterator[_InterimRowType[Row[Any]]]:
+        raise NotImplementedError()
+
+    def _fetchone_impl(
+        self, hard_close: bool = False
+    ) -> Optional[_InterimRowType[Row[Any]]]:
+        raise NotImplementedError()
+
+    def _fetchmany_impl(
+        self, size: Optional[int] = None
+    ) -> List[_InterimRowType[Row[Any]]]:
+        raise NotImplementedError()
+
+    def _fetchall_impl(self) -> List[_InterimRowType[Row[Any]]]:
+        raise NotImplementedError()
+
+    def _soft_close(self, hard: bool = False) -> None:
+        raise NotImplementedError()
+
+    @HasMemoized_ro_memoized_attribute
+    def _row_getter(self) -> Optional[Callable[..., _R]]:
+        real_result: Result[Any] = (
+            self._real_result
+            if self._real_result
+            else cast("Result[Any]", self)
+        )
+
+        if real_result._source_supports_scalars:
+            if not self._generate_rows:
+                return None
+            else:
+                _proc = Row
+
+                def process_row(
+                    metadata: ResultMetaData,
+                    processors: Optional[_ProcessorsType],
+                    key_to_index: Mapping[_KeyType, int],
+                    scalar_obj: Any,
+                ) -> Row[Any]:
+                    return _proc(
+                        metadata, processors, key_to_index, (scalar_obj,)
+                    )
+
+        else:
+            process_row = Row  # type: ignore
+
+        metadata = self._metadata
+
+        key_to_index = metadata._key_to_index
+        processors = metadata._effective_processors
+        tf = metadata._tuplefilter
+
+        if tf and not real_result._source_supports_scalars:
+            if processors:
+                processors = tf(processors)
+
+            _make_row_orig: Callable[..., _R] = functools.partial(  # type: ignore  # noqa: E501
+                process_row, metadata, processors, key_to_index
+            )
+
+            fixed_tf = tf
+
+            def make_row(row: _InterimRowType[Row[Any]]) -> _R:
+                return _make_row_orig(fixed_tf(row))
+
+        else:
+            make_row = functools.partial(  # type: ignore
+                process_row, metadata, processors, key_to_index
+            )
+
+        if real_result._row_logging_fn:
+            _log_row = real_result._row_logging_fn
+            _make_row = make_row
+
+            def make_row(row: _InterimRowType[Row[Any]]) -> _R:
+                return _log_row(_make_row(row))  # type: ignore
+
+        return make_row
+
+    @HasMemoized_ro_memoized_attribute
+    def _iterator_getter(self) -> Callable[..., Iterator[_R]]:
+        make_row = self._row_getter
+
+        post_creational_filter = self._post_creational_filter
+
+        if self._unique_filter_state:
+            uniques, strategy = self._unique_strategy
+
+            def iterrows(self: Result[Any]) -> Iterator[_R]:
+                for raw_row in self._fetchiter_impl():
+                    obj: _InterimRowType[Any] = (
+                        make_row(raw_row) if make_row else raw_row
+                    )
+                    hashed = strategy(obj) if strategy else obj
+                    if hashed in uniques:
+                        continue
+                    uniques.add(hashed)
+                    if post_creational_filter:
+                        obj = post_creational_filter(obj)
+                    yield obj  # type: ignore
+
+        else:
+
+            def iterrows(self: Result[Any]) -> Iterator[_R]:
+                for raw_row in self._fetchiter_impl():
+                    row: _InterimRowType[Any] = (
+                        make_row(raw_row) if make_row else raw_row
+                    )
+                    if post_creational_filter:
+                        row = post_creational_filter(row)
+                    yield row  # type: ignore
+
+        return iterrows
+
+    def _raw_all_rows(self) -> List[_R]:
+        make_row = self._row_getter
+        assert make_row is not None
+        rows = self._fetchall_impl()
+        return [make_row(row) for row in rows]
+
+    def _allrows(self) -> List[_R]:
+        post_creational_filter = self._post_creational_filter
+
+        make_row = self._row_getter
+
+        rows = self._fetchall_impl()
+        made_rows: List[_InterimRowType[_R]]
+        if make_row:
+            made_rows = [make_row(row) for row in rows]
+        else:
+            made_rows = rows  # type: ignore
+
+        interim_rows: List[_R]
+
+        if self._unique_filter_state:
+            uniques, strategy = self._unique_strategy
+
+            interim_rows = [
+                made_row  # type: ignore
+                for made_row, sig_row in [
+                    (
+                        made_row,
+                        strategy(made_row) if strategy else made_row,
+                    )
+                    for made_row in made_rows
+                ]
+                if sig_row not in uniques and not uniques.add(sig_row)  # type: ignore # noqa: E501
+            ]
+        else:
+            interim_rows = made_rows  # type: ignore
+
+        if post_creational_filter:
+            interim_rows = [
+                post_creational_filter(row) for row in interim_rows
+            ]
+        return interim_rows
+
+    @HasMemoized_ro_memoized_attribute
+    def _onerow_getter(
+        self,
+    ) -> Callable[..., Union[Literal[_NoRow._NO_ROW], _R]]:
+        make_row = self._row_getter
+
+        post_creational_filter = self._post_creational_filter
+
+        if self._unique_filter_state:
+            uniques, strategy = self._unique_strategy
+
+            def onerow(self: Result[Any]) -> Union[_NoRow, _R]:
+                _onerow = self._fetchone_impl
+                while True:
+                    row = _onerow()
+                    if row is None:
+                        return _NO_ROW
+                    else:
+                        obj: _InterimRowType[Any] = (
+                            make_row(row) if make_row else row
+                        )
+                        hashed = strategy(obj) if strategy else obj
+                        if hashed in uniques:
+                            continue
+                        else:
+                            uniques.add(hashed)
+                        if post_creational_filter:
+                            obj = post_creational_filter(obj)
+                        return obj  # type: ignore
+
+        else:
+
+            def onerow(self: Result[Any]) -> Union[_NoRow, _R]:
+                row = self._fetchone_impl()
+                if row is None:
+                    return _NO_ROW
+                else:
+                    interim_row: _InterimRowType[Any] = (
+                        make_row(row) if make_row else row
+                    )
+                    if post_creational_filter:
+                        interim_row = post_creational_filter(interim_row)
+                    return interim_row  # type: ignore
+
+        return onerow
+
+    @HasMemoized_ro_memoized_attribute
+    def _manyrow_getter(self) -> Callable[..., List[_R]]:
+        make_row = self._row_getter
+
+        post_creational_filter = self._post_creational_filter
+
+        if self._unique_filter_state:
+            uniques, strategy = self._unique_strategy
+
+            def filterrows(
+                make_row: Optional[Callable[..., _R]],
+                rows: List[Any],
+                strategy: Optional[Callable[[List[Any]], Any]],
+                uniques: Set[Any],
+            ) -> List[_R]:
+                if make_row:
+                    rows = [make_row(row) for row in rows]
+
+                if strategy:
+                    made_rows = (
+                        (made_row, strategy(made_row)) for made_row in rows
+                    )
+                else:
+                    made_rows = ((made_row, made_row) for made_row in rows)
+                return [
+                    made_row
+                    for made_row, sig_row in made_rows
+                    if sig_row not in uniques and not uniques.add(sig_row)  # type: ignore  # noqa: E501
+                ]
+
+            def manyrows(
+                self: ResultInternal[_R], num: Optional[int]
+            ) -> List[_R]:
+                collect: List[_R] = []
+
+                _manyrows = self._fetchmany_impl
+
+                if num is None:
+                    # if None is passed, we don't know the default
+                    # fetchmany size; the DBAPI exposes this as
+                    # cursor.arraysize, and different DBAPIs / fetch
+                    # strategies may differ.  do a fetch to find out what
+                    # the number is; if only fewer rows remain, it
+                    # doesn't matter.
+                    real_result = (
+                        self._real_result
+                        if self._real_result
+                        else cast("Result[Any]", self)
+                    )
+                    if real_result._yield_per:
+                        num_required = num = real_result._yield_per
+                    else:
+                        rows = _manyrows(num)
+                        num = len(rows)
+                        assert make_row is not None
+                        collect.extend(
+                            filterrows(make_row, rows, strategy, uniques)
+                        )
+                        num_required = num - len(collect)
+                else:
+                    num_required = num
+
+                assert num is not None
+
+                while num_required:
+                    rows = _manyrows(num_required)
+                    if not rows:
+                        break
+
+                    collect.extend(
+                        filterrows(make_row, rows, strategy, uniques)
+                    )
+                    num_required = num - len(collect)
+
+                if post_creational_filter:
+                    collect = [post_creational_filter(row) for row in collect]
+                return collect
+
+        else:
+
+            def manyrows(
+                self: ResultInternal[_R], num: Optional[int]
+            ) -> List[_R]:
+                if num is None:
+                    real_result = (
+                        self._real_result
+                        if self._real_result
+                        else cast("Result[Any]", self)
+                    )
+                    num = real_result._yield_per
+
+                rows: List[_InterimRowType[Any]] = self._fetchmany_impl(num)
+                if make_row:
+                    rows = [make_row(row) for row in rows]
+                if post_creational_filter:
+                    rows = [post_creational_filter(row) for row in rows]
+                return rows  # type: ignore
+
+        return manyrows
+
+    @overload
+    def _only_one_row(
+        self,
+        raise_for_second_row: bool,
+        raise_for_none: Literal[True],
+        scalar: bool,
+    ) -> _R: ...
+
+    @overload
+    def _only_one_row(
+        self,
+        raise_for_second_row: bool,
+        raise_for_none: bool,
+        scalar: bool,
+    ) -> Optional[_R]: ...
+
+    def _only_one_row(
+        self,
+        raise_for_second_row: bool,
+        raise_for_none: bool,
+        scalar: bool,
+    ) -> Optional[_R]:
+        onerow = self._fetchone_impl
+
+        row: Optional[_InterimRowType[Any]] = onerow(hard_close=True)
+        if row is None:
+            if raise_for_none:
+                raise exc.NoResultFound(
+                    "No row was found when one was required"
+                )
+            else:
+                return None
+
+        if scalar and self._source_supports_scalars:
+            self._generate_rows = False
+            make_row = None
+        else:
+            make_row = self._row_getter
+
+        try:
+            row = make_row(row) if make_row else row
+        except:
+            self._soft_close(hard=True)
+            raise
+
+        if raise_for_second_row:
+            if self._unique_filter_state:
+                # for no second row but uniqueness, need to essentially
+                # consume the entire result :(
+                uniques, strategy = self._unique_strategy
+
+                existing_row_hash = strategy(row) if strategy else row
+
+                while True:
+                    next_row: Any = onerow(hard_close=True)
+                    if next_row is None:
+                        next_row = _NO_ROW
+                        break
+
+                    try:
+                        next_row = make_row(next_row) if make_row else next_row
+
+                        if strategy:
+                            assert next_row is not _NO_ROW
+                            if existing_row_hash == strategy(next_row):
+                                continue
+                        elif row == next_row:
+                            continue
+                        # here, we have a row and it's different
+                        break
+                    except:
+                        self._soft_close(hard=True)
+                        raise
+            else:
+                next_row = onerow(hard_close=True)
+                if next_row is None:
+                    next_row = _NO_ROW
+
+            if next_row is not _NO_ROW:
+                self._soft_close(hard=True)
+                raise exc.MultipleResultsFound(
+                    "Multiple rows were found when exactly one was required"
+                    if raise_for_none
+                    else "Multiple rows were found when one or none "
+                    "was required"
+                )
+        else:
+            next_row = _NO_ROW
+            # if we checked for second row then that would have
+            # closed us :)
+            self._soft_close(hard=True)
+
+        if not scalar:
+            post_creational_filter = self._post_creational_filter
+            if post_creational_filter:
+                row = post_creational_filter(row)
+
+        if scalar and make_row:
+            return row[0]  # type: ignore
+        else:
+            return row  # type: ignore
+
+    def _iter_impl(self) -> Iterator[_R]:
+        return self._iterator_getter(self)
+
+    def _next_impl(self) -> _R:
+        row = self._onerow_getter(self)
+        if row is _NO_ROW:
+            raise StopIteration()
+        else:
+            return row
+
+    @_generative
+    def _column_slices(self, indexes: Sequence[_KeyIndexType]) -> Self:
+        real_result = (
+            self._real_result
+            if self._real_result
+            else cast("Result[Any]", self)
+        )
+
+        if not real_result._source_supports_scalars or len(indexes) != 1:
+            self._metadata = self._metadata._reduce(indexes)
+
+        assert self._generate_rows
+
+        return self
+
+    @HasMemoized.memoized_attribute
+    def _unique_strategy(self) -> _UniqueFilterStateType:
+        assert self._unique_filter_state is not None
+        uniques, strategy = self._unique_filter_state
+
+        real_result = (
+            self._real_result
+            if self._real_result is not None
+            else cast("Result[Any]", self)
+        )
+
+        if not strategy and self._metadata._unique_filters:
+            if (
+                real_result._source_supports_scalars
+                and not self._generate_rows
+            ):
+                strategy = self._metadata._unique_filters[0]
+            else:
+                filters = self._metadata._unique_filters
+                if self._metadata._tuplefilter:
+                    filters = self._metadata._tuplefilter(filters)
+
+                strategy = operator.methodcaller("_filter_on_values", filters)
+        return uniques, strategy
+
+
+class _WithKeys:
+    __slots__ = ()
+
+    _metadata: ResultMetaData
+
+    # used mainly to share documentation on the keys method.
+    def keys(self) -> RMKeyView:
+        """Return an iterable view which yields the string keys that would
+        be represented by each :class:`_engine.Row`.
+
+        The keys can represent the labels of the columns returned by a Core
+        statement or the names of the ORM classes returned by an ORM
+        execution.
+
+        The view can also be tested for key containment using the Python
+        ``in`` operator, which will test both for the string keys represented
+        in the view, as well as for alternate keys such as column objects.
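+
+        E.g., an illustrative sketch, assuming ``conn`` is a
+        :class:`_engine.Connection` and ``text`` is imported from
+        ``sqlalchemy``::
+
+            result = conn.execute(text("SELECT x, y FROM t"))
+            assert list(result.keys()) == ["x", "y"]
+            assert "x" in result.keys()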
+
+        .. versionchanged:: 1.4 a key view object is returned rather than a
+           plain list.
+
+
+        """
+        return self._metadata.keys
+
+
+class Result(_WithKeys, ResultInternal[Row[_TP]]):
+    """Represent a set of database results.
+
+    .. versionadded:: 1.4  The :class:`_engine.Result` object provides a
+       completely updated usage model and calling facade for SQLAlchemy
+       Core and SQLAlchemy ORM.   In Core, it forms the basis of the
+       :class:`_engine.CursorResult` object which replaces the previous
+       :class:`_engine.ResultProxy` interface.   When using the ORM, a
+       higher level object called :class:`_engine.ChunkedIteratorResult`
+       is normally used.
+
+    .. note:: In SQLAlchemy 1.4 and above, this object is
+       used for ORM results returned by :meth:`_orm.Session.execute`, which can
+       yield instances of ORM mapped objects either individually or within
+       tuple-like rows. Note that the :class:`_engine.Result` object does not
+       deduplicate instances or rows automatically as is the case with the
+       legacy :class:`_orm.Query` object. For in-Python de-duplication of
+       instances or rows, use the :meth:`_engine.Result.unique` modifier
+       method.
+
+    .. seealso::
+
+        :ref:`tutorial_fetching_rows` - in the :doc:`/tutorial/index`
+
+    """
+
+    __slots__ = ("_metadata", "__dict__")
+
+    _row_logging_fn: Optional[Callable[[Row[Any]], Row[Any]]] = None
+
+    _source_supports_scalars: bool = False
+
+    _yield_per: Optional[int] = None
+
+    _attributes: util.immutabledict[Any, Any] = util.immutabledict()
+
+    def __init__(self, cursor_metadata: ResultMetaData):
+        self._metadata = cursor_metadata
+
+    def __enter__(self) -> Self:
+        return self
+
+    def __exit__(self, type_: Any, value: Any, traceback: Any) -> None:
+        self.close()
+
+    def close(self) -> None:
+        """close this :class:`_engine.Result`.
+
+        The behavior of this method is implementation specific, and is
+        not implemented by default.  The method should generally release
+        the resources in use by the result object and also cause any
+        subsequent iteration or row fetching to raise
+        :class:`.ResourceClosedError`.
+
+        .. versionadded:: 1.4.27 - ``.close()`` was previously not generally
+           available for all :class:`_engine.Result` classes, instead only
+           being available on the :class:`_engine.CursorResult` returned for
+           Core statement executions. As most other result objects, namely the
+           ones used by the ORM, are proxying a :class:`_engine.CursorResult`
+           in any case, this allows the underlying cursor result to be closed
+           from the outside facade for the case when the ORM query is using
+           the ``yield_per`` execution option where it does not immediately
+           exhaust and autoclose the database cursor.
+
+        """
+        self._soft_close(hard=True)
+
+    @property
+    def _soft_closed(self) -> bool:
+        raise NotImplementedError()
+
+    @property
+    def closed(self) -> bool:
+        """return ``True`` if this :class:`_engine.Result` reports .closed
+
+        .. versionadded:: 1.4.43
+
+        """
+        raise NotImplementedError()
+
+    @_generative
+    def yield_per(self, num: int) -> Self:
+        """Configure the row-fetching strategy to fetch ``num`` rows at a time.
+
+        This impacts the underlying behavior of the result when iterating over
+        the result object, or otherwise making use of methods such as
+        :meth:`_engine.Result.fetchone` that return one row at a time.  Data
+        from the underlying cursor or other data source will be buffered up to
+        this many rows in memory, and the buffered collection will then be
+        yielded out one row at a time or as many rows as are requested.  Each
+        time the buffer clears, it will be refilled to this many rows, or as
+        many rows as remain if fewer remain.
+
+        The :meth:`_engine.Result.yield_per` method is generally used in
+        conjunction with the
+        :paramref:`_engine.Connection.execution_options.stream_results`
+        execution option, which will allow the database dialect in use to make
+        use of a server side cursor, if the DBAPI supports a specific "server
+        side cursor" mode separate from its default mode of operation.
+
+        .. tip::
+
+            Consider using the
+            :paramref:`_engine.Connection.execution_options.yield_per`
+            execution option, which will simultaneously set
+            :paramref:`_engine.Connection.execution_options.stream_results`
+            to ensure the use of server side cursors, as well as automatically
+            invoke the :meth:`_engine.Result.yield_per` method to establish
+            a fixed row buffer size at once.
+
+            The :paramref:`_engine.Connection.execution_options.yield_per`
+            execution option is available for ORM operations, with
+            :class:`_orm.Session`-oriented use described at
+            :ref:`orm_queryguide_yield_per`. The Core-only version which works
+            with :class:`_engine.Connection` is new as of SQLAlchemy 1.4.40.
+
+        .. versionadded:: 1.4
+
+        :param num: number of rows to fetch each time the buffer is refilled.
+         If set to a value below 1, fetches all rows for the next buffer.
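+
+        E.g., a sketch of streaming rows in batches of 100, assuming a
+        :class:`_engine.Connection` named ``conn`` and a statement
+        ``stmt``::
+
+            result = conn.execution_options(stream_results=True).execute(stmt)
+            for row in result.yield_per(100):
+                ...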
+
+        .. seealso::
+
+            :ref:`engine_stream_results` - describes Core behavior for
+            :meth:`_engine.Result.yield_per`
+
+            :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
+
+        """
+        self._yield_per = num
+        return self
+
+    @_generative
+    def unique(self, strategy: Optional[_UniqueFilterType] = None) -> Self:
+        """Apply unique filtering to the objects returned by this
+        :class:`_engine.Result`.
+
+        When this filter is applied with no arguments, the rows or objects
+        returned will be filtered such that each row is returned uniquely.
+        The algorithm used to determine this uniqueness is by default the
+        Python hashing identity of the whole tuple.  In some cases a
+        specialized per-entity hashing scheme may be used; for example, when
+        using the ORM, a scheme is applied which works against the primary
+        key identity of returned objects.
+
+        The unique filter is applied **after all other filters**, which means
+        if the columns returned have been refined using a method such as the
+        :meth:`_engine.Result.columns` or :meth:`_engine.Result.scalars`
+        method, the uniquing is applied to **only the column or columns
+        returned**.   This occurs regardless of the order in which these
+        methods have been called upon the :class:`_engine.Result` object.
+
+        The unique filter also changes the calculus used for methods like
+        :meth:`_engine.Result.fetchmany` and :meth:`_engine.Result.partitions`.
+        When using :meth:`_engine.Result.unique`, these methods will continue
+        to yield the number of rows or objects requested, after uniquing
+        has been applied.  However, this necessarily impacts the buffering
+        behavior of the underlying cursor or datasource, such that multiple
+        underlying calls to ``cursor.fetchmany()`` may be necessary in order
+        to accumulate enough objects in order to provide a unique collection
+        of the requested size.
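+
+        E.g., a minimal sketch, deduplicating on the full row tuple
+        (``user_table`` is illustrative)::
+
+            result = conn.execute(select(user_table.c.city)).unique()
+            # each distinct row value is returned only once
+            rows = result.all()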
+
+        :param strategy: a callable that will be applied to rows or objects
+         being iterated, which should return an object that represents the
+         unique value of the row.   A Python ``set()`` is used to store
+         these identities.   If not passed, a default uniqueness strategy
+         is used which may have been assembled by the source of this
+         :class:`_engine.Result` object.
+
+        """
+        self._unique_filter_state = (set(), strategy)
+        return self
+
+    def columns(self, *col_expressions: _KeyIndexType) -> Self:
+        r"""Establish the columns that should be returned in each row.
+
+        This method may be used to limit the columns returned as well
+        as to reorder them.   The given list of expressions is normally
+        a series of integers or string key names.   They may also be
+        appropriate :class:`.ColumnElement` objects which correspond to
+        a given statement construct.
+
+        .. versionchanged:: 2.0  Due to a bug in 1.4, the
+           :meth:`_engine.Result.columns` method had an incorrect behavior
+           where calling upon the method with just one index would cause the
+           :class:`_engine.Result` object to yield scalar values rather than
+           :class:`_engine.Row` objects.   In version 2.0, this behavior
+           has been corrected such that calling upon
+           :meth:`_engine.Result.columns` with a single index will
+           produce a :class:`_engine.Result` object that continues
+           to yield :class:`_engine.Row` objects, which include
+           only a single column.
+
+        E.g.::
+
+            statement = select(table.c.x, table.c.y, table.c.z)
+            result = connection.execute(statement)
+
+            for z, y in result.columns("z", "y"):
+                ...
+
+        Example of using the column objects from the statement itself::
+
+            for z, y in result.columns(
+                statement.selected_columns.c.z, statement.selected_columns.c.y
+            ):
+                ...
+
+        .. versionadded:: 1.4
+
+        :param \*col_expressions: indicates columns to be returned.  Elements
+         may be integer row indexes, string column names, or appropriate
+         :class:`.ColumnElement` objects corresponding to a select construct.
+
+        :return: this :class:`_engine.Result` object with the modifications
+         given.
+
+        """
+        return self._column_slices(col_expressions)
+
+    @overload
+    def scalars(self: Result[Tuple[_T]]) -> ScalarResult[_T]: ...
+
+    @overload
+    def scalars(
+        self: Result[Tuple[_T]], index: Literal[0]
+    ) -> ScalarResult[_T]: ...
+
+    @overload
+    def scalars(self, index: _KeyIndexType = 0) -> ScalarResult[Any]: ...
+
+    def scalars(self, index: _KeyIndexType = 0) -> ScalarResult[Any]:
+        """Return a :class:`_engine.ScalarResult` filtering object which
+        will return single elements rather than :class:`_row.Row` objects.
+
+        E.g.::
+
+            >>> result = conn.execute(text("select int_id from table"))
+            >>> result.scalars().all()
+            [1, 2, 3]
+
+        When results are fetched from the :class:`_engine.ScalarResult`
+        filtering object, the single column-row that would be returned by the
+        :class:`_engine.Result` is instead returned as the column's value.
+
+        .. versionadded:: 1.4
+
+        :param index: integer or row key indicating the column to be fetched
+         from each row, defaults to ``0`` indicating the first column.
+
+        :return: a new :class:`_engine.ScalarResult` filtering object referring
+         to this :class:`_engine.Result` object.
+
+        """
+        return ScalarResult(self, index)
+
+    def _getter(
+        self, key: _KeyIndexType, raiseerr: bool = True
+    ) -> Optional[Callable[[Row[Any]], Any]]:
+        """return a callable that will retrieve the given key from a
+        :class:`_engine.Row`.
+
+        """
+        if self._source_supports_scalars:
+            raise NotImplementedError(
+                "can't use this function in 'only scalars' mode"
+            )
+        return self._metadata._getter(key, raiseerr)
+
+    def _tuple_getter(self, keys: Sequence[_KeyIndexType]) -> _TupleGetterType:
+        """return a callable that will retrieve the given keys from a
+        :class:`_engine.Row`.
+
+        """
+        if self._source_supports_scalars:
+            raise NotImplementedError(
+                "can't use this function in 'only scalars' mode"
+            )
+        return self._metadata._row_as_tuple_getter(keys)
+
+    def mappings(self) -> MappingResult:
+        """Apply a mappings filter to returned rows, returning an instance of
+        :class:`_engine.MappingResult`.
+
+        When this filter is applied, fetching rows will return
+        :class:`_engine.RowMapping` objects instead of :class:`_engine.Row`
+        objects.
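+
+        E.g., a minimal sketch (column names are illustrative)::
+
+            result = conn.execute(select(user_table))
+            for row_mapping in result.mappings():
+                print(row_mapping["id"], row_mapping["name"])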
+
+        .. versionadded:: 1.4
+
+        :return: a new :class:`_engine.MappingResult` filtering object
+         referring to this :class:`_engine.Result` object.
+
+        """
+
+        return MappingResult(self)
+
+    @property
+    def t(self) -> TupleResult[_TP]:
+        """Apply a "typed tuple" typing filter to returned rows.
+
+        The :attr:`_engine.Result.t` attribute is a synonym for
+        calling the :meth:`_engine.Result.tuples` method.
+
+        .. versionadded:: 2.0
+
+        """
+        return self  # type: ignore
+
+    def tuples(self) -> TupleResult[_TP]:
+        """Apply a "typed tuple" typing filter to returned rows.
+
+        This method returns the same :class:`_engine.Result` object
+        at runtime; however, it is annotated as returning a
+        :class:`_engine.TupleResult` object that will indicate to
+        :pep:`484` typing tools that plain typed ``Tuple`` instances are
+        returned rather than rows.  This allows tuple unpacking and
+        ``__getitem__`` access of :class:`_engine.Row` objects to be
+        typed, for those cases where the statement invoked itself included
+        typing information.
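+
+        E.g., a sketch of the typing behavior (``user_table`` is
+        illustrative)::
+
+            # revealed as TupleResult[Tuple[int, str]] when the columns
+            # carry typing information
+            tuples = connection.execute(
+                select(user_table.c.id, user_table.c.name)
+            ).tuples()
+            for id_, name in tuples:
+                ...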
+
+        .. versionadded:: 2.0
+
+        :return: the :class:`_engine.TupleResult` type at typing time.
+
+        .. seealso::
+
+            :attr:`_engine.Result.t` - shorter synonym
+
+            :attr:`_engine.Row._t` - :class:`_engine.Row` version
+
+        """
+
+        return self  # type: ignore
+
+    def _raw_row_iterator(self) -> Iterator[_RowData]:
+        """Return a safe iterator that yields raw row data.
+
+        This is used by the :meth:`_engine.Result.merge` method
+        to merge multiple compatible results together.
+
+        """
+        raise NotImplementedError()
+
+    def __iter__(self) -> Iterator[Row[_TP]]:
+        return self._iter_impl()
+
+    def __next__(self) -> Row[_TP]:
+        return self._next_impl()
+
+    def partitions(
+        self, size: Optional[int] = None
+    ) -> Iterator[Sequence[Row[_TP]]]:
+        """Iterate through sub-lists of rows of the size given.
+
+        Each list will be of the size given, excluding the last list to
+        be yielded, which may have a smaller number of rows.  No empty
+        lists will be yielded.
+
+        The result object is automatically closed when the iterator
+        is fully consumed.
+
+        Note that the backend driver will usually buffer the entire result
+        ahead of time unless the
+        :paramref:`.Connection.execution_options.stream_results` execution
+        option is used indicating that the driver should not pre-buffer
+        results, if possible.   Not all drivers support this option and
+        the option is silently ignored for those that do not.
+
+        When using the ORM, the :meth:`_engine.Result.partitions` method
+        is typically more effective from a memory perspective when it is
+        combined with use of the
+        :ref:`yield_per execution option <orm_queryguide_yield_per>`,
+        which instructs the DBAPI driver to use server side cursors,
+        if available, and also instructs the ORM loading internals to build
+        only a certain number of ORM objects from a result at a time before
+        yielding them out.
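+
+        E.g., a minimal Core sketch (``user_table`` is illustrative)::
+
+            with engine.connect() as conn:
+                conn = conn.execution_options(yield_per=100)
+                result = conn.execute(select(user_table))
+                for partition in result.partitions():
+                    # each partition is a list of up to 100 rows
+                    ...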
+
+        .. versionadded:: 1.4
+
+        :param size: indicate the maximum number of rows to be present
+         in each list yielded.  If None, makes use of the value set by
+         the :meth:`_engine.Result.yield_per` method, if it was called,
+         or the :paramref:`_engine.Connection.execution_options.yield_per`
+         execution option, which is equivalent in this regard.  If
+         yield_per was not set, it makes use of the
+         :meth:`_engine.Result.fetchmany` default, which may be backend
+         specific and not well defined.
+
+        :return: iterator of lists
+
+        .. seealso::
+
+            :ref:`engine_stream_results`
+
+            :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
+
+        """
+
+        getter = self._manyrow_getter
+
+        while True:
+            partition = getter(self, size)
+            if partition:
+                yield partition
+            else:
+                break
+
+    def fetchall(self) -> Sequence[Row[_TP]]:
+        """A synonym for the :meth:`_engine.Result.all` method."""
+
+        return self._allrows()
+
+    def fetchone(self) -> Optional[Row[_TP]]:
+        """Fetch one row.
+
+        When all rows are exhausted, returns None.
+
+        This method is provided for backwards compatibility with
+        SQLAlchemy 1.x.x.
+
+        To fetch the first row of a result only, use the
+        :meth:`_engine.Result.first` method.  To iterate through all
+        rows, iterate the :class:`_engine.Result` object directly.
+
+        :return: a :class:`_engine.Row` object if no filters are applied,
+         or ``None`` if no rows remain.
+
+        """
+        row = self._onerow_getter(self)
+        if row is _NO_ROW:
+            return None
+        else:
+            return row
+
+    def fetchmany(self, size: Optional[int] = None) -> Sequence[Row[_TP]]:
+        """Fetch many rows.
+
+        When all rows are exhausted, returns an empty sequence.
+
+        This method is provided for backwards compatibility with
+        SQLAlchemy 1.x.x.
+
+        To fetch rows in groups, use the :meth:`_engine.Result.partitions`
+        method.
+
+        :return: a sequence of :class:`_engine.Row` objects.
+
+        .. seealso::
+
+            :meth:`_engine.Result.partitions`
+
+        """
+
+        return self._manyrow_getter(self, size)
+
+    def all(self) -> Sequence[Row[_TP]]:
+        """Return all rows in a sequence.
+
+        Closes the result set after invocation.   Subsequent invocations
+        will return an empty sequence.
+
+        .. versionadded:: 1.4
+
+        :return: a sequence of :class:`_engine.Row` objects.
+
+        .. seealso::
+
+            :ref:`engine_stream_results` - How to stream a large result set
+            without loading it completely in Python.
+
+        """
+
+        return self._allrows()
+
+    def first(self) -> Optional[Row[_TP]]:
+        """Fetch the first row or ``None`` if no row is present.
+
+        Closes the result set and discards remaining rows.
+
+        .. note::  This method returns one **row**, e.g. a tuple, by default.
+           To return exactly one single scalar value, that is, the first
+           column of the first row, use the
+           :meth:`_engine.Result.scalar` method,
+           or combine :meth:`_engine.Result.scalars` and
+           :meth:`_engine.Result.first`.
+
+           Additionally, in contrast to the behavior of the legacy ORM
+           :meth:`_orm.Query.first` method, **no limit is applied** to the
+           SQL query which was invoked to produce this
+           :class:`_engine.Result`;
+           for a DBAPI driver that buffers results in memory before yielding
+           rows, all rows will be sent to the Python process and all but
+           the first row will be discarded.
+
+           .. seealso::
+
+                :ref:`migration_20_unify_select`
+
+        :return: a :class:`_engine.Row` object, or ``None``
+         if no rows remain.
+
+        .. seealso::
+
+            :meth:`_engine.Result.scalar`
+
+            :meth:`_engine.Result.one`
+
+        """
+
+        return self._only_one_row(
+            raise_for_second_row=False, raise_for_none=False, scalar=False
+        )
+
+    def one_or_none(self) -> Optional[Row[_TP]]:
+        """Return at most one result or raise an exception.
+
+        Returns ``None`` if the result has no rows.
+        Raises :class:`.MultipleResultsFound`
+        if multiple rows are returned.
+
+        .. versionadded:: 1.4
+
+        :return: The first :class:`_engine.Row` or ``None`` if no row
+         is available.
+
+        :raises: :class:`.MultipleResultsFound`
+
+        .. seealso::
+
+            :meth:`_engine.Result.first`
+
+            :meth:`_engine.Result.one`
+
+        """
+        return self._only_one_row(
+            raise_for_second_row=True, raise_for_none=False, scalar=False
+        )
+
+    @overload
+    def scalar_one(self: Result[Tuple[_T]]) -> _T: ...
+
+    @overload
+    def scalar_one(self) -> Any: ...
+
+    def scalar_one(self) -> Any:
+        """Return exactly one scalar result or raise an exception.
+
+        This is equivalent to calling :meth:`_engine.Result.scalars` and
+        then :meth:`_engine.ScalarResult.one`.
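+
+        E.g., a typical sketch (``user_table`` is illustrative)::
+
+            count = conn.execute(
+                select(func.count()).select_from(user_table)
+            ).scalar_one()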
+
+        .. seealso::
+
+            :meth:`_engine.ScalarResult.one`
+
+            :meth:`_engine.Result.scalars`
+
+        """
+        return self._only_one_row(
+            raise_for_second_row=True, raise_for_none=True, scalar=True
+        )
+
+    @overload
+    def scalar_one_or_none(self: Result[Tuple[_T]]) -> Optional[_T]: ...
+
+    @overload
+    def scalar_one_or_none(self) -> Optional[Any]: ...
+
+    def scalar_one_or_none(self) -> Optional[Any]:
+        """Return exactly one scalar result or ``None``.
+
+        This is equivalent to calling :meth:`_engine.Result.scalars` and
+        then :meth:`_engine.ScalarResult.one_or_none`.
+
+        .. seealso::
+
+            :meth:`_engine.ScalarResult.one_or_none`
+
+            :meth:`_engine.Result.scalars`
+
+        """
+        return self._only_one_row(
+            raise_for_second_row=True, raise_for_none=False, scalar=True
+        )
+
+    def one(self) -> Row[_TP]:
+        """Return exactly one row or raise an exception.
+
+        Raises :class:`.NoResultFound` if the result returns no
+        rows, or :class:`.MultipleResultsFound` if multiple rows
+        would be returned.
+
+        .. note::  This method returns one **row**, e.g. a tuple, by default.
+           To return exactly one single scalar value, that is, the first
+           column of the first row, use the
+           :meth:`_engine.Result.scalar_one` method, or combine
+           :meth:`_engine.Result.scalars` and
+           :meth:`_engine.Result.one`.
+
+        .. versionadded:: 1.4
+
+        :return: The first :class:`_engine.Row`.
+
+        :raises: :class:`.MultipleResultsFound`, :class:`.NoResultFound`
+
+        .. seealso::
+
+            :meth:`_engine.Result.first`
+
+            :meth:`_engine.Result.one_or_none`
+
+            :meth:`_engine.Result.scalar_one`
+
+        """
+        return self._only_one_row(
+            raise_for_second_row=True, raise_for_none=True, scalar=False
+        )
+
+    @overload
+    def scalar(self: Result[Tuple[_T]]) -> Optional[_T]: ...
+
+    @overload
+    def scalar(self) -> Any: ...
+
+    def scalar(self) -> Any:
+        """Fetch the first column of the first row, and close the result set.
+
+        Returns ``None`` if there are no rows to fetch.
+
+        No validation is performed to test if additional rows remain.
+
+        After calling this method, the object is fully closed,
+        e.g. the :meth:`_engine.CursorResult.close`
+        method will have been called.
+
+        :return: a Python scalar value, or ``None`` if no rows remain.
+
+        """
+        return self._only_one_row(
+            raise_for_second_row=False, raise_for_none=False, scalar=True
+        )
+
+    def freeze(self) -> FrozenResult[_TP]:
+        """Return a callable object that will produce copies of this
+        :class:`_engine.Result` when invoked.
+
+        The callable object returned is an instance of
+        :class:`_engine.FrozenResult`.
+
+        This is used for result set caching.  The method must be called
+        on the result while it is still unconsumed; calling the method
+        will consume the result fully.   When the :class:`_engine.FrozenResult`
+        is retrieved from a cache, it can be called any number of times,
+        producing a new :class:`_engine.Result` object each time
+        against its stored set of rows.
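+
+        E.g.::
+
+            frozen = result.freeze()
+
+            # the FrozenResult may be invoked any number of times
+            result_copy = frozen()
+            rows = result_copy.all()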
+
+        .. seealso::
+
+            :ref:`do_orm_execute_re_executing` - example usage within the
+            ORM to implement a result-set cache.
+
+        """
+
+        return FrozenResult(self)
+
+    def merge(self, *others: Result[Any]) -> MergedResult[_TP]:
+        """Merge this :class:`_engine.Result` with other compatible result
+        objects.
+
+        The object returned is an instance of :class:`_engine.MergedResult`,
+        which will be composed of iterators from the given result
+        objects.
+
+        The new result will use the metadata from this result object.
+        The subsequent result objects must be against an identical
+        set of result / cursor metadata, otherwise the behavior is
+        undefined.
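+
+        E.g., a sketch merging two results produced from the same
+        statement::
+
+            r1 = conn.execute(stmt)
+            r2 = conn.execute(stmt)
+            for row in r1.merge(r2):
+                ...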
+
+        """
+        return MergedResult(self._metadata, (self,) + others)
+
+
+class FilterResult(ResultInternal[_R]):
+    """A wrapper for a :class:`_engine.Result` that returns objects other than
+    :class:`_engine.Row` objects, such as dictionaries or scalar objects.
+
+    :class:`_engine.FilterResult` is the common base for additional result
+    APIs including :class:`_engine.MappingResult`,
+    :class:`_engine.ScalarResult` and :class:`_engine.AsyncResult`.
+
+    """
+
+    __slots__ = (
+        "_real_result",
+        "_post_creational_filter",
+        "_metadata",
+        "_unique_filter_state",
+        "__dict__",
+    )
+
+    _post_creational_filter: Optional[Callable[[Any], Any]]
+
+    _real_result: Result[Any]
+
+    def __enter__(self) -> Self:
+        return self
+
+    def __exit__(self, type_: Any, value: Any, traceback: Any) -> None:
+        self._real_result.__exit__(type_, value, traceback)
+
+    @_generative
+    def yield_per(self, num: int) -> Self:
+        """Configure the row-fetching strategy to fetch ``num`` rows at a time.
+
+        The :meth:`_engine.FilterResult.yield_per` method is a pass through
+        to the :meth:`_engine.Result.yield_per` method.  See that method's
+        documentation for usage notes.
+
+        .. versionadded:: 1.4.40 - added :meth:`_engine.FilterResult.yield_per`
+           so that the method is available on all result set implementations
+
+        .. seealso::
+
+            :ref:`engine_stream_results` - describes Core behavior for
+            :meth:`_engine.Result.yield_per`
+
+            :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
+
+        """
+        self._real_result = self._real_result.yield_per(num)
+        return self
+
+    def _soft_close(self, hard: bool = False) -> None:
+        self._real_result._soft_close(hard=hard)
+
+    @property
+    def _soft_closed(self) -> bool:
+        return self._real_result._soft_closed
+
+    @property
+    def closed(self) -> bool:
+        """Return ``True`` if the underlying :class:`_engine.Result` reports
+        closed
+
+        .. versionadded:: 1.4.43
+
+        """
+        return self._real_result.closed
+
+    def close(self) -> None:
+        """Close this :class:`_engine.FilterResult`.
+
+        .. versionadded:: 1.4.43
+
+        """
+        self._real_result.close()
+
+    @property
+    def _attributes(self) -> Dict[Any, Any]:
+        return self._real_result._attributes
+
+    def _fetchiter_impl(self) -> Iterator[_InterimRowType[Row[Any]]]:
+        return self._real_result._fetchiter_impl()
+
+    def _fetchone_impl(
+        self, hard_close: bool = False
+    ) -> Optional[_InterimRowType[Row[Any]]]:
+        return self._real_result._fetchone_impl(hard_close=hard_close)
+
+    def _fetchall_impl(self) -> List[_InterimRowType[Row[Any]]]:
+        return self._real_result._fetchall_impl()
+
+    def _fetchmany_impl(
+        self, size: Optional[int] = None
+    ) -> List[_InterimRowType[Row[Any]]]:
+        return self._real_result._fetchmany_impl(size=size)
+
+
+class ScalarResult(FilterResult[_R]):
+    """A wrapper for a :class:`_engine.Result` that returns scalar values
+    rather than :class:`_row.Row` values.
+
+    The :class:`_engine.ScalarResult` object is acquired by calling the
+    :meth:`_engine.Result.scalars` method.
+
+    A special limitation of :class:`_engine.ScalarResult` is that it has
+    no ``fetchone()`` method; since the semantics of ``fetchone()`` are that
+    the ``None`` value indicates no more results, this is not compatible
+    with :class:`_engine.ScalarResult` since there is no way to distinguish
+    between ``None`` as a row value and ``None`` as an indicator.  Use
+    ``next(result)`` to receive values individually.
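+
+    E.g., a minimal sketch (``user_table`` is illustrative)::
+
+        scalar_result = conn.execute(select(user_table.c.id)).scalars()
+        # raises StopIteration, rather than returning None, when exhausted
+        first_id = next(scalar_result)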
+
+    """
+
+    __slots__ = ()
+
+    _generate_rows = False
+
+    _post_creational_filter: Optional[Callable[[Any], Any]]
+
+    def __init__(self, real_result: Result[Any], index: _KeyIndexType):
+        self._real_result = real_result
+
+        if real_result._source_supports_scalars:
+            self._metadata = real_result._metadata
+            self._post_creational_filter = None
+        else:
+            self._metadata = real_result._metadata._reduce([index])
+            self._post_creational_filter = operator.itemgetter(0)
+
+        self._unique_filter_state = real_result._unique_filter_state
+
+    def unique(self, strategy: Optional[_UniqueFilterType] = None) -> Self:
+        """Apply unique filtering to the objects returned by this
+        :class:`_engine.ScalarResult`.
+
+        See :meth:`_engine.Result.unique` for usage details.
+
+        """
+        self._unique_filter_state = (set(), strategy)
+        return self
+
+    def partitions(self, size: Optional[int] = None) -> Iterator[Sequence[_R]]:
+        """Iterate through sub-lists of elements of the size given.
+
+        Equivalent to :meth:`_engine.Result.partitions` except that
+        scalar values, rather than :class:`_engine.Row` objects,
+        are returned.
+
+        """
+
+        getter = self._manyrow_getter
+
+        while True:
+            partition = getter(self, size)
+            if partition:
+                yield partition
+            else:
+                break
+
+    def fetchall(self) -> Sequence[_R]:
+        """A synonym for the :meth:`_engine.ScalarResult.all` method."""
+
+        return self._allrows()
+
+    def fetchmany(self, size: Optional[int] = None) -> Sequence[_R]:
+        """Fetch many objects.
+
+        Equivalent to :meth:`_engine.Result.fetchmany` except that
+        scalar values, rather than :class:`_engine.Row` objects,
+        are returned.
+
+        """
+        return self._manyrow_getter(self, size)
+
+    def all(self) -> Sequence[_R]:
+        """Return all scalar values in a sequence.
+
+        Equivalent to :meth:`_engine.Result.all` except that
+        scalar values, rather than :class:`_engine.Row` objects,
+        are returned.
+
+        """
+        return self._allrows()
+
+    def __iter__(self) -> Iterator[_R]:
+        return self._iter_impl()
+
+    def __next__(self) -> _R:
+        return self._next_impl()
+
+    def first(self) -> Optional[_R]:
+        """Fetch the first object or ``None`` if no object is present.
+
+        Equivalent to :meth:`_engine.Result.first` except that
+        scalar values, rather than :class:`_engine.Row` objects,
+        are returned.
+
+
+        """
+        return self._only_one_row(
+            raise_for_second_row=False, raise_for_none=False, scalar=False
+        )
+
+    def one_or_none(self) -> Optional[_R]:
+        """Return at most one object or raise an exception.
+
+        Equivalent to :meth:`_engine.Result.one_or_none` except that
+        scalar values, rather than :class:`_engine.Row` objects,
+        are returned.
+
+        """
+        return self._only_one_row(
+            raise_for_second_row=True, raise_for_none=False, scalar=False
+        )
+
+    def one(self) -> _R:
+        """Return exactly one object or raise an exception.
+
+        Equivalent to :meth:`_engine.Result.one` except that
+        scalar values, rather than :class:`_engine.Row` objects,
+        are returned.
+
+        """
+        return self._only_one_row(
+            raise_for_second_row=True, raise_for_none=True, scalar=False
+        )
+
+
+class TupleResult(FilterResult[_R], util.TypingOnly):
+    """A :class:`_engine.Result` that's typed as returning plain
+    Python tuples instead of rows.
+
+    Since :class:`_engine.Row` acts like a tuple in every way already,
+    this class is a typing only class, regular :class:`_engine.Result` is
+    still used at runtime.
+
+    """
+
+    __slots__ = ()
+
+    if TYPE_CHECKING:
+
+        def partitions(
+            self, size: Optional[int] = None
+        ) -> Iterator[Sequence[_R]]:
+            """Iterate through sub-lists of elements of the size given.
+
+            Equivalent to :meth:`_engine.Result.partitions` except that
+            tuple values, rather than :class:`_engine.Row` objects,
+            are returned.
+
+            """
+            ...
+
+        def fetchone(self) -> Optional[_R]:
+            """Fetch one tuple.
+
+            Equivalent to :meth:`_engine.Result.fetchone` except that
+            tuple values, rather than :class:`_engine.Row`
+            objects, are returned.
+
+            """
+            ...
+
+        def fetchall(self) -> Sequence[_R]:
+            """A synonym for the :meth:`_engine.ScalarResult.all` method."""
+            ...
+
+        def fetchmany(self, size: Optional[int] = None) -> Sequence[_R]:
+            """Fetch many objects.
+
+            Equivalent to :meth:`_engine.Result.fetchmany` except that
+            tuple values, rather than :class:`_engine.Row` objects,
+            are returned.
+
+            """
+            ...
+
+        def all(self) -> Sequence[_R]:  # noqa: A001
+            """Return all scalar values in a sequence.
+
+            Equivalent to :meth:`_engine.Result.all` except that
+            tuple values, rather than :class:`_engine.Row` objects,
+            are returned.
+
+            """
+            ...
+
+        def __iter__(self) -> Iterator[_R]: ...
+
+        def __next__(self) -> _R: ...
+
+        def first(self) -> Optional[_R]:
+            """Fetch the first object or ``None`` if no object is present.
+
+            Equivalent to :meth:`_engine.Result.first` except that
+            tuple values, rather than :class:`_engine.Row` objects,
+            are returned.
+
+
+            """
+            ...
+
+        def one_or_none(self) -> Optional[_R]:
+            """Return at most one object or raise an exception.
+
+            Equivalent to :meth:`_engine.Result.one_or_none` except that
+            tuple values, rather than :class:`_engine.Row` objects,
+            are returned.
+
+            """
+            ...
+
+        def one(self) -> _R:
+            """Return exactly one object or raise an exception.
+
+            Equivalent to :meth:`_engine.Result.one` except that
+            tuple values, rather than :class:`_engine.Row` objects,
+            are returned.
+
+            """
+            ...
+
+        @overload
+        def scalar_one(self: TupleResult[Tuple[_T]]) -> _T: ...
+
+        @overload
+        def scalar_one(self) -> Any: ...
+
+        def scalar_one(self) -> Any:
+            """Return exactly one scalar result or raise an exception.
+
+            This is equivalent to calling :meth:`_engine.Result.scalars`
+            and then :meth:`_engine.ScalarResult.one`.
+
+            .. seealso::
+
+                :meth:`_engine.ScalarResult.one`
+
+                :meth:`_engine.Result.scalars`
+
+            """
+            ...
+
+        @overload
+        def scalar_one_or_none(
+            self: TupleResult[Tuple[_T]],
+        ) -> Optional[_T]: ...
+
+        @overload
+        def scalar_one_or_none(self) -> Optional[Any]: ...
+
+        def scalar_one_or_none(self) -> Optional[Any]:
+            """Return exactly one or no scalar result.
+
+            This is equivalent to calling :meth:`_engine.Result.scalars`
+            and then :meth:`_engine.ScalarResult.one_or_none`.
+
+            .. seealso::
+
+                :meth:`_engine.ScalarResult.one_or_none`
+
+                :meth:`_engine.Result.scalars`
+
+            """
+            ...
+
+        @overload
+        def scalar(self: TupleResult[Tuple[_T]]) -> Optional[_T]: ...
+
+        @overload
+        def scalar(self) -> Any: ...
+
+        def scalar(self) -> Any:
+            """Fetch the first column of the first row, and close the result
+            set.
+
+            Returns ``None`` if there are no rows to fetch.
+
+            No validation is performed to test if additional rows remain.
+
+            After calling this method, the object is fully closed,
+            e.g. the :meth:`_engine.CursorResult.close`
+            method will have been called.
+
+            :return: a Python scalar value, or ``None`` if no rows remain.
+
+            """
+            ...
+
+
+class MappingResult(_WithKeys, FilterResult[RowMapping]):
+    """A wrapper for a :class:`_engine.Result` that returns dictionary values
+    rather than :class:`_engine.Row` values.
+
+    The :class:`_engine.MappingResult` object is acquired by calling the
+    :meth:`_engine.Result.mappings` method.
+
+    """
+
+    __slots__ = ()
+
+    _generate_rows = True
+
+    _post_creational_filter = operator.attrgetter("_mapping")
+
+    def __init__(self, result: Result[Any]):
+        self._real_result = result
+        self._unique_filter_state = result._unique_filter_state
+        self._metadata = result._metadata
+        if result._source_supports_scalars:
+            self._metadata = self._metadata._reduce([0])
+
+    def unique(self, strategy: Optional[_UniqueFilterType] = None) -> Self:
+        """Apply unique filtering to the objects returned by this
+        :class:`_engine.MappingResult`.
+
+        See :meth:`_engine.Result.unique` for usage details.
+
+        """
+        self._unique_filter_state = (set(), strategy)
+        return self
+
+    def columns(self, *col_expressions: _KeyIndexType) -> Self:
+        r"""Establish the columns that should be returned in each row."""
+        return self._column_slices(col_expressions)
+
+    def partitions(
+        self, size: Optional[int] = None
+    ) -> Iterator[Sequence[RowMapping]]:
+        """Iterate through sub-lists of elements of the size given.
+
+        Equivalent to :meth:`_engine.Result.partitions` except that
+        :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
+        objects, are returned.
+
+        """
+
+        getter = self._manyrow_getter
+
+        while True:
+            partition = getter(self, size)
+            if partition:
+                yield partition
+            else:
+                break
+
+    def fetchall(self) -> Sequence[RowMapping]:
+        """A synonym for the :meth:`_engine.MappingResult.all` method."""
+
+        return self._allrows()
+
+    def fetchone(self) -> Optional[RowMapping]:
+        """Fetch one object.
+
+        Equivalent to :meth:`_engine.Result.fetchone` except that
+        :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
+        objects, are returned.
+
+        """
+
+        row = self._onerow_getter(self)
+        if row is _NO_ROW:
+            return None
+        else:
+            return row
+
+    def fetchmany(self, size: Optional[int] = None) -> Sequence[RowMapping]:
+        """Fetch many objects.
+
+        Equivalent to :meth:`_engine.Result.fetchmany` except that
+        :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
+        objects, are returned.
+
+        """
+
+        return self._manyrow_getter(self, size)
+
+    def all(self) -> Sequence[RowMapping]:
+        """Return all scalar values in a sequence.
+
+        Equivalent to :meth:`_engine.Result.all` except that
+        :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
+        objects, are returned.
+
+        """
+
+        return self._allrows()
+
+    def __iter__(self) -> Iterator[RowMapping]:
+        return self._iter_impl()
+
+    def __next__(self) -> RowMapping:
+        return self._next_impl()
+
+    def first(self) -> Optional[RowMapping]:
+        """Fetch the first object or ``None`` if no object is present.
+
+        Equivalent to :meth:`_engine.Result.first` except that
+        :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
+        objects, are returned.
+
+
+        """
+        return self._only_one_row(
+            raise_for_second_row=False, raise_for_none=False, scalar=False
+        )
+
+    def one_or_none(self) -> Optional[RowMapping]:
+        """Return at most one object or raise an exception.
+
+        Equivalent to :meth:`_engine.Result.one_or_none` except that
+        :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
+        objects, are returned.
+
+        """
+        return self._only_one_row(
+            raise_for_second_row=True, raise_for_none=False, scalar=False
+        )
+
+    def one(self) -> RowMapping:
+        """Return exactly one object or raise an exception.
+
+        Equivalent to :meth:`_engine.Result.one` except that
+        :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
+        objects, are returned.
+
+        """
+        return self._only_one_row(
+            raise_for_second_row=True, raise_for_none=True, scalar=False
+        )
+
+
+class FrozenResult(Generic[_TP]):
+    """Represents a :class:`_engine.Result` object in a "frozen" state suitable
+    for caching.
+
+    The :class:`_engine.FrozenResult` object is returned from the
+    :meth:`_engine.Result.freeze` method of any :class:`_engine.Result`
+    object.
+
+    A new iterable :class:`_engine.Result` object is generated from a fixed
+    set of data each time the :class:`_engine.FrozenResult` is invoked as
+    a callable::
+
+
+        result = connection.execute(query)
+
+        frozen = result.freeze()
+
+        unfrozen_result_one = frozen()
+
+        for row in unfrozen_result_one:
+            print(row)
+
+        unfrozen_result_two = frozen()
+        rows = unfrozen_result_two.all()
+
+        # ... etc
+
+    .. versionadded:: 1.4
+
+    .. seealso::
+
+        :ref:`do_orm_execute_re_executing` - example usage within the
+        ORM to implement a result-set cache.
+
+        :func:`_orm.loading.merge_frozen_result` - ORM function to merge
+        a frozen result back into a :class:`_orm.Session`.
+
+    """
+
+    data: Sequence[Any]
+
+    def __init__(self, result: Result[_TP]):
+        self.metadata = result._metadata._for_freeze()
+        self._source_supports_scalars = result._source_supports_scalars
+        self._attributes = result._attributes
+
+        if self._source_supports_scalars:
+            self.data = list(result._raw_row_iterator())
+        else:
+            self.data = result.fetchall()
+
+    def rewrite_rows(self) -> Sequence[Sequence[Any]]:
+        if self._source_supports_scalars:
+            return [[elem] for elem in self.data]
+        else:
+            return [list(row) for row in self.data]
+
+    def with_new_rows(
+        self, tuple_data: Sequence[Row[_TP]]
+    ) -> FrozenResult[_TP]:
+        fr = FrozenResult.__new__(FrozenResult)
+        fr.metadata = self.metadata
+        fr._attributes = self._attributes
+        fr._source_supports_scalars = self._source_supports_scalars
+
+        if self._source_supports_scalars:
+            fr.data = [d[0] for d in tuple_data]
+        else:
+            fr.data = tuple_data
+        return fr
+
+    def __call__(self) -> Result[_TP]:
+        result: IteratorResult[_TP] = IteratorResult(
+            self.metadata, iter(self.data)
+        )
+        result._attributes = self._attributes
+        result._source_supports_scalars = self._source_supports_scalars
+        return result
+
+
+class IteratorResult(Result[_TP]):
+    """A :class:`_engine.Result` that gets data from a Python iterator of
+    :class:`_engine.Row` objects or similar row-like data.
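+
+    E.g., a minimal sketch constructing a result from plain tuples
+    (keys and data are illustrative)::
+
+        result = IteratorResult(
+            SimpleResultMetaData(["a", "b"]),
+            iter([(1, 2), (3, 4)]),
+        )
+        rows = result.all()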
+
+    .. versionadded:: 1.4
+
+    """
+
+    _hard_closed = False
+    _soft_closed = False
+
+    def __init__(
+        self,
+        cursor_metadata: ResultMetaData,
+        iterator: Iterator[_InterimSupportsScalarsRowType],
+        raw: Optional[Result[Any]] = None,
+        _source_supports_scalars: bool = False,
+    ):
+        self._metadata = cursor_metadata
+        self.iterator = iterator
+        self.raw = raw
+        self._source_supports_scalars = _source_supports_scalars
+
+    @property
+    def closed(self) -> bool:
+        """Return ``True`` if this :class:`_engine.IteratorResult` has
+        been closed
+
+        .. versionadded:: 1.4.43
+
+        """
+        return self._hard_closed
+
+    def _soft_close(self, hard: bool = False, **kw: Any) -> None:
+        if hard:
+            self._hard_closed = True
+        if self.raw is not None:
+            self.raw._soft_close(hard=hard, **kw)
+        self.iterator = iter([])
+        self._reset_memoizations()
+        self._soft_closed = True
+
+    def _raise_hard_closed(self) -> NoReturn:
+        raise exc.ResourceClosedError("This result object is closed.")
+
+    def _raw_row_iterator(self) -> Iterator[_RowData]:
+        return self.iterator
+
+    def _fetchiter_impl(self) -> Iterator[_InterimSupportsScalarsRowType]:
+        if self._hard_closed:
+            self._raise_hard_closed()
+        return self.iterator
+
+    def _fetchone_impl(
+        self, hard_close: bool = False
+    ) -> Optional[_InterimRowType[Row[Any]]]:
+        if self._hard_closed:
+            self._raise_hard_closed()
+
+        row = next(self.iterator, _NO_ROW)
+        if row is _NO_ROW:
+            self._soft_close(hard=hard_close)
+            return None
+        else:
+            return row
+
+    def _fetchall_impl(self) -> List[_InterimRowType[Row[Any]]]:
+        if self._hard_closed:
+            self._raise_hard_closed()
+        try:
+            return list(self.iterator)
+        finally:
+            self._soft_close()
+
+    def _fetchmany_impl(
+        self, size: Optional[int] = None
+    ) -> List[_InterimRowType[Row[Any]]]:
+        if self._hard_closed:
+            self._raise_hard_closed()
+
+        return list(itertools.islice(self.iterator, 0, size))
+
+
+def null_result() -> IteratorResult[Any]:
+    return IteratorResult(SimpleResultMetaData([]), iter([]))
+
+
+class ChunkedIteratorResult(IteratorResult[_TP]):
+    """An :class:`_engine.IteratorResult` that works from an
+    iterator-producing callable.
+
+    The given ``chunks`` argument is a function that receives the number of
+    rows to return in each chunk, or ``None`` for all rows.  The function
+    should then return an un-consumed iterator of lists, each list of the
+    requested size.
+
+    The function may be called again at any time, in which case it should
+    continue from the same result set but adjust the chunk size as given.
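+
+    E.g., a minimal sketch of the ``chunks`` contract (data is
+    illustrative)::
+
+        rows = iter([(1,), (2,), (3,), (4,), (5,)])
+
+        def chunks(size):
+            # continue from the same iterator on each call, yielding
+            # lists of at most ``size`` rows (all rows if size is None)
+            while True:
+                chunk = list(itertools.islice(rows, size))
+                if not chunk:
+                    break
+                yield chunk
+
+        result = ChunkedIteratorResult(SimpleResultMetaData(["x"]), chunks)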
+
+    .. versionadded:: 1.4
+
+    """
+
+    def __init__(
+        self,
+        cursor_metadata: ResultMetaData,
+        chunks: Callable[
+            [Optional[int]], Iterator[Sequence[_InterimRowType[_R]]]
+        ],
+        source_supports_scalars: bool = False,
+        raw: Optional[Result[Any]] = None,
+        dynamic_yield_per: bool = False,
+    ):
+        self._metadata = cursor_metadata
+        self.chunks = chunks
+        self._source_supports_scalars = source_supports_scalars
+        self.raw = raw
+        self.iterator = itertools.chain.from_iterable(self.chunks(None))
+        self.dynamic_yield_per = dynamic_yield_per
+
+    @_generative
+    def yield_per(self, num: int) -> Self:
+        # TODO: this throws away the iterator which may be holding
+        # onto a chunk.   the yield_per cannot be changed once any
+        # rows have been fetched.   either find a way to enforce this,
+        # or we can't use itertools.chain and will instead have to
+        # keep track.
+
+        self._yield_per = num
+        self.iterator = itertools.chain.from_iterable(self.chunks(num))
+        return self
+
+    def _soft_close(self, hard: bool = False, **kw: Any) -> None:
+        super()._soft_close(hard=hard, **kw)
+        self.chunks = lambda size: []  # type: ignore
+
+    def _fetchmany_impl(
+        self, size: Optional[int] = None
+    ) -> List[_InterimRowType[Row[Any]]]:
+        if self.dynamic_yield_per:
+            self.iterator = itertools.chain.from_iterable(self.chunks(size))
+        return super()._fetchmany_impl(size=size)
+
+
+class MergedResult(IteratorResult[_TP]):
+    """A :class:`_engine.Result` that is merged from any number of
+    :class:`_engine.Result` objects.
+
+    Returned by the :meth:`_engine.Result.merge` method.
+
+    .. versionadded:: 1.4
+
+    """
+
+    closed = False
+    rowcount: Optional[int]
+
+    def __init__(
+        self, cursor_metadata: ResultMetaData, results: Sequence[Result[_TP]]
+    ):
+        self._results = results
+        super().__init__(
+            cursor_metadata,
+            itertools.chain.from_iterable(
+                r._raw_row_iterator() for r in results
+            ),
+        )
+
+        self._unique_filter_state = results[0]._unique_filter_state
+        self._yield_per = results[0]._yield_per
+
+        # going to try something w/ this in next rev
+        self._source_supports_scalars = results[0]._source_supports_scalars
+
+        self._attributes = self._attributes.merge_with(
+            *[r._attributes for r in results]
+        )
+
+    def _soft_close(self, hard: bool = False, **kw: Any) -> None:
+        for r in self._results:
+            r._soft_close(hard=hard, **kw)
+        if hard:
+            self.closed = True
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/row.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/row.py
new file mode 100644
index 00000000..da7ae9af
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/row.py
@@ -0,0 +1,400 @@
+# engine/row.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Define row constructs including :class:`.Row`."""
+
+from __future__ import annotations
+
+from abc import ABC
+import collections.abc as collections_abc
+import operator
+import typing
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Generic
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import NoReturn
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from ..sql import util as sql_util
+from ..util import deprecated
+from ..util._has_cy import HAS_CYEXTENSION
+
+if TYPE_CHECKING or not HAS_CYEXTENSION:
+    from ._py_row import BaseRow as BaseRow
+else:
+    from sqlalchemy.cyextension.resultproxy import BaseRow as BaseRow
+
+if TYPE_CHECKING:
+    from .result import _KeyType
+    from .result import _ProcessorsType
+    from .result import RMKeyView
+
+_T = TypeVar("_T", bound=Any)
+_TP = TypeVar("_TP", bound=Tuple[Any, ...])
+
+
+class Row(BaseRow, Sequence[Any], Generic[_TP]):
+    """Represent a single result row.
+
+    The :class:`.Row` object represents a row of a database result.  It is
+    typically associated in the 1.x series of SQLAlchemy with the
+    :class:`_engine.CursorResult` object, however it is also used by the ORM
+    for tuple-like results as of SQLAlchemy 1.4.
+
+    The :class:`.Row` object seeks to act as much like a Python named
+    tuple as possible.   For mapping (i.e. dictionary) behavior on a row,
+    such as testing for containment of keys, refer to the :attr:`.Row._mapping`
+    attribute.
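+
+    E.g., a minimal sketch (``user_table`` is illustrative)::
+
+        row = conn.execute(select(user_table)).first()
+
+        # named-tuple style access
+        print(row.id, row.name)
+
+        # mapping (dictionary) style access
+        print(row._mapping["name"])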
+
+    .. seealso::
+
+        :ref:`tutorial_selecting_data` - includes examples of selecting
+        rows from SELECT statements.
+
+    .. versionchanged:: 1.4
+
+        Renamed ``RowProxy`` to :class:`.Row`. :class:`.Row` is no longer a
+        "proxy" object in that it contains the final form of data within it,
+        and now acts mostly like a named tuple. Mapping-like functionality is
+        moved to the :attr:`.Row._mapping` attribute. See
+        :ref:`change_4710_core` for background on this change.
+
+    """
+
+    __slots__ = ()
+
+    def __setattr__(self, name: str, value: Any) -> NoReturn:
+        raise AttributeError("can't set attribute")
+
+    def __delattr__(self, name: str) -> NoReturn:
+        raise AttributeError("can't delete attribute")
+
+    def _tuple(self) -> _TP:
+        """Return a 'tuple' form of this :class:`.Row`.
+
+        At runtime, this method returns "self"; the :class:`.Row` object is
+        already a named tuple. However, at the typing level, if this
+        :class:`.Row` is typed, the "tuple" return type will be a :pep:`484`
+        ``Tuple`` datatype that contains typing information about individual
+        elements, supporting typed unpacking and attribute access.
+
+        .. versionadded:: 2.0.19 - The :meth:`.Row._tuple` method supersedes
+           the previous :meth:`.Row.tuple` method, which is now underscored
+           to avoid name conflicts with column names in the same way as other
+           named-tuple methods on :class:`.Row`.
+
+        .. seealso::
+
+            :attr:`.Row._t` - shorthand attribute notation
+
+            :meth:`.Result.tuples`
+
+
+        """
+        return self  # type: ignore
+
+    @deprecated(
+        "2.0.19",
+        "The :meth:`.Row.tuple` method is deprecated in favor of "
+        ":meth:`.Row._tuple`; all :class:`.Row` "
+        "methods and library-level attributes are intended to be underscored "
+        "to avoid name conflicts.  Please use :meth:`Row._tuple`.",
+    )
+    def tuple(self) -> _TP:
+        """Return a 'tuple' form of this :class:`.Row`.
+
+        .. versionadded:: 2.0
+
+        """
+        return self._tuple()
+
+    @property
+    def _t(self) -> _TP:
+        """A synonym for :meth:`.Row._tuple`.
+
+        .. versionadded:: 2.0.19 - The :attr:`.Row._t` attribute supersedes
+           the previous :attr:`.Row.t` attribute, which is now underscored
+           to avoid name conflicts with column names in the same way as other
+           named-tuple methods on :class:`.Row`.
+
+        .. seealso::
+
+            :attr:`.Result.t`
+        """
+        return self  # type: ignore
+
+    @property
+    @deprecated(
+        "2.0.19",
+        "The :attr:`.Row.t` attribute is deprecated in favor of "
+        ":attr:`.Row._t`; all :class:`.Row` "
+        "methods and library-level attributes are intended to be underscored "
+        "to avoid name conflicts.  Please use :attr:`Row._t`.",
+    )
+    def t(self) -> _TP:
+        """A synonym for :meth:`.Row._tuple`.
+
+        .. versionadded:: 2.0
+
+        """
+        return self._t
+
+    @property
+    def _mapping(self) -> RowMapping:
+        """Return a :class:`.RowMapping` for this :class:`.Row`.
+
+        This object provides a consistent Python mapping (i.e. dictionary)
+        interface for the data contained within the row.   The :class:`.Row`
+        by itself behaves like a named tuple.
+
+        .. seealso::
+
+            :attr:`.Row._fields`
+
+        .. versionadded:: 1.4
+
+        """
+        return RowMapping(self._parent, None, self._key_to_index, self._data)
+
+    def _filter_on_values(
+        self, processor: Optional[_ProcessorsType]
+    ) -> Row[Any]:
+        return Row(self._parent, processor, self._key_to_index, self._data)
+
+    if not TYPE_CHECKING:
+
+        def _special_name_accessor(name: str) -> Any:
+            """Handle ambiguous names such as "count" and "index" """
+
+            @property
+            def go(self: Row) -> Any:
+                if self._parent._has_key(name):
+                    return self.__getattr__(name)
+                else:
+
+                    def meth(*arg: Any, **kw: Any) -> Any:
+                        return getattr(collections_abc.Sequence, name)(
+                            self, *arg, **kw
+                        )
+
+                    return meth
+
+            return go
+
+        count = _special_name_accessor("count")
+        index = _special_name_accessor("index")
+
+    def __contains__(self, key: Any) -> bool:
+        return key in self._data
+
+    def _op(self, other: Any, op: Callable[[Any, Any], bool]) -> bool:
+        return (
+            op(self._to_tuple_instance(), other._to_tuple_instance())
+            if isinstance(other, Row)
+            else op(self._to_tuple_instance(), other)
+        )
+
+    __hash__ = BaseRow.__hash__
+
+    if TYPE_CHECKING:
+
+        @overload
+        def __getitem__(self, index: int) -> Any: ...
+
+        @overload
+        def __getitem__(self, index: slice) -> Sequence[Any]: ...
+
+        def __getitem__(self, index: Union[int, slice]) -> Any: ...
+
+    def __lt__(self, other: Any) -> bool:
+        return self._op(other, operator.lt)
+
+    def __le__(self, other: Any) -> bool:
+        return self._op(other, operator.le)
+
+    def __ge__(self, other: Any) -> bool:
+        return self._op(other, operator.ge)
+
+    def __gt__(self, other: Any) -> bool:
+        return self._op(other, operator.gt)
+
+    def __eq__(self, other: Any) -> bool:
+        return self._op(other, operator.eq)
+
+    def __ne__(self, other: Any) -> bool:
+        return self._op(other, operator.ne)
+
+    def __repr__(self) -> str:
+        return repr(sql_util._repr_row(self))
+
+    @property
+    def _fields(self) -> Tuple[str, ...]:
+        """Return a tuple of string keys as represented by this
+        :class:`.Row`.
+
+        The keys can represent the labels of the columns returned by a Core
+        statement or the names of the ORM classes returned by an ORM
+        execution.
+
+        This attribute is analogous to the Python named tuple ``._fields``
+        attribute.
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :attr:`.Row._mapping`
+
+        """
+        return tuple([k for k in self._parent.keys if k is not None])
+
+    def _asdict(self) -> Dict[str, Any]:
+        """Return a new dict which maps field names to their corresponding
+        values.
+
+        This method is analogous to the Python named tuple ``._asdict()``
+        method, and works by applying the ``dict()`` constructor to the
+        :attr:`.Row._mapping` attribute.
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :attr:`.Row._mapping`
+
+        """
+        return dict(self._mapping)
+
+
+BaseRowProxy = BaseRow
+RowProxy = Row
+
+
+class ROMappingView(ABC):
+    __slots__ = ()
+
+    _items: Sequence[Any]
+    _mapping: Mapping["_KeyType", Any]
+
+    def __init__(
+        self, mapping: Mapping["_KeyType", Any], items: Sequence[Any]
+    ):
+        self._mapping = mapping  # type: ignore[misc]
+        self._items = items  # type: ignore[misc]
+
+    def __len__(self) -> int:
+        return len(self._items)
+
+    def __repr__(self) -> str:
+        return "{0.__class__.__name__}({0._mapping!r})".format(self)
+
+    def __iter__(self) -> Iterator[Any]:
+        return iter(self._items)
+
+    def __contains__(self, item: Any) -> bool:
+        return item in self._items
+
+    def __eq__(self, other: Any) -> bool:
+        return list(other) == list(self)
+
+    def __ne__(self, other: Any) -> bool:
+        return list(other) != list(self)
+
+
+class ROMappingKeysValuesView(
+    ROMappingView, typing.KeysView["_KeyType"], typing.ValuesView[Any]
+):
+    __slots__ = ("_items",)  # mapping slot is provided by KeysView
+
+
+class ROMappingItemsView(ROMappingView, typing.ItemsView["_KeyType", Any]):
+    __slots__ = ("_items",)  # mapping slot is provided by ItemsView
+
+
+class RowMapping(BaseRow, typing.Mapping["_KeyType", Any]):
+    """A ``Mapping`` that maps column names and objects to :class:`.Row`
+    values.
+
+    The :class:`.RowMapping` is available from a :class:`.Row` via the
+    :attr:`.Row._mapping` attribute, as well as from the iterable interface
+    provided by the :class:`.MappingResult` object returned by the
+    :meth:`_engine.Result.mappings` method.
+
+    :class:`.RowMapping` supplies Python mapping (i.e. dictionary) access to
+    the contents of the row.   This includes support for testing the
+    containment of specific keys (string column names or objects), as well
+    as iteration of keys, values, and items::
+
+        for row in result:
+            if "a" in row._mapping:
+                print("Column 'a': %s" % row._mapping["a"])
+
+            print("Column b: %s" % row._mapping[table.c.b])
+
+    .. versionadded:: 1.4 The :class:`.RowMapping` object replaces the
+       mapping-like access previously provided by a database result row,
+       which now seeks to behave mostly like a named tuple.
+
+    """
+
+    __slots__ = ()
+
+    if TYPE_CHECKING:
+
+        def __getitem__(self, key: _KeyType) -> Any: ...
+
+    else:
+        __getitem__ = BaseRow._get_by_key_impl_mapping
+
+    def _values_impl(self) -> List[Any]:
+        return list(self._data)
+
+    def __iter__(self) -> Iterator[str]:
+        return (k for k in self._parent.keys if k is not None)
+
+    def __len__(self) -> int:
+        return len(self._data)
+
+    def __contains__(self, key: object) -> bool:
+        return self._parent._has_key(key)
+
+    def __repr__(self) -> str:
+        return repr(dict(self))
+
+    def items(self) -> ROMappingItemsView:
+        """Return a view of key/value tuples for the elements in the
+        underlying :class:`.Row`.
+
+        """
+        return ROMappingItemsView(
+            self, [(key, self[key]) for key in self.keys()]
+        )
+
+    def keys(self) -> RMKeyView:
+        """Return a view of 'keys' for string column names represented
+        by the underlying :class:`.Row`.
+
+        """
+
+        return self._parent.keys
+
+    def values(self) -> ROMappingKeysValuesView:
+        """Return a view of values for the values represented in the
+        underlying :class:`.Row`.
+
+        """
+        return ROMappingKeysValuesView(self, self._values_impl())
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/strategies.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/strategies.py
new file mode 100644
index 00000000..5dd7bca9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/strategies.py
@@ -0,0 +1,19 @@
+# engine/strategies.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Deprecated mock engine strategy used by Alembic.
+
+
+"""
+
+from __future__ import annotations
+
+from .mock import MockConnection  # noqa
+
+
+class MockEngineStrategy:
+    MockConnection = MockConnection
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/url.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/url.py
new file mode 100644
index 00000000..bb004f11
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/url.py
@@ -0,0 +1,924 @@
+# engine/url.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Provides the :class:`~sqlalchemy.engine.url.URL` class which encapsulates
+information about a database connection specification.
+
+The URL object is created automatically when
+:func:`~sqlalchemy.engine.create_engine` is called with a string
+argument; alternatively, the URL is a public-facing construct which can
+be used directly and is also accepted directly by ``create_engine()``.
+"""
+
+from __future__ import annotations
+
+import collections.abc as collections_abc
+import re
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import Iterable
+from typing import List
+from typing import Mapping
+from typing import NamedTuple
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import Union
+from urllib.parse import parse_qsl
+from urllib.parse import quote
+from urllib.parse import quote_plus
+from urllib.parse import unquote
+
+from .interfaces import Dialect
+from .. import exc
+from .. import util
+from ..dialects import plugins
+from ..dialects import registry
+
+
+class URL(NamedTuple):
+    """
+    Represent the components of a URL used to connect to a database.
+
+    URLs are typically constructed from a fully formatted URL string, where the
+    :func:`.make_url` function is used internally by the
+    :func:`_sa.create_engine` function in order to parse the URL string into
+    its individual components, which are then used to construct a new
+    :class:`.URL` object. When parsing from a formatted URL string, the parsing
+    format generally follows
+    `RFC-1738 <https://www.ietf.org/rfc/rfc1738.txt>`_, with some exceptions.
+
+    A :class:`_engine.URL` object may also be produced directly, either by
+    using the :func:`.make_url` function with a fully formed URL string, or
+    by using the :meth:`_engine.URL.create` constructor in order
+    to construct a :class:`_engine.URL` programmatically given individual
+    fields. The resulting :class:`.URL` object may be passed directly to
+    :func:`_sa.create_engine` in place of a string argument, which will bypass
+    the usage of :func:`.make_url` within the engine's creation process.
+
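+    E.g., parsing a complete URL string (values illustrative)::
+
+        from sqlalchemy.engine import make_url
+
+        url = make_url("postgresql+psycopg2://user:pass@host/dbname")
+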
+    .. versionchanged:: 1.4
+
+        The :class:`_engine.URL` object is now an immutable object.  To
+        create a URL, use the :func:`_engine.make_url` or
+        :meth:`_engine.URL.create` function / method.  To modify
+        a :class:`_engine.URL`, use methods like
+        :meth:`_engine.URL.set` and
+        :meth:`_engine.URL.update_query_dict` to return a new
+        :class:`_engine.URL` object with modifications.   See notes for this
+        change at :ref:`change_5526`.
+
+    .. seealso::
+
+        :ref:`database_urls`
+
+    :class:`_engine.URL` contains the following attributes:
+
+    * :attr:`_engine.URL.drivername`: database backend and driver name, such as
+      ``postgresql+psycopg2``
+    * :attr:`_engine.URL.username`: username string
+    * :attr:`_engine.URL.password`: password string
+    * :attr:`_engine.URL.host`: string hostname
+    * :attr:`_engine.URL.port`: integer port number
+    * :attr:`_engine.URL.database`: string database name
+    * :attr:`_engine.URL.query`: an immutable mapping representing the query
+      string.  Contains strings for keys and either strings or tuples of
+      strings for values.
+
+
+    """
+
+    drivername: str
+    """database backend and driver name, such as
+    ``postgresql+psycopg2``
+
+    """
+
+    username: Optional[str]
+    "username string"
+
+    password: Optional[str]
+    """password, which is normally a string but may also be any
+    object that has a ``__str__()`` method."""
+
+    host: Optional[str]
+    """hostname or IP number.  May also be a data source name for some
+    drivers."""
+
+    port: Optional[int]
+    """integer port number"""
+
+    database: Optional[str]
+    """database name"""
+
+    query: util.immutabledict[str, Union[Tuple[str, ...], str]]
+    """an immutable mapping representing the query string.  contains strings
+       for keys and either strings or tuples of strings for values, e.g.::
+
+            >>> from sqlalchemy.engine import make_url
+            >>> url = make_url(
+            ...     "postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt"
+            ... )
+            >>> url.query
+            immutabledict({'alt_host': ('host1', 'host2'), 'ssl_cipher': '/path/to/crt'})
+
+         To create a mutable copy of this mapping, use the ``dict`` constructor::
+
+            mutable_query_opts = dict(url.query)
+
+       .. seealso::
+
+          :attr:`_engine.URL.normalized_query` - normalizes all values into sequences
+          for consistent processing
+
+          Methods for altering the contents of :attr:`_engine.URL.query`:
+
+          :meth:`_engine.URL.update_query_dict`
+
+          :meth:`_engine.URL.update_query_string`
+
+          :meth:`_engine.URL.update_query_pairs`
+
+          :meth:`_engine.URL.difference_update_query`
+
+    """  # noqa: E501
+
+    @classmethod
+    def create(
+        cls,
+        drivername: str,
+        username: Optional[str] = None,
+        password: Optional[str] = None,
+        host: Optional[str] = None,
+        port: Optional[int] = None,
+        database: Optional[str] = None,
+        query: Mapping[str, Union[Sequence[str], str]] = util.EMPTY_DICT,
+    ) -> URL:
+        """Create a new :class:`_engine.URL` object.
+
+        .. seealso::
+
+            :ref:`database_urls`
+
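+        E.g., a minimal programmatic construction (values illustrative)::
+
+            from sqlalchemy.engine import URL
+
+            url = URL.create(
+                "postgresql+psycopg2",
+                username="scott",
+                password="tiger",
+                host="localhost",
+                database="test",
+            )
+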
+        :param drivername: the name of the database backend. This name will
+          correspond to a module in sqlalchemy/databases or a third party
+          plug-in.
+        :param username: The user name.
+        :param password: database password.  Is typically a string, but may
+          also be an object that can be stringified with ``str()``.
+
+          .. note:: The password string should **not** be URL encoded when
+             passed as an argument to :meth:`_engine.URL.create`; the string
+             should contain the password characters exactly as they would be
+             typed.
+
+          .. note::  A password-producing object will be stringified only
+             **once** per :class:`_engine.Engine` object.  For dynamic password
+             generation per connect, see :ref:`engines_dynamic_tokens`.
+
+        :param host: The name of the host.
+        :param port: The port number.
+        :param database: The database name.
+        :param query: A dictionary of string keys to string values to be passed
+          to the dialect and/or the DBAPI upon connect.   To specify non-string
+          parameters to a Python DBAPI directly, use the
+          :paramref:`_sa.create_engine.connect_args` parameter to
+          :func:`_sa.create_engine`.   See also
+          :attr:`_engine.URL.normalized_query` for a dictionary that is
+          consistently string->list of string.
+        :return: new :class:`_engine.URL` object.
+
+        .. versionadded:: 1.4
+
+            The :class:`_engine.URL` object is now an **immutable named
+            tuple**.  In addition, the ``query`` dictionary is also immutable.
+            To create a URL, use the :func:`_engine.make_url` or
+            :meth:`_engine.URL.create` function / method.  To modify a
+            :class:`_engine.URL`, use the :meth:`_engine.URL.set` and
+            :meth:`_engine.URL.update_query_dict` methods.
+
+        """
+
+        return cls(
+            cls._assert_str(drivername, "drivername"),
+            cls._assert_none_str(username, "username"),
+            password,
+            cls._assert_none_str(host, "host"),
+            cls._assert_port(port),
+            cls._assert_none_str(database, "database"),
+            cls._str_dict(query),
+        )
+
+    @classmethod
+    def _assert_port(cls, port: Optional[int]) -> Optional[int]:
+        if port is None:
+            return None
+        try:
+            return int(port)
+        except (TypeError, ValueError):
+            raise TypeError("Port argument must be an integer or None")
+
+    @classmethod
+    def _assert_str(cls, v: str, paramname: str) -> str:
+        if not isinstance(v, str):
+            raise TypeError("%s must be a string" % paramname)
+        return v
+
+    @classmethod
+    def _assert_none_str(
+        cls, v: Optional[str], paramname: str
+    ) -> Optional[str]:
+        if v is None:
+            return v
+
+        return cls._assert_str(v, paramname)
+
+    @classmethod
+    def _str_dict(
+        cls,
+        dict_: Optional[
+            Union[
+                Sequence[Tuple[str, Union[Sequence[str], str]]],
+                Mapping[str, Union[Sequence[str], str]],
+            ]
+        ],
+    ) -> util.immutabledict[str, Union[Tuple[str, ...], str]]:
+        if dict_ is None:
+            return util.EMPTY_DICT
+
+        @overload
+        def _assert_value(
+            val: str,
+        ) -> str: ...
+
+        @overload
+        def _assert_value(
+            val: Sequence[str],
+        ) -> Union[str, Tuple[str, ...]]: ...
+
+        def _assert_value(
+            val: Union[str, Sequence[str]],
+        ) -> Union[str, Tuple[str, ...]]:
+            if isinstance(val, str):
+                return val
+            elif isinstance(val, collections_abc.Sequence):
+                return tuple(_assert_value(elem) for elem in val)
+            else:
+                raise TypeError(
+                    "Query dictionary values must be strings or "
+                    "sequences of strings"
+                )
+
+        def _assert_str(v: str) -> str:
+            if not isinstance(v, str):
+                raise TypeError("Query dictionary keys must be strings")
+            return v
+
+        dict_items: Iterable[Tuple[str, Union[Sequence[str], str]]]
+        if isinstance(dict_, collections_abc.Sequence):
+            dict_items = dict_
+        else:
+            dict_items = dict_.items()
+
+        return util.immutabledict(
+            {
+                _assert_str(key): _assert_value(
+                    value,
+                )
+                for key, value in dict_items
+            }
+        )
+
+    def set(
+        self,
+        drivername: Optional[str] = None,
+        username: Optional[str] = None,
+        password: Optional[str] = None,
+        host: Optional[str] = None,
+        port: Optional[int] = None,
+        database: Optional[str] = None,
+        query: Optional[Mapping[str, Union[Sequence[str], str]]] = None,
+    ) -> URL:
+        """return a new :class:`_engine.URL` object with modifications.
+
+        Values are used if they are non-None.  To set a value to ``None``
+        explicitly, use the :meth:`_engine.URL._replace` method adapted
+        from ``namedtuple``.
+
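+        E.g., an illustrative modification of a parsed URL::
+
+            >>> from sqlalchemy.engine import make_url
+            >>> url = make_url("postgresql+psycopg2://user:pass@host/dbname")
+            >>> url.set(host="newhost", port=5432)
+            postgresql+psycopg2://user:***@newhost:5432/dbname
+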
+        :param drivername: new drivername
+        :param username: new username
+        :param password: new password
+        :param host: new hostname
+        :param port: new port
+        :param database: new database name
+        :param query: new query parameters, passed a dict of string keys
+         referring to string or sequence of string values.  Fully
+         replaces the previous list of arguments.
+
+        :return: new :class:`_engine.URL` object.
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :meth:`_engine.URL.update_query_dict`
+
+        """
+
+        kw: Dict[str, Any] = {}
+        if drivername is not None:
+            kw["drivername"] = drivername
+        if username is not None:
+            kw["username"] = username
+        if password is not None:
+            kw["password"] = password
+        if host is not None:
+            kw["host"] = host
+        if port is not None:
+            kw["port"] = port
+        if database is not None:
+            kw["database"] = database
+        if query is not None:
+            kw["query"] = query
+
+        return self._assert_replace(**kw)
+
+    def _assert_replace(self, **kw: Any) -> URL:
+        """argument checks before calling _replace()"""
+
+        if "drivername" in kw:
+            self._assert_str(kw["drivername"], "drivername")
+        for name in "username", "host", "database":
+            if name in kw:
+                self._assert_none_str(kw[name], name)
+        if "port" in kw:
+            self._assert_port(kw["port"])
+        if "query" in kw:
+            kw["query"] = self._str_dict(kw["query"])
+
+        return self._replace(**kw)
+
+    def update_query_string(
+        self, query_string: str, append: bool = False
+    ) -> URL:
+        """Return a new :class:`_engine.URL` object with the :attr:`_engine.URL.query`
+        parameter dictionary updated by the given query string.
+
+        E.g.::
+
+            >>> from sqlalchemy.engine import make_url
+            >>> url = make_url("postgresql+psycopg2://user:pass@host/dbname")
+            >>> url = url.update_query_string(
+            ...     "alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt"
+            ... )
+            >>> str(url)
+            'postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt'
+
+        :param query_string: a URL escaped query string, not including the
+         question mark.
+
+        :param append: if True, parameters in the existing query string will
+         not be removed; new parameters will be in addition to those present.
+         If left at its default of False, keys present in the given query
+         parameters will replace those of the existing query string.
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :attr:`_engine.URL.query`
+
+            :meth:`_engine.URL.update_query_dict`
+
+        """  # noqa: E501
+        return self.update_query_pairs(parse_qsl(query_string), append=append)
+
+    def update_query_pairs(
+        self,
+        key_value_pairs: Iterable[Tuple[str, Union[str, List[str]]]],
+        append: bool = False,
+    ) -> URL:
+        """Return a new :class:`_engine.URL` object with the
+        :attr:`_engine.URL.query`
+        parameter dictionary updated by the given sequence of key/value pairs
+
+        E.g.::
+
+            >>> from sqlalchemy.engine import make_url
+            >>> url = make_url("postgresql+psycopg2://user:pass@host/dbname")
+            >>> url = url.update_query_pairs(
+            ...     [
+            ...         ("alt_host", "host1"),
+            ...         ("alt_host", "host2"),
+            ...         ("ssl_cipher", "/path/to/crt"),
+            ...     ]
+            ... )
+            >>> str(url)
+            'postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt'
+
+        :param key_value_pairs: A sequence of tuples containing two strings
+         each.
+
+        :param append: if True, parameters in the existing query string will
+         not be removed; new parameters will be in addition to those present.
+         If left at its default of False, keys present in the given query
+         parameters will replace those of the existing query string.
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :attr:`_engine.URL.query`
+
+            :meth:`_engine.URL.difference_update_query`
+
+            :meth:`_engine.URL.set`
+
+        """  # noqa: E501
+
+        existing_query = self.query
+        new_keys: Dict[str, Union[str, List[str]]] = {}
+
+        # accumulate incoming pairs; a key seen more than once collects
+        # its values into a list
+        for key, value in key_value_pairs:
+            if key in new_keys:
+                new_keys[key] = util.to_list(new_keys[key])
+                cast("List[str]", new_keys[key]).append(cast(str, value))
+            else:
+                new_keys[key] = (
+                    list(value) if isinstance(value, (list, tuple)) else value
+                )
+
+        new_query: Mapping[str, Union[str, Sequence[str]]]
+        if append:
+            new_query = {}
+
+            # merge new values with any existing values for the same key
+            for k in new_keys:
+                if k in existing_query:
+                    new_query[k] = tuple(
+                        util.to_list(existing_query[k])
+                        + util.to_list(new_keys[k])
+                    )
+                else:
+                    new_query[k] = new_keys[k]
+
+            # carry over existing keys that were not updated
+            new_query.update(
+                {
+                    k: existing_query[k]
+                    for k in set(existing_query).difference(new_keys)
+                }
+            )
+        else:
+            new_query = self.query.union(
+                {
+                    k: tuple(v) if isinstance(v, list) else v
+                    for k, v in new_keys.items()
+                }
+            )
+        return self.set(query=new_query)
+
+    def update_query_dict(
+        self,
+        query_parameters: Mapping[str, Union[str, List[str]]],
+        append: bool = False,
+    ) -> URL:
+        """Return a new :class:`_engine.URL` object with the
+        :attr:`_engine.URL.query` parameter dictionary updated by the given
+        dictionary.
+
+        The dictionary typically contains string keys and string values.
+        In order to represent a query parameter that is expressed multiple
+        times, pass a sequence of string values.
+
+        E.g.::
+
+
+            >>> from sqlalchemy.engine import make_url
+            >>> url = make_url("postgresql+psycopg2://user:pass@host/dbname")
+            >>> url = url.update_query_dict(
+            ...     {"alt_host": ["host1", "host2"], "ssl_cipher": "/path/to/crt"}
+            ... )
+            >>> str(url)
+            'postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt'
+
+
+        :param query_parameters: A dictionary with string keys and values
+         that are either strings, or sequences of strings.
+
+        :param append: if True, parameters in the existing query string will
+         not be removed; new parameters will be in addition to those present.
+         If left at its default of False, keys present in the given query
+         parameters will replace those of the existing query string.
+
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :attr:`_engine.URL.query`
+
+            :meth:`_engine.URL.update_query_string`
+
+            :meth:`_engine.URL.update_query_pairs`
+
+            :meth:`_engine.URL.difference_update_query`
+
+            :meth:`_engine.URL.set`
+
+        """  # noqa: E501
+        return self.update_query_pairs(query_parameters.items(), append=append)
+
+    def difference_update_query(self, names: Iterable[str]) -> URL:
+        """
+        Remove the given names from the :attr:`_engine.URL.query` dictionary,
+        returning the new :class:`_engine.URL`.
+
+        E.g.::
+
+            url = url.difference_update_query(["foo", "bar"])
+
+        Equivalent to using :meth:`_engine.URL.set` as follows::
+
+            url = url.set(
+                query={
+                    key: url.query[key]
+                    for key in set(url.query).difference(["foo", "bar"])
+                }
+            )
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :attr:`_engine.URL.query`
+
+            :meth:`_engine.URL.update_query_dict`
+
+            :meth:`_engine.URL.set`
+
+        """
+
+        if not set(names).intersection(self.query):
+            return self
+
+        return URL(
+            self.drivername,
+            self.username,
+            self.password,
+            self.host,
+            self.port,
+            self.database,
+            util.immutabledict(
+                {
+                    key: self.query[key]
+                    for key in set(self.query).difference(names)
+                }
+            ),
+        )
+
+    @property
+    def normalized_query(self) -> Mapping[str, Sequence[str]]:
+        """Return the :attr:`_engine.URL.query` dictionary with values normalized
+        into sequences.
+
+        As the :attr:`_engine.URL.query` dictionary may contain either
+        string values or sequences of string values to differentiate between
+        parameters that are specified multiple times in the query string,
+        code that needs to handle multiple parameters generically will wish
+        to use this attribute so that all parameters present are presented
+        as sequences.   Inspiration is from Python's ``urllib.parse.parse_qs``
+        function.  E.g.::
+
+
+            >>> from sqlalchemy.engine import make_url
+            >>> url = make_url(
+            ...     "postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt"
+            ... )
+            >>> url.query
+            immutabledict({'alt_host': ('host1', 'host2'), 'ssl_cipher': '/path/to/crt'})
+            >>> url.normalized_query
+            immutabledict({'alt_host': ('host1', 'host2'), 'ssl_cipher': ('/path/to/crt',)})
+
+        """  # noqa: E501
+
+        return util.immutabledict(
+            {
+                k: (v,) if not isinstance(v, tuple) else v
+                for k, v in self.query.items()
+            }
+        )
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`_engine.URL.__to_string__ method is deprecated and will "
+        "be removed in a future release.  Please use the "
+        ":meth:`_engine.URL.render_as_string` method.",
+    )
+    def __to_string__(self, hide_password: bool = True) -> str:
+        """Render this :class:`_engine.URL` object as a string.
+
+        :param hide_password: Defaults to True.   The password is not shown
+         in the string unless this is set to False.
+
+        """
+        return self.render_as_string(hide_password=hide_password)
+
+    def render_as_string(self, hide_password: bool = True) -> str:
+        """Render this :class:`_engine.URL` object as a string.
+
+        This method is used when the ``__str__()`` or ``__repr__()``
+        methods are used.   Unlike those methods, it accepts the additional
+        ``hide_password`` option directly.
+
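+        E.g., the password is masked by default (URL illustrative)::
+
+            >>> from sqlalchemy.engine import make_url
+            >>> url = make_url("postgresql+psycopg2://user:secret@host/dbname")
+            >>> url.render_as_string()
+            'postgresql+psycopg2://user:***@host/dbname'
+            >>> url.render_as_string(hide_password=False)
+            'postgresql+psycopg2://user:secret@host/dbname'
+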
+        :param hide_password: Defaults to True.   The password is not shown
+         in the string unless this is set to False.
+
+        """
+        s = self.drivername + "://"
+        if self.username is not None:
+            s += quote(self.username, safe=" +")
+            if self.password is not None:
+                s += ":" + (
+                    "***"
+                    if hide_password
+                    else quote(str(self.password), safe=" +")
+                )
+            s += "@"
+        if self.host is not None:
+            if ":" in self.host:
+                s += f"[{self.host}]"
+            else:
+                s += self.host
+        if self.port is not None:
+            s += ":" + str(self.port)
+        if self.database is not None:
+            s += "/" + self.database
+        if self.query:
+            keys = list(self.query)
+            keys.sort()
+            s += "?" + "&".join(
+                f"{quote_plus(k)}={quote_plus(element)}"
+                for k in keys
+                for element in util.to_list(self.query[k])
+            )
+        return s
+
+    def __repr__(self) -> str:
+        return self.render_as_string()
+
+    def __copy__(self) -> URL:
+        return self.__class__.create(
+            self.drivername,
+            self.username,
+            self.password,
+            self.host,
+            self.port,
+            self.database,
+            # note this is an immutabledict of str-> str / tuple of str,
+            # also fully immutable.  does not require deepcopy
+            self.query,
+        )
+
+    def __deepcopy__(self, memo: Any) -> URL:
+        return self.__copy__()
+
+    def __hash__(self) -> int:
+        return hash(str(self))
+
+    def __eq__(self, other: Any) -> bool:
+        return (
+            isinstance(other, URL)
+            and self.drivername == other.drivername
+            and self.username == other.username
+            and self.password == other.password
+            and self.host == other.host
+            and self.database == other.database
+            and self.query == other.query
+            and self.port == other.port
+        )
+
+    def __ne__(self, other: Any) -> bool:
+        return not self == other
+
+    def get_backend_name(self) -> str:
+        """Return the backend name.
+
+        This is the name that corresponds to the database backend in
+        use, and is the portion of the :attr:`_engine.URL.drivername`
+        that is to the left of the plus sign.
+
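+        E.g.::
+
+            >>> from sqlalchemy.engine import make_url
+            >>> make_url("postgresql+psycopg2://u@h/db").get_backend_name()
+            'postgresql'
+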
+        """
+        if "+" not in self.drivername:
+            return self.drivername
+        else:
+            return self.drivername.split("+")[0]
+
+    def get_driver_name(self) -> str:
+        """Return the backend name.
+
+        This is the name that corresponds to the DBAPI driver in
+        use, and is the portion of the :attr:`_engine.URL.drivername`
+        that is to the right of the plus sign.
+
+        If the :attr:`_engine.URL.drivername` does not include a plus sign,
+        then the default :class:`_engine.Dialect` for this :class:`_engine.URL`
+        is imported in order to get the driver name.
+
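+        E.g.::
+
+            >>> from sqlalchemy.engine import make_url
+            >>> make_url("postgresql+psycopg2://u@h/db").get_driver_name()
+            'psycopg2'
+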
+        """
+
+        if "+" not in self.drivername:
+            return self.get_dialect().driver
+        else:
+            return self.drivername.split("+")[1]
+
+    def _instantiate_plugins(
+        self, kwargs: Mapping[str, Any]
+    ) -> Tuple[URL, List[Any], Dict[str, Any]]:
+        plugin_names = util.to_list(self.query.get("plugin", ()))
+        plugin_names += kwargs.get("plugins", [])
+
+        kwargs = dict(kwargs)
+
+        loaded_plugins = [
+            plugins.load(plugin_name)(self, kwargs)
+            for plugin_name in plugin_names
+        ]
+
+        u = self.difference_update_query(["plugin", "plugins"])
+
+        for plugin in loaded_plugins:
+            new_u = plugin.update_url(u)
+            if new_u is not None:
+                u = new_u
+
+        kwargs.pop("plugins", None)
+
+        return u, loaded_plugins, kwargs
+
+    def _get_entrypoint(self) -> Type[Dialect]:
+        """Return the "entry point" dialect class.
+
+        This is normally the dialect itself except in the case when the
+        returned class implements the get_dialect_cls() method.
+
+        """
+        if "+" not in self.drivername:
+            name = self.drivername
+        else:
+            name = self.drivername.replace("+", ".")
+        cls = registry.load(name)
+        # check for legacy dialects that
+        # would return a module with 'dialect' as the
+        # actual class
+        if (
+            hasattr(cls, "dialect")
+            and isinstance(cls.dialect, type)
+            and issubclass(cls.dialect, Dialect)
+        ):
+            return cls.dialect
+        else:
+            return cast("Type[Dialect]", cls)
+
+    def get_dialect(self, _is_async: bool = False) -> Type[Dialect]:
+        """Return the SQLAlchemy :class:`_engine.Dialect` class corresponding
+        to this URL's driver name.
+
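+        E.g., the psycopg2 dialect class for an illustrative URL::
+
+            >>> from sqlalchemy.engine import make_url
+            >>> make_url("postgresql+psycopg2://u@h/db").get_dialect()
+            <class 'sqlalchemy.dialects.postgresql.psycopg2.PGDialect_psycopg2'>
+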
+        """
+        entrypoint = self._get_entrypoint()
+        if _is_async:
+            dialect_cls = entrypoint.get_async_dialect_cls(self)
+        else:
+            dialect_cls = entrypoint.get_dialect_cls(self)
+        return dialect_cls
+
+    def translate_connect_args(
+        self, names: Optional[List[str]] = None, **kw: Any
+    ) -> Dict[str, Any]:
+        r"""Translate url attributes into a dictionary of connection arguments.
+
+        Returns attributes of this url (`host`, `database`, `username`,
+        `password`, `port`) as a plain dictionary.  The attribute names are
+        used as the keys by default.  Unset or false attributes are omitted
+        from the final dictionary.
+
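+        E.g. (illustrative; the ``username`` attribute is renamed to the
+        ``user`` key here)::
+
+            from sqlalchemy.engine import make_url
+
+            url = make_url("postgresql://scott:tiger@localhost:5432/test")
+            url.translate_connect_args(username="user")
+            # {'host': 'localhost', 'database': 'test', 'user': 'scott',
+            #  'password': 'tiger', 'port': 5432}
+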
+        :param \**kw: Optional, alternate key names for url attributes.
+
+        :param names: Deprecated.  Same purpose as the keyword-based alternate
+            names, but correlates the name to the original positionally.
+        """
+
+        if names is not None:
+            util.warn_deprecated(
+                "The `URL.translate_connect_args.name`s parameter is "
+                "deprecated. Please pass the "
+                "alternate names as kw arguments.",
+                "1.4",
+            )
+
+        translated = {}
+        attribute_names = ["host", "database", "username", "password", "port"]
+        for sname in attribute_names:
+            if names:
+                name = names.pop(0)
+            elif sname in kw:
+                name = kw[sname]
+            else:
+                name = sname
+            if name is not None and getattr(self, sname, False):
+                if sname == "password":
+                    translated[name] = str(getattr(self, sname))
+                else:
+                    translated[name] = getattr(self, sname)
+
+        return translated
+
+
+def make_url(name_or_url: Union[str, URL]) -> URL:
+    """Given a string, produce a new URL instance.
+
+    The format of the URL generally follows `RFC-1738
+    <https://www.ietf.org/rfc/rfc1738.txt>`_, with some exceptions, including
+    that underscores, and not dashes or periods, are accepted within the
+    "scheme" portion.
+
+    If a :class:`.URL` object is passed, it is returned as is.
+
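+    E.g.::
+
+        >>> from sqlalchemy.engine import make_url
+        >>> url = make_url("postgresql+psycopg2://user:pass@host/dbname")
+        >>> url.database
+        'dbname'
+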
+    .. seealso::
+
+        :ref:`database_urls`
+
+    """
+
+    if isinstance(name_or_url, str):
+        return _parse_url(name_or_url)
+    elif not isinstance(name_or_url, URL) and not hasattr(
+        name_or_url, "_sqla_is_testing_if_this_is_a_mock_object"
+    ):
+        raise exc.ArgumentError(
+            f"Expected string or URL object, got {name_or_url!r}"
+        )
+    else:
+        return name_or_url
+
+
+def _parse_url(name: str) -> URL:
+    pattern = re.compile(
+        r"""
+            (?P<name>[\w\+]+)://
+            (?:
+                (?P<username>[^:/]*)
+                (?::(?P<password>[^@]*))?
+            @)?
+            (?:
+                (?:
+                    \[(?P<ipv6host>[^/\?]+)\] |
+                    (?P<ipv4host>[^/:\?]+)
+                )?
+                (?::(?P<port>[^/\?]*))?
+            )?
+            (?:/(?P<database>[^\?]*))?
+            (?:\?(?P<query>.*))?
+            """,
+        re.X,
+    )
+
+    m = pattern.match(name)
+    if m is not None:
+        components = m.groupdict()
+        query: Optional[Dict[str, Union[str, List[str]]]]
+        if components["query"] is not None:
+            query = {}
+
+            for key, value in parse_qsl(components["query"]):
+                if key in query:
+                    query[key] = util.to_list(query[key])
+                    cast("List[str]", query[key]).append(value)
+                else:
+                    query[key] = value
+        else:
+            query = None
+        components["query"] = query
+
+        if components["username"] is not None:
+            components["username"] = unquote(components["username"])
+
+        if components["password"] is not None:
+            components["password"] = unquote(components["password"])
+
+        ipv4host = components.pop("ipv4host")
+        ipv6host = components.pop("ipv6host")
+        components["host"] = ipv4host or ipv6host
+        name = components.pop("name")
+
+        if components["port"]:
+            components["port"] = int(components["port"])
+
+        return URL.create(name, **components)  # type: ignore
+
+    else:
+        raise exc.ArgumentError(
+            "Could not parse SQLAlchemy URL from string '%s'" % name
+        )
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/engine/util.py b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/util.py
new file mode 100644
index 00000000..e499efa9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/engine/util.py
@@ -0,0 +1,167 @@
+# engine/util.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from __future__ import annotations
+
+import typing
+from typing import Any
+from typing import Callable
+from typing import Optional
+from typing import TypeVar
+
+from .. import exc
+from .. import util
+from ..util._has_cy import HAS_CYEXTENSION
+from ..util.typing import Protocol
+from ..util.typing import Self
+
+if typing.TYPE_CHECKING or not HAS_CYEXTENSION:
+    from ._py_util import _distill_params_20 as _distill_params_20
+    from ._py_util import _distill_raw_params as _distill_raw_params
+else:
+    from sqlalchemy.cyextension.util import (  # noqa: F401
+        _distill_params_20 as _distill_params_20,
+    )
+    from sqlalchemy.cyextension.util import (  # noqa: F401
+        _distill_raw_params as _distill_raw_params,
+    )
+
+_C = TypeVar("_C", bound=Callable[[], Any])
+
+
+def connection_memoize(key: str) -> Callable[[_C], _C]:
+    """Decorator, memoize a function in a connection.info stash.
+
+    Only applicable to functions which take no arguments other than a
+    connection.  The memo will be stored in ``connection.info[key]``.
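+
+    A minimal sketch of usage; the ``MyDialect`` class, the cache key and
+    the query are hypothetical::
+
+        class MyDialect:
+            @connection_memoize("_server_version")
+            def server_version(self, connection):
+                # computed once, then served from
+                # connection.info["_server_version"]
+                return connection.exec_driver_sql(
+                    "SELECT version()"
+                ).scalar()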
+    """
+
+    @util.decorator
+    def decorated(fn, self, connection):  # type: ignore
+        connection = connection.connect()
+        try:
+            return connection.info[key]
+        except KeyError:
+            connection.info[key] = val = fn(self, connection)
+            return val
+
+    return decorated
+
+
+class _TConsSubject(Protocol):
+    _trans_context_manager: Optional[TransactionalContext]
+
+
+class TransactionalContext:
+    """Apply Python context manager behavior to transaction objects.
+
+    Performs validation to ensure the subject of the transaction is not
+    used after the transaction has been ended prematurely.
+
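+    E.g., :class:`.Transaction` is one such context manager; illustrative
+    use, assuming an ``engine`` and a table ``t``::
+
+        from sqlalchemy import text
+
+        with engine.connect() as conn:
+            with conn.begin():  # commits on success, rolls back on error
+                conn.execute(text("insert into t (x) values (1)"))
+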
+    """
+
+    __slots__ = ("_outer_trans_ctx", "_trans_subject", "__weakref__")
+
+    _trans_subject: Optional[_TConsSubject]
+
+    def _transaction_is_active(self) -> bool:
+        raise NotImplementedError()
+
+    def _transaction_is_closed(self) -> bool:
+        raise NotImplementedError()
+
+    def _rollback_can_be_called(self) -> bool:
+        """indicates the object is in a state that is known to be acceptable
+        for rollback() to be called.
+
+        This does not necessarily mean rollback() will succeed or not raise
+        an error, just that there is currently no state detected that indicates
+        rollback() would fail or emit warnings.
+
+        It also does not mean that there's a transaction in progress, as
+        it is usually safe to call rollback() even if no transaction is
+        present.
+
+        .. versionadded:: 1.4.28
+
+        """
+        raise NotImplementedError()
+
+    def _get_subject(self) -> _TConsSubject:
+        raise NotImplementedError()
+
+    def commit(self) -> None:
+        raise NotImplementedError()
+
+    def rollback(self) -> None:
+        raise NotImplementedError()
+
+    def close(self) -> None:
+        raise NotImplementedError()
+
+    @classmethod
+    def _trans_ctx_check(cls, subject: _TConsSubject) -> None:
+        trans_context = subject._trans_context_manager
+        if trans_context:
+            if not trans_context._transaction_is_active():
+                raise exc.InvalidRequestError(
+                    "Can't operate on closed transaction inside context "
+                    "manager.  Please complete the context manager "
+                    "before emitting further commands."
+                )
+
+    def __enter__(self) -> Self:
+        subject = self._get_subject()
+
+        # none for outer transaction, may be non-None for nested
+        # savepoint, legacy nesting cases
+        trans_context = subject._trans_context_manager
+        self._outer_trans_ctx = trans_context
+
+        self._trans_subject = subject
+        subject._trans_context_manager = self
+        return self
+
+    def __exit__(self, type_: Any, value: Any, traceback: Any) -> None:
+        subject = getattr(self, "_trans_subject", None)
+
+        # simplistically we could assume that
+        # "subject._trans_context_manager is self".  However, any calling
+        # code that is manipulating __exit__ directly would break this
+        # assumption.  alembic context manager
+        # is an example of partial use that just calls __exit__ and
+        # not __enter__ at the moment.  it's safe to assume this is being done
+        # in the wild also
+        out_of_band_exit = (
+            subject is None or subject._trans_context_manager is not self
+        )
+
+        if type_ is None and self._transaction_is_active():
+            try:
+                self.commit()
+            except BaseException:
+                with util.safe_reraise():
+                    if self._rollback_can_be_called():
+                        self.rollback()
+            finally:
+                if not out_of_band_exit:
+                    assert subject is not None
+                    subject._trans_context_manager = self._outer_trans_ctx
+                self._trans_subject = self._outer_trans_ctx = None
+        else:
+            try:
+                if not self._transaction_is_active():
+                    if not self._transaction_is_closed():
+                        self.close()
+                else:
+                    if self._rollback_can_be_called():
+                        self.rollback()
+            finally:
+                if not out_of_band_exit:
+                    assert subject is not None
+                    subject._trans_context_manager = self._outer_trans_ctx
+                self._trans_subject = self._outer_trans_ctx = None