about summary refs log tree commit diff
path: root/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql
diff options
context:
space:
mode:
authorS. Solomon Darnell2025-03-28 21:52:21 -0500
committerS. Solomon Darnell2025-03-28 21:52:21 -0500
commit4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
treeee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql
parentcc961e04ba734dd72309fb548a2f97d67d578813 (diff)
downloadgn-ai-master.tar.gz
two version of R2R are here HEAD master
Diffstat (limited to '.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql')
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__init__.py104
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/aiomysql.py335
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/asyncmy.py339
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/base.py3575
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/cymysql.py84
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/dml.py225
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/enumerated.py243
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/expression.py143
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/json.py81
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mariadb.py61
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mariadbconnector.py277
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mysqlconnector.py180
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mysqldb.py305
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/provision.py110
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/pymysql.py136
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/pyodbc.py139
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/reflection.py677
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/reserved_words.py571
-rw-r--r--.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/types.py774
19 files changed, 8359 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__init__.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__init__.py
new file mode 100644
index 00000000..9174c544
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/__init__.py
@@ -0,0 +1,104 @@
+# dialects/mysql/__init__.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+from . import aiomysql  # noqa
+from . import asyncmy  # noqa
+from . import base  # noqa
+from . import cymysql  # noqa
+from . import mariadbconnector  # noqa
+from . import mysqlconnector  # noqa
+from . import mysqldb  # noqa
+from . import pymysql  # noqa
+from . import pyodbc  # noqa
+from .base import BIGINT
+from .base import BINARY
+from .base import BIT
+from .base import BLOB
+from .base import BOOLEAN
+from .base import CHAR
+from .base import DATE
+from .base import DATETIME
+from .base import DECIMAL
+from .base import DOUBLE
+from .base import ENUM
+from .base import FLOAT
+from .base import INTEGER
+from .base import JSON
+from .base import LONGBLOB
+from .base import LONGTEXT
+from .base import MEDIUMBLOB
+from .base import MEDIUMINT
+from .base import MEDIUMTEXT
+from .base import NCHAR
+from .base import NUMERIC
+from .base import NVARCHAR
+from .base import REAL
+from .base import SET
+from .base import SMALLINT
+from .base import TEXT
+from .base import TIME
+from .base import TIMESTAMP
+from .base import TINYBLOB
+from .base import TINYINT
+from .base import TINYTEXT
+from .base import VARBINARY
+from .base import VARCHAR
+from .base import YEAR
+from .dml import Insert
+from .dml import insert
+from .expression import match
+from .mariadb import INET4
+from .mariadb import INET6
+
# default dialect
base.dialect = dialect = mysqldb.dialect

# Public API of the mysql dialect package.  Type names are kept in
# alphabetical order; the previous version listed "INTEGER" twice.
__all__ = (
    "BIGINT",
    "BINARY",
    "BIT",
    "BLOB",
    "BOOLEAN",
    "CHAR",
    "DATE",
    "DATETIME",
    "DECIMAL",
    "DOUBLE",
    "ENUM",
    "FLOAT",
    "INET4",
    "INET6",
    "INTEGER",
    "JSON",
    "LONGBLOB",
    "LONGTEXT",
    "MEDIUMBLOB",
    "MEDIUMINT",
    "MEDIUMTEXT",
    "NCHAR",
    "NUMERIC",
    "NVARCHAR",
    "REAL",
    "SET",
    "SMALLINT",
    "TEXT",
    "TIME",
    "TIMESTAMP",
    "TINYBLOB",
    "TINYINT",
    "TINYTEXT",
    "VARBINARY",
    "VARCHAR",
    "YEAR",
    "dialect",
    "insert",
    "Insert",
    "match",
)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/aiomysql.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/aiomysql.py
new file mode 100644
index 00000000..bd5e7de6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/aiomysql.py
@@ -0,0 +1,335 @@
+# dialects/mysql/aiomysql.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors <see AUTHORS
+# file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+r"""
+.. dialect:: mysql+aiomysql
+    :name: aiomysql
+    :dbapi: aiomysql
+    :connectstring: mysql+aiomysql://user:password@host:port/dbname[?key=value&key=value...]
+    :url: https://github.com/aio-libs/aiomysql
+
+The aiomysql dialect is SQLAlchemy's second Python asyncio dialect.
+
+Using a special asyncio mediation layer, the aiomysql dialect is usable
+as the backend for the :ref:`SQLAlchemy asyncio <asyncio_toplevel>`
+extension package.
+
+This dialect should normally be used only with the
+:func:`_asyncio.create_async_engine` engine creation function::
+
+    from sqlalchemy.ext.asyncio import create_async_engine
+
+    engine = create_async_engine(
+        "mysql+aiomysql://user:pass@hostname/dbname?charset=utf8mb4"
+    )
+
+"""  # noqa
+from collections import deque
+
+from .pymysql import MySQLDialect_pymysql
+from ... import pool
+from ... import util
+from ...engine import AdaptedConnection
+from ...util.concurrency import asyncio
+from ...util.concurrency import await_fallback
+from ...util.concurrency import await_only
+
+
class AsyncAdapt_aiomysql_cursor:
    """Adapt an async aiomysql cursor to the synchronous DBAPI cursor
    interface expected by SQLAlchemy's engine internals.

    For client-side (buffered) statements the complete result is drained
    eagerly during ``execute()`` into ``self._rows``, so the synchronous
    ``fetch*`` methods never need to await anything.
    """

    # TODO: base on connectors/asyncio.py
    # see #10415
    server_side = False
    __slots__ = (
        "_adapt_connection",
        "_connection",
        "await_",
        "_cursor",
        "_rows",
    )

    def __init__(self, adapt_connection):
        self._adapt_connection = adapt_connection
        self._connection = adapt_connection._connection
        self.await_ = adapt_connection.await_

        raw_cursor = self._connection.cursor(adapt_connection.dbapi.Cursor)

        # enter the async context manager by hand;
        # see https://github.com/aio-libs/aiomysql/issues/543
        self._cursor = self.await_(raw_cursor.__aenter__())
        self._rows = deque()

    @property
    def description(self):
        # pass-through to the wrapped aiomysql cursor
        return self._cursor.description

    @property
    def rowcount(self):
        return self._cursor.rowcount

    @property
    def arraysize(self):
        return self._cursor.arraysize

    @arraysize.setter
    def arraysize(self, value):
        self._cursor.arraysize = value

    @property
    def lastrowid(self):
        return self._cursor.lastrowid

    def close(self):
        # No real close is performed here; GC handles the underlying
        # cursor.  A buffered MySQL cursor has no server-side state to
        # release once its rows are exhausted, which execute() already
        # guaranteed.  Making this truly async would require changing how
        # Result does "safe close cursor"; an alternative would be to
        # mimic the aiosqlite dialect and only hold a cursor for
        # server-side operations.
        self._rows.clear()

    def execute(self, operation, parameters=None):
        return self.await_(self._execute_async(operation, parameters))

    def executemany(self, operation, seq_of_parameters):
        return self.await_(
            self._executemany_async(operation, seq_of_parameters)
        )

    async def _execute_async(self, operation, parameters):
        async with self._adapt_connection._execute_mutex:
            result = await self._cursor.execute(operation, parameters)

            if not self.server_side:
                # aiomysql exposes an already-buffered result behind an
                # async API; drain it via fetchall() (safer than reaching
                # into its private buffer) so our sync result object can
                # read rows without awaiting.
                self._rows = deque(await self._cursor.fetchall())
            return result

    async def _executemany_async(self, operation, seq_of_parameters):
        async with self._adapt_connection._execute_mutex:
            return await self._cursor.executemany(operation, seq_of_parameters)

    def setinputsizes(self, *inputsizes):
        # no-op; per-parameter type hints are not used by this driver
        pass

    def __iter__(self):
        buffered = self._rows
        while buffered:
            yield buffered.popleft()

    def fetchone(self):
        return self._rows.popleft() if self._rows else None

    def fetchmany(self, size=None):
        limit = self.arraysize if size is None else size
        buffered = self._rows
        return [buffered.popleft() for _ in range(min(limit, len(buffered)))]

    def fetchall(self):
        drained = list(self._rows)
        self._rows.clear()
        return drained
+
+
class AsyncAdapt_aiomysql_ss_cursor(AsyncAdapt_aiomysql_cursor):
    """Server-side (unbuffered) cursor variant: rows stay on the server,
    so every fetch must await the driver, and close() is a real close."""

    # TODO: base on connectors/asyncio.py
    # see #10415
    __slots__ = ()
    server_side = True

    def __init__(self, adapt_connection):
        self._adapt_connection = adapt_connection
        self._connection = adapt_connection._connection
        self.await_ = adapt_connection.await_

        raw_cursor = self._connection.cursor(adapt_connection.dbapi.SSCursor)

        self._cursor = self.await_(raw_cursor.__aenter__())

    def close(self):
        # unlike the buffered cursor, a server-side cursor holds server
        # state and must be closed for real; guard against double-close
        if self._cursor is None:
            return
        self.await_(self._cursor.close())
        self._cursor = None

    def fetchone(self):
        return self.await_(self._cursor.fetchone())

    def fetchmany(self, size=None):
        return self.await_(self._cursor.fetchmany(size=size))

    def fetchall(self):
        return self.await_(self._cursor.fetchall())
+
+
class AsyncAdapt_aiomysql_connection(AdaptedConnection):
    """Adapt an aiomysql async connection to the synchronous DBAPI
    connection interface, resolving driver coroutines via ``await_only``."""

    # TODO: base on connectors/asyncio.py
    # see #10415
    await_ = staticmethod(await_only)
    __slots__ = ("dbapi", "_execute_mutex")

    def __init__(self, dbapi, connection):
        self.dbapi = dbapi
        self._connection = connection
        # serializes execute()/executemany() calls on this connection
        self._execute_mutex = asyncio.Lock()

    def ping(self, reconnect):
        return self.await_(self._connection.ping(reconnect))

    def character_set_name(self):
        # synchronous accessor on the driver; nothing to await
        return self._connection.character_set_name()

    def autocommit(self, value):
        self.await_(self._connection.autocommit(value))

    def cursor(self, server_side=False):
        cursor_cls = (
            AsyncAdapt_aiomysql_ss_cursor
            if server_side
            else AsyncAdapt_aiomysql_cursor
        )
        return cursor_cls(self)

    def rollback(self):
        self.await_(self._connection.rollback())

    def commit(self):
        self.await_(self._connection.commit())

    def terminate(self):
        # the driver's close() is synchronous, so no await is needed
        self._connection.close()

    def close(self) -> None:
        self.await_(self._connection.ensure_closed())
+
+
class AsyncAdaptFallback_aiomysql_connection(AsyncAdapt_aiomysql_connection):
    """Connection variant for async_fallback mode: awaits are resolved by
    running an event loop on demand rather than requiring one."""

    # TODO: base on connectors/asyncio.py
    # see #10415
    __slots__ = ()

    await_ = staticmethod(await_fallback)
+
+
class AsyncAdapt_aiomysql_dbapi:
    """PEP 249-style module facade combining aiomysql (the async driver)
    with pymysql, which supplies the DBAPI type objects that aiomysql
    does not re-export.
    """

    def __init__(self, aiomysql, pymysql):
        self.aiomysql = aiomysql
        self.pymysql = pymysql
        self.paramstyle = "format"
        self._init_dbapi_attributes()
        self.Cursor, self.SSCursor = self._init_cursors_subclasses()

    def _init_dbapi_attributes(self):
        # the PEP 249 exception hierarchy comes from aiomysql.
        # (the original listed "InterfaceError" twice; once suffices.)
        for name in (
            "Warning",
            "Error",
            "InterfaceError",
            "DataError",
            "DatabaseError",
            "OperationalError",
            "IntegrityError",
            "ProgrammingError",
            "InternalError",
            "NotSupportedError",
        ):
            setattr(self, name, getattr(self.aiomysql, name))

        # the DBAPI type-object singletons come from pymysql
        for name in (
            "NUMBER",
            "STRING",
            "DATETIME",
            "BINARY",
            "TIMESTAMP",
            "Binary",
        ):
            setattr(self, name, getattr(self.pymysql, name))

    def connect(self, *arg, **kw):
        """Create an adapted connection; honors async_fallback mode and an
        optional async_creator_fn override."""
        async_fallback = kw.pop("async_fallback", False)
        creator_fn = kw.pop("async_creator_fn", self.aiomysql.connect)

        if util.asbool(async_fallback):
            return AsyncAdaptFallback_aiomysql_connection(
                self,
                await_fallback(creator_fn(*arg, **kw)),
            )
        else:
            return AsyncAdapt_aiomysql_connection(
                self,
                await_only(creator_fn(*arg, **kw)),
            )

    def _init_cursors_subclasses(self):
        # suppress unconditional warning emitted by aiomysql
        class Cursor(self.aiomysql.Cursor):
            async def _show_warnings(self, conn):
                pass

        class SSCursor(self.aiomysql.SSCursor):
            async def _show_warnings(self, conn):
                pass

        return Cursor, SSCursor
+
+
class MySQLDialect_aiomysql(MySQLDialect_pymysql):
    """MySQL dialect backed by the aiomysql asyncio driver."""

    driver = "aiomysql"
    supports_statement_cache = True

    supports_server_side_cursors = True
    _sscursor = AsyncAdapt_aiomysql_ss_cursor

    is_async = True
    has_terminate = True

    @classmethod
    def import_dbapi(cls):
        # combine aiomysql (the driver) with pymysql (type constants)
        return AsyncAdapt_aiomysql_dbapi(
            __import__("aiomysql"), __import__("pymysql")
        )

    @classmethod
    def get_pool_class(cls, url):
        use_fallback = util.asbool(url.query.get("async_fallback", False))
        return (
            pool.FallbackAsyncAdaptedQueuePool
            if use_fallback
            else pool.AsyncAdaptedQueuePool
        )

    def do_terminate(self, dbapi_connection) -> None:
        dbapi_connection.terminate()

    def create_connect_args(self, url):
        # aiomysql spells "user"/"db" where the URL says "username"/"database"
        return super().create_connect_args(
            url, _translate_args={"username": "user", "database": "db"}
        )

    def is_disconnect(self, e, connection, cursor):
        if super().is_disconnect(e, connection, cursor):
            return True
        return "not connected" in str(e).lower()

    def _found_rows_client_flag(self):
        from pymysql.constants import CLIENT

        return CLIENT.FOUND_ROWS

    def get_driver_connection(self, connection):
        return connection._connection


dialect = MySQLDialect_aiomysql
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/asyncmy.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/asyncmy.py
new file mode 100644
index 00000000..9ec54e69
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/asyncmy.py
@@ -0,0 +1,339 @@
+# dialects/mysql/asyncmy.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors <see AUTHORS
+# file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+r"""
+.. dialect:: mysql+asyncmy
+    :name: asyncmy
+    :dbapi: asyncmy
+    :connectstring: mysql+asyncmy://user:password@host:port/dbname[?key=value&key=value...]
+    :url: https://github.com/long2ice/asyncmy
+
+Using a special asyncio mediation layer, the asyncmy dialect is usable
+as the backend for the :ref:`SQLAlchemy asyncio <asyncio_toplevel>`
+extension package.
+
+This dialect should normally be used only with the
+:func:`_asyncio.create_async_engine` engine creation function::
+
+    from sqlalchemy.ext.asyncio import create_async_engine
+
+    engine = create_async_engine(
+        "mysql+asyncmy://user:pass@hostname/dbname?charset=utf8mb4"
+    )
+
+"""  # noqa
+from collections import deque
+from contextlib import asynccontextmanager
+
+from .pymysql import MySQLDialect_pymysql
+from ... import pool
+from ... import util
+from ...engine import AdaptedConnection
+from ...util.concurrency import asyncio
+from ...util.concurrency import await_fallback
+from ...util.concurrency import await_only
+
+
class AsyncAdapt_asyncmy_cursor:
    """Adapt an async asyncmy cursor to the synchronous DBAPI cursor
    interface expected by SQLAlchemy's engine internals.

    For client-side (buffered) statements the complete result is drained
    eagerly during ``execute()`` into ``self._rows``, so the synchronous
    ``fetch*`` methods never need to await anything.
    """

    # TODO: base on connectors/asyncio.py
    # see #10415
    server_side = False
    __slots__ = (
        "_adapt_connection",
        "_connection",
        "await_",
        "_cursor",
        "_rows",
    )

    def __init__(self, adapt_connection):
        self._adapt_connection = adapt_connection
        self._connection = adapt_connection._connection
        self.await_ = adapt_connection.await_

        raw_cursor = self._connection.cursor()

        # enter the async context manager by hand
        self._cursor = self.await_(raw_cursor.__aenter__())
        self._rows = deque()

    @property
    def description(self):
        # pass-through to the wrapped asyncmy cursor
        return self._cursor.description

    @property
    def rowcount(self):
        return self._cursor.rowcount

    @property
    def arraysize(self):
        return self._cursor.arraysize

    @arraysize.setter
    def arraysize(self, value):
        self._cursor.arraysize = value

    @property
    def lastrowid(self):
        return self._cursor.lastrowid

    def close(self):
        # No real close is performed here; GC handles the underlying
        # cursor.  A buffered MySQL cursor has no server-side state to
        # release once its rows are exhausted, which execute() already
        # guaranteed.  Making this truly async would require changing how
        # Result does "safe close cursor"; an alternative would be to
        # mimic the aiosqlite dialect and only hold a cursor for
        # server-side operations.
        self._rows.clear()

    def execute(self, operation, parameters=None):
        return self.await_(self._execute_async(operation, parameters))

    def executemany(self, operation, seq_of_parameters):
        return self.await_(
            self._executemany_async(operation, seq_of_parameters)
        )

    async def _execute_async(self, operation, parameters):
        async with self._adapt_connection._mutex_and_adapt_errors():
            # asyncmy distinguishes between execute() with and without a
            # parameter set, so only pass parameters when given
            if parameters is None:
                result = await self._cursor.execute(operation)
            else:
                result = await self._cursor.execute(operation, parameters)

            if not self.server_side:
                # asyncmy exposes an already-buffered result behind an
                # async API; drain it via fetchall() (safer than reaching
                # into its private buffer) so our sync result object can
                # read rows without awaiting.
                self._rows = deque(await self._cursor.fetchall())
            return result

    async def _executemany_async(self, operation, seq_of_parameters):
        async with self._adapt_connection._mutex_and_adapt_errors():
            return await self._cursor.executemany(operation, seq_of_parameters)

    def setinputsizes(self, *inputsizes):
        # no-op; per-parameter type hints are not used by this driver
        pass

    def __iter__(self):
        buffered = self._rows
        while buffered:
            yield buffered.popleft()

    def fetchone(self):
        return self._rows.popleft() if self._rows else None

    def fetchmany(self, size=None):
        limit = self.arraysize if size is None else size
        buffered = self._rows
        return [buffered.popleft() for _ in range(min(limit, len(buffered)))]

    def fetchall(self):
        drained = list(self._rows)
        self._rows.clear()
        return drained
+
+
class AsyncAdapt_asyncmy_ss_cursor(AsyncAdapt_asyncmy_cursor):
    """Server-side (unbuffered) cursor variant: rows stay on the server,
    so every fetch must await the driver, and close() is a real close."""

    # TODO: base on connectors/asyncio.py
    # see #10415
    __slots__ = ()
    server_side = True

    def __init__(self, adapt_connection):
        self._adapt_connection = adapt_connection
        self._connection = adapt_connection._connection
        self.await_ = adapt_connection.await_

        raw_cursor = self._connection.cursor(
            adapt_connection.dbapi.asyncmy.cursors.SSCursor
        )

        self._cursor = self.await_(raw_cursor.__aenter__())

    def close(self):
        # unlike the buffered cursor, a server-side cursor holds server
        # state and must be closed for real; guard against double-close
        if self._cursor is None:
            return
        self.await_(self._cursor.close())
        self._cursor = None

    def fetchone(self):
        return self.await_(self._cursor.fetchone())

    def fetchmany(self, size=None):
        return self.await_(self._cursor.fetchmany(size=size))

    def fetchall(self):
        return self.await_(self._cursor.fetchall())
+
+
class AsyncAdapt_asyncmy_connection(AdaptedConnection):
    """Adapt an asyncmy async connection to the synchronous DBAPI
    connection interface, resolving driver coroutines via ``await_only``."""

    # TODO: base on connectors/asyncio.py
    # see #10415
    await_ = staticmethod(await_only)
    __slots__ = ("dbapi", "_execute_mutex")

    def __init__(self, dbapi, connection):
        self.dbapi = dbapi
        self._connection = connection
        # serializes execute()/executemany()/ping() on this connection
        self._execute_mutex = asyncio.Lock()

    @asynccontextmanager
    async def _mutex_and_adapt_errors(self):
        async with self._execute_mutex:
            try:
                yield
            except AttributeError:
                # asyncmy surfaces some network failures as AttributeError;
                # translate to a DBAPI InternalError so the dialect's
                # is_disconnect() can recognize it
                raise self.dbapi.InternalError(
                    "network operation failed due to asyncmy attribute error"
                )

    def ping(self, reconnect):
        assert not reconnect
        return self.await_(self._do_ping())

    async def _do_ping(self):
        async with self._mutex_and_adapt_errors():
            return await self._connection.ping(False)

    def character_set_name(self):
        # synchronous accessor on the driver; nothing to await
        return self._connection.character_set_name()

    def autocommit(self, value):
        self.await_(self._connection.autocommit(value))

    def cursor(self, server_side=False):
        cursor_cls = (
            AsyncAdapt_asyncmy_ss_cursor
            if server_side
            else AsyncAdapt_asyncmy_cursor
        )
        return cursor_cls(self)

    def rollback(self):
        self.await_(self._connection.rollback())

    def commit(self):
        self.await_(self._connection.commit())

    def terminate(self):
        # the driver's close() is synchronous, so no await is needed
        self._connection.close()

    def close(self) -> None:
        self.await_(self._connection.ensure_closed())
+
+
class AsyncAdaptFallback_asyncmy_connection(AsyncAdapt_asyncmy_connection):
    """Connection variant for async_fallback mode: awaits are resolved by
    running an event loop on demand rather than requiring one."""

    __slots__ = ()

    await_ = staticmethod(await_fallback)
+
+
+def _Binary(x):
+    """Return x as a binary type."""
+    return bytes(x)
+
+
class AsyncAdapt_asyncmy_dbapi:
    """PEP 249-style module facade over the asyncmy driver.

    The exception hierarchy is re-exported from ``asyncmy.errors``; the
    DBAPI type-object singletons are stand-in symbols, since asyncmy does
    not provide them.
    """

    def __init__(self, asyncmy):
        self.asyncmy = asyncmy
        self.paramstyle = "format"
        self._init_dbapi_attributes()

    def _init_dbapi_attributes(self):
        # the PEP 249 exception hierarchy comes from asyncmy.errors.
        # (the original listed "InterfaceError" twice; once suffices.)
        for name in (
            "Warning",
            "Error",
            "InterfaceError",
            "DataError",
            "DatabaseError",
            "OperationalError",
            "IntegrityError",
            "ProgrammingError",
            "InternalError",
            "NotSupportedError",
        ):
            setattr(self, name, getattr(self.asyncmy.errors, name))

    # DBAPI type-object placeholders; asyncmy has no equivalents
    STRING = util.symbol("STRING")
    NUMBER = util.symbol("NUMBER")
    BINARY = util.symbol("BINARY")
    DATETIME = util.symbol("DATETIME")
    TIMESTAMP = util.symbol("TIMESTAMP")
    Binary = staticmethod(_Binary)

    def connect(self, *arg, **kw):
        """Create an adapted connection; honors async_fallback mode and an
        optional async_creator_fn override."""
        async_fallback = kw.pop("async_fallback", False)
        creator_fn = kw.pop("async_creator_fn", self.asyncmy.connect)

        if util.asbool(async_fallback):
            return AsyncAdaptFallback_asyncmy_connection(
                self,
                await_fallback(creator_fn(*arg, **kw)),
            )
        else:
            return AsyncAdapt_asyncmy_connection(
                self,
                await_only(creator_fn(*arg, **kw)),
            )
+
+
class MySQLDialect_asyncmy(MySQLDialect_pymysql):
    """MySQL dialect backed by the asyncmy asyncio driver."""

    driver = "asyncmy"
    supports_statement_cache = True

    supports_server_side_cursors = True
    _sscursor = AsyncAdapt_asyncmy_ss_cursor

    is_async = True
    has_terminate = True

    @classmethod
    def import_dbapi(cls):
        return AsyncAdapt_asyncmy_dbapi(__import__("asyncmy"))

    @classmethod
    def get_pool_class(cls, url):
        use_fallback = util.asbool(url.query.get("async_fallback", False))
        return (
            pool.FallbackAsyncAdaptedQueuePool
            if use_fallback
            else pool.AsyncAdaptedQueuePool
        )

    def do_terminate(self, dbapi_connection) -> None:
        dbapi_connection.terminate()

    def create_connect_args(self, url):
        # asyncmy spells "user"/"db" where the URL says "username"/"database"
        return super().create_connect_args(
            url, _translate_args={"username": "user", "database": "db"}
        )

    def is_disconnect(self, e, connection, cursor):
        if super().is_disconnect(e, connection, cursor):
            return True
        message = str(e).lower()
        # "network operation failed" is raised by our own
        # _mutex_and_adapt_errors() error adaptation
        return (
            "not connected" in message
            or "network operation failed" in message
        )

    def _found_rows_client_flag(self):
        from asyncmy.constants import CLIENT

        return CLIENT.FOUND_ROWS

    def get_driver_connection(self, connection):
        return connection._connection


dialect = MySQLDialect_asyncmy
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/base.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/base.py
new file mode 100644
index 00000000..4a52d1b6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/base.py
@@ -0,0 +1,3575 @@
+# dialects/mysql/base.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+r"""
+
+.. dialect:: mysql
+    :name: MySQL / MariaDB
+    :normal_support: 5.6+ / 10+
+    :best_effort: 5.0.2+ / 5.0.2+
+
+Supported Versions and Features
+-------------------------------
+
+SQLAlchemy supports MySQL starting with version 5.0.2 through modern releases,
+as well as all modern versions of MariaDB.   See the official MySQL
+documentation for detailed information about features supported in any given
+server release.
+
+.. versionchanged:: 1.4  minimum MySQL version supported is now 5.0.2.
+
+MariaDB Support
+~~~~~~~~~~~~~~~
+
+The MariaDB variant of MySQL retains fundamental compatibility with MySQL's
+protocols however the development of these two products continues to diverge.
+Within the realm of SQLAlchemy, the two databases have a small number of
+syntactical and behavioral differences that SQLAlchemy accommodates automatically.
+To connect to a MariaDB database, no changes to the database URL are required::
+
+
+    engine = create_engine(
+        "mysql+pymysql://user:pass@some_mariadb/dbname?charset=utf8mb4"
+    )
+
+Upon first connect, the SQLAlchemy dialect employs a
+server version detection scheme that determines if the
+backing database reports as MariaDB.  Based on this flag, the dialect
+can make different choices in those areas where its behavior
+must be different.
+
+.. _mysql_mariadb_only_mode:
+
+MariaDB-Only Mode
+~~~~~~~~~~~~~~~~~
+
+The dialect also supports an **optional** "MariaDB-only" mode of connection, which may be
+useful for the case where an application makes use of MariaDB-specific features
+and is not compatible with a MySQL database.    To use this mode of operation,
+replace the "mysql" token in the above URL with "mariadb"::
+
+    engine = create_engine(
+        "mariadb+pymysql://user:pass@some_mariadb/dbname?charset=utf8mb4"
+    )
+
+The above engine, upon first connect, will raise an error if the server version
+detection detects that the backing database is not MariaDB.
+
+When using an engine with ``"mariadb"`` as the dialect name, **all mysql-specific options
+that include the name "mysql" in them are now named with "mariadb"**.  This means
+options like ``mysql_engine`` should be named ``mariadb_engine``, etc.  Both
+"mysql" and "mariadb" options can be used simultaneously for applications that
+use URLs with both "mysql" and "mariadb" dialects::
+
+    my_table = Table(
+        "mytable",
+        metadata,
+        Column("id", Integer, primary_key=True),
+        Column("textdata", String(50)),
+        mariadb_engine="InnoDB",
+        mysql_engine="InnoDB",
+    )
+
+    Index(
+        "textdata_ix",
+        my_table.c.textdata,
+        mysql_prefix="FULLTEXT",
+        mariadb_prefix="FULLTEXT",
+    )
+
+Similar behavior will occur when the above structures are reflected, i.e. the
+"mariadb" prefix will be present in the option names when the database URL
+is based on the "mariadb" name.
+
+.. versionadded:: 1.4 Added "mariadb" dialect name supporting "MariaDB-only mode"
+   for the MySQL dialect.
+
+.. _mysql_connection_timeouts:
+
+Connection Timeouts and Disconnects
+-----------------------------------
+
+MySQL / MariaDB feature an automatic connection close behavior, for connections that
+have been idle for a fixed period of time, defaulting to eight hours.
+To circumvent this issue, use
+the :paramref:`_sa.create_engine.pool_recycle` option which ensures that
+a connection will be discarded and replaced with a new one if it has been
+present in the pool for a fixed number of seconds::
+
+    engine = create_engine("mysql+mysqldb://...", pool_recycle=3600)
+
+For more comprehensive disconnect detection of pooled connections, including
+accommodation of server restarts and network issues, a pre-ping approach may
+be employed.  See :ref:`pool_disconnects` for current approaches.
+
+.. seealso::
+
+    :ref:`pool_disconnects` - Background on several techniques for dealing
+    with timed out connections as well as database restarts.
+
+.. _mysql_storage_engines:
+
+CREATE TABLE arguments including Storage Engines
+------------------------------------------------
+
+Both MySQL's and MariaDB's CREATE TABLE syntax includes a wide array of special options,
+including ``ENGINE``, ``CHARSET``, ``MAX_ROWS``, ``ROW_FORMAT``,
+``INSERT_METHOD``, and many more.
+To accommodate the rendering of these arguments, specify the form
+``mysql_argument_name="value"``.  For example, to specify a table with
+``ENGINE`` of ``InnoDB``, ``CHARSET`` of ``utf8mb4``, and ``KEY_BLOCK_SIZE``
+of ``1024``::
+
+  Table(
+      "mytable",
+      metadata,
+      Column("data", String(32)),
+      mysql_engine="InnoDB",
+      mysql_charset="utf8mb4",
+      mysql_key_block_size="1024",
+  )
+
+When supporting :ref:`mysql_mariadb_only_mode` mode, similar keys against
+the "mariadb" prefix must be included as well.  The values can of course
+vary independently so that different settings on MySQL vs. MariaDB may
+be maintained::
+
+  # support both "mysql" and "mariadb-only" engine URLs
+
+  Table(
+      "mytable",
+      metadata,
+      Column("data", String(32)),
+      mysql_engine="InnoDB",
+      mariadb_engine="InnoDB",
+      mysql_charset="utf8mb4",
+      mariadb_charset="utf8",
+      mysql_key_block_size="1024",
+      mariadb_key_block_size="1024",
+  )
+
+The MySQL / MariaDB dialects will normally transfer any keyword specified as
+``mysql_keyword_name`` to be rendered as ``KEYWORD_NAME`` in the
+``CREATE TABLE`` statement.  A handful of these names will render with a space
+instead of an underscore; to support this, the MySQL dialect has awareness of
+these particular names, which include ``DATA DIRECTORY``
+(e.g. ``mysql_data_directory``), ``CHARACTER SET`` (e.g.
+``mysql_character_set``) and ``INDEX DIRECTORY`` (e.g.
+``mysql_index_directory``).
+
+The most common argument is ``mysql_engine``, which refers to the storage
+engine for the table.  Historically, MySQL server installations would default
+to ``MyISAM`` for this value, although newer versions may be defaulting
+to ``InnoDB``.  The ``InnoDB`` engine is typically preferred for its support
+of transactions and foreign keys.
+
+A :class:`_schema.Table`
+that is created in a MySQL / MariaDB database with a storage engine
+of ``MyISAM`` will be essentially non-transactional, meaning any
+INSERT/UPDATE/DELETE statement referring to this table will be invoked as
+autocommit.   It also will have no support for foreign key constraints; while
+the ``CREATE TABLE`` statement accepts foreign key options, when using the
+``MyISAM`` storage engine these arguments are discarded.  Reflecting such a
+table will also produce no foreign key constraint information.
+
+For fully atomic transactions as well as support for foreign key
+constraints, all participating ``CREATE TABLE`` statements must specify a
+transactional engine, which in the vast majority of cases is ``InnoDB``.
+
+Partitioning can also be specified using similar options.
+In the example below the create table will specify ``PARTITION_BY``,
+``PARTITIONS``, ``SUBPARTITIONS`` and ``SUBPARTITION_BY``::
+
+    # can also use mariadb_* prefix
+    Table(
+        "testtable",
+        MetaData(),
+        Column("id", Integer(), primary_key=True, autoincrement=True),
+        Column("other_id", Integer(), primary_key=True, autoincrement=False),
+        mysql_partitions="2",
+        mysql_partition_by="KEY(other_id)",
+        mysql_subpartition_by="HASH(some_expr)",
+        mysql_subpartitions="2",
+    )
+
+This will render:
+
+.. sourcecode:: sql
+
+    CREATE TABLE testtable (
+            id INTEGER NOT NULL AUTO_INCREMENT,
+            other_id INTEGER NOT NULL,
+            PRIMARY KEY (id, other_id)
+    )PARTITION BY KEY(other_id) PARTITIONS 2 SUBPARTITION BY HASH(some_expr) SUBPARTITIONS 2
+
+Case Sensitivity and Table Reflection
+-------------------------------------
+
+Both MySQL and MariaDB have inconsistent support for case-sensitive identifier
+names, basing support on specific details of the underlying
+operating system. However, it has been observed that no matter
+what case sensitivity behavior is present, the names of tables in
+foreign key declarations are *always* received from the database
+as all-lower case, making it impossible to accurately reflect a
+schema where inter-related tables use mixed-case identifier names.
+
+Therefore it is strongly advised that table names be declared as
+all lower case both within SQLAlchemy as well as on the MySQL / MariaDB
+database itself, especially if database reflection features are
+to be used.
+
+.. _mysql_isolation_level:
+
+Transaction Isolation Level
+---------------------------
+
+All MySQL / MariaDB dialects support setting of transaction isolation level both via a
+dialect-specific parameter :paramref:`_sa.create_engine.isolation_level`
+accepted
+by :func:`_sa.create_engine`, as well as the
+:paramref:`.Connection.execution_options.isolation_level` argument as passed to
+:meth:`_engine.Connection.execution_options`.
+This feature works by issuing the
+command ``SET SESSION TRANSACTION ISOLATION LEVEL <level>`` for each new
+connection.  For the special AUTOCOMMIT isolation level, DBAPI-specific
+techniques are used.
+
+To set isolation level using :func:`_sa.create_engine`::
+
+    engine = create_engine(
+        "mysql+mysqldb://scott:tiger@localhost/test",
+        isolation_level="READ UNCOMMITTED",
+    )
+
+To set using per-connection execution options::
+
+    connection = engine.connect()
+    connection = connection.execution_options(isolation_level="READ COMMITTED")
+
+Valid values for ``isolation_level`` include:
+
+* ``READ COMMITTED``
+* ``READ UNCOMMITTED``
+* ``REPEATABLE READ``
+* ``SERIALIZABLE``
+* ``AUTOCOMMIT``
+
+The special ``AUTOCOMMIT`` value makes use of the various "autocommit"
+attributes provided by specific DBAPIs, and is currently supported by
+MySQLdb, MySQL-Client, MySQL-Connector Python, and PyMySQL.   Using it,
+the database connection will return true for the value of
+``SELECT @@autocommit;``.
+
+There are also more options for isolation level configurations, such as
+"sub-engine" objects linked to a main :class:`_engine.Engine` which each apply
+different isolation level settings.  See the discussion at
+:ref:`dbapi_autocommit` for background.
+
+.. seealso::
+
+    :ref:`dbapi_autocommit`
+
+AUTO_INCREMENT Behavior
+-----------------------
+
+When creating tables, SQLAlchemy will automatically set ``AUTO_INCREMENT`` on
+the first :class:`.Integer` primary key column which is not marked as a
+foreign key::
+
+  >>> t = Table(
+  ...     "mytable", metadata, Column("mytable_id", Integer, primary_key=True)
+  ... )
+  >>> t.create()
+  CREATE TABLE mytable (
+          mytable_id INTEGER NOT NULL AUTO_INCREMENT,
+          PRIMARY KEY (mytable_id)
+  )
+
+You can disable this behavior by passing ``False`` to the
+:paramref:`_schema.Column.autoincrement` argument of :class:`_schema.Column`.
+This flag
+can also be used to enable auto-increment on a secondary column in a
+multi-column key for some storage engines::
+
+  Table(
+      "mytable",
+      metadata,
+      Column("gid", Integer, primary_key=True, autoincrement=False),
+      Column("id", Integer, primary_key=True),
+  )
+
+.. _mysql_ss_cursors:
+
+Server Side Cursors
+-------------------
+
+Server-side cursor support is available for the mysqlclient, PyMySQL,
+mariadbconnector dialects and may also be available in others.   This makes use
+of either the "buffered=True/False" flag if available or by using a class such
+as ``MySQLdb.cursors.SSCursor`` or ``pymysql.cursors.SSCursor`` internally.
+
+
+Server side cursors are enabled on a per-statement basis by using the
+:paramref:`.Connection.execution_options.stream_results` connection execution
+option::
+
+    with engine.connect() as conn:
+        result = conn.execution_options(stream_results=True).execute(
+            text("select * from table")
+        )
+
+Note that some kinds of SQL statements may not be supported with
+server side cursors; generally, only SQL statements that return rows should be
+used with this option.
+
+.. deprecated:: 1.4  The dialect-level server_side_cursors flag is deprecated
+   and will be removed in a future release.  Please use the
+   :paramref:`_engine.Connection.stream_results` execution option for
+   unbuffered cursor support.
+
+.. seealso::
+
+    :ref:`engine_stream_results`
+
+.. _mysql_unicode:
+
+Unicode
+-------
+
+Charset Selection
+~~~~~~~~~~~~~~~~~
+
+Most MySQL / MariaDB DBAPIs offer the option to set the client character set for
+a connection.   This is typically delivered using the ``charset`` parameter
+in the URL, such as::
+
+    e = create_engine(
+        "mysql+pymysql://scott:tiger@localhost/test?charset=utf8mb4"
+    )
+
+This charset is the **client character set** for the connection.  Some
+MySQL DBAPIs will default this to a value such as ``latin1``, and some
+will make use of the ``default-character-set`` setting in the ``my.cnf``
+file as well.   Documentation for the DBAPI in use should be consulted
+for specific behavior.
+
+The encoding used for Unicode has traditionally been ``'utf8'``.  However, for
+MySQL versions 5.5.3 and MariaDB 5.5 on forward, a new MySQL-specific encoding
+``'utf8mb4'`` has been introduced, and as of MySQL 8.0 a warning is emitted by
+the server if plain ``utf8`` is specified within any server-side directives,
+replaced with ``utf8mb3``.  The rationale for this new encoding is due to the
+fact that MySQL's legacy utf-8 encoding only supports codepoints up to three
+bytes instead of four.  Therefore, when communicating with a MySQL or MariaDB
+database that includes codepoints more than three bytes in size, this new
+charset is preferred, if supported by both the database as well as the client
+DBAPI, as in::
+
+    e = create_engine(
+        "mysql+pymysql://scott:tiger@localhost/test?charset=utf8mb4"
+    )
+
+All modern DBAPIs should support the ``utf8mb4`` charset.
+
+In order to use ``utf8mb4`` encoding for a schema that was created with  legacy
+``utf8``, changes to the MySQL/MariaDB schema and/or server configuration may be
+required.
+
+.. seealso::
+
+    `The utf8mb4 Character Set \
+    <https://dev.mysql.com/doc/refman/5.5/en/charset-unicode-utf8mb4.html>`_ - \
+    in the MySQL documentation
+
+.. _mysql_binary_introducer:
+
+Dealing with Binary Data Warnings and Unicode
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+MySQL versions 5.6, 5.7 and later (not MariaDB at the time of this writing) now
+emit a warning when attempting to pass binary data to the database, while a
+character set encoding is also in place, when the binary data itself is not
+valid for that encoding:
+
+.. sourcecode:: text
+
+    default.py:509: Warning: (1300, "Invalid utf8mb4 character string:
+    'F9876A'")
+      cursor.execute(statement, parameters)
+
+This warning is due to the fact that the MySQL client library is attempting to
+interpret the binary string as a unicode object even if a datatype such
+as :class:`.LargeBinary` is in use.   To resolve this, the SQL statement requires
+a binary "character set introducer" be present before any non-NULL value
+that renders like this:
+
+.. sourcecode:: sql
+
+    INSERT INTO table (data) VALUES (_binary %s)
+
+These character set introducers are provided by the DBAPI driver, assuming the
+use of mysqlclient or PyMySQL (both of which are recommended).  Add the query
+string parameter ``binary_prefix=true`` to the URL to repair this warning::
+
+    # mysqlclient
+    engine = create_engine(
+        "mysql+mysqldb://scott:tiger@localhost/test?charset=utf8mb4&binary_prefix=true"
+    )
+
+    # PyMySQL
+    engine = create_engine(
+        "mysql+pymysql://scott:tiger@localhost/test?charset=utf8mb4&binary_prefix=true"
+    )
+
+The ``binary_prefix`` flag may or may not be supported by other MySQL drivers.
+
+SQLAlchemy itself cannot render this ``_binary`` prefix reliably, as it does
+not work with the NULL value, which is valid to be sent as a bound parameter.
+As the MySQL driver renders parameters directly into the SQL string, it's the
+most efficient place for this additional keyword to be passed.
+
+.. seealso::
+
+    `Character set introducers <https://dev.mysql.com/doc/refman/5.7/en/charset-introducer.html>`_ - on the MySQL website
+
+
+ANSI Quoting Style
+------------------
+
+MySQL / MariaDB feature two varieties of identifier "quoting style", one using
+backticks and the other using quotes, e.g. ```some_identifier```  vs.
+``"some_identifier"``.   All MySQL dialects detect which version
+is in use by checking the value of :ref:`sql_mode<mysql_sql_mode>` when a connection is first
+established with a particular :class:`_engine.Engine`.
+This quoting style comes
+into play when rendering table and column names as well as when reflecting
+existing database structures.  The detection is entirely automatic and
+no special configuration is needed to use either quoting style.
+
+
+.. _mysql_sql_mode:
+
+Changing the sql_mode
+---------------------
+
+MySQL supports operating in multiple
+`Server SQL Modes <https://dev.mysql.com/doc/refman/8.0/en/sql-mode.html>`_  for
+both Servers and Clients. To change the ``sql_mode`` for a given application, a
+developer can leverage SQLAlchemy's Events system.
+
+In the following example, the event system is used to set the ``sql_mode`` on
+the ``first_connect`` and ``connect`` events::
+
+    from sqlalchemy import create_engine, event
+
+    eng = create_engine(
+        "mysql+mysqldb://scott:tiger@localhost/test", echo="debug"
+    )
+
+
+    # `insert=True` will ensure this is the very first listener to run
+    @event.listens_for(eng, "connect", insert=True)
+    def connect(dbapi_connection, connection_record):
+        cursor = dbapi_connection.cursor()
+        cursor.execute("SET sql_mode = 'STRICT_ALL_TABLES'")
+
+
+    conn = eng.connect()
+
+In the example illustrated above, the "connect" event will invoke the "SET"
+statement on the connection at the moment a particular DBAPI connection is
+first created for a given Pool, before the connection is made available to the
+connection pool.  Additionally, because the function was registered with
+``insert=True``, it will be prepended to the internal list of registered
+functions.
+
+
+MySQL / MariaDB SQL Extensions
+------------------------------
+
+Many of the MySQL / MariaDB SQL extensions are handled through SQLAlchemy's generic
+function and operator support::
+
+  table.select(table.c.password == func.md5("plaintext"))
+  table.select(table.c.username.op("regexp")("^[a-d]"))
+
+And of course any valid SQL statement can be executed as a string as well.
+
+Some limited direct support for MySQL / MariaDB extensions to SQL is currently
+available.
+
+* INSERT..ON DUPLICATE KEY UPDATE:  See
+  :ref:`mysql_insert_on_duplicate_key_update`
+
+* SELECT pragma, use :meth:`_expression.Select.prefix_with` and
+  :meth:`_query.Query.prefix_with`::
+
+    select(...).prefix_with(["HIGH_PRIORITY", "SQL_SMALL_RESULT"])
+
+* UPDATE with LIMIT::
+
+    update(...).with_dialect_options(mysql_limit=10, mariadb_limit=10)
+
+* DELETE
+  with LIMIT::
+
+    delete(...).with_dialect_options(mysql_limit=10, mariadb_limit=10)
+
+  .. versionadded:: 2.0.37 Added delete with limit
+
+* optimizer hints, use :meth:`_expression.Select.prefix_with` and
+  :meth:`_query.Query.prefix_with`::
+
+    select(...).prefix_with("/*+ NO_RANGE_OPTIMIZATION(t4 PRIMARY) */")
+
+* index hints, use :meth:`_expression.Select.with_hint` and
+  :meth:`_query.Query.with_hint`::
+
+    select(...).with_hint(some_table, "USE INDEX xyz")
+
+* MATCH
+  operator support::
+
+        from sqlalchemy.dialects.mysql import match
+
+        select(...).where(match(col1, col2, against="some expr").in_boolean_mode())
+
+  .. seealso::
+
+    :class:`_mysql.match`
+
+INSERT/DELETE...RETURNING
+-------------------------
+
+The MariaDB dialect supports 10.5+'s ``INSERT..RETURNING`` and
+``DELETE..RETURNING`` (10.0+) syntaxes.   ``INSERT..RETURNING`` may be used
+automatically in some cases in order to fetch newly generated identifiers in
+place of the traditional approach of using ``cursor.lastrowid``, however
+``cursor.lastrowid`` is currently still preferred for simple single-statement
+cases for its better performance.
+
+To specify an explicit ``RETURNING`` clause, use the
+:meth:`._UpdateBase.returning` method on a per-statement basis::
+
+    # INSERT..RETURNING
+    result = connection.execute(
+        table.insert().values(name="foo").returning(table.c.col1, table.c.col2)
+    )
+    print(result.all())
+
+    # DELETE..RETURNING
+    result = connection.execute(
+        table.delete()
+        .where(table.c.name == "foo")
+        .returning(table.c.col1, table.c.col2)
+    )
+    print(result.all())
+
+.. versionadded:: 2.0  Added support for MariaDB RETURNING
+
+.. _mysql_insert_on_duplicate_key_update:
+
+INSERT...ON DUPLICATE KEY UPDATE (Upsert)
+------------------------------------------
+
+MySQL / MariaDB allow "upserts" (update or insert)
+of rows into a table via the ``ON DUPLICATE KEY UPDATE`` clause of the
+``INSERT`` statement.  A candidate row will only be inserted if that row does
+not match an existing primary or unique key in the table; otherwise, an UPDATE
+will be performed.   The statement allows for separate specification of the
+values to INSERT versus the values for UPDATE.
+
+SQLAlchemy provides ``ON DUPLICATE KEY UPDATE`` support via the MySQL-specific
+:func:`.mysql.insert()` function, which provides
+the generative method :meth:`~.mysql.Insert.on_duplicate_key_update`:
+
+.. sourcecode:: pycon+sql
+
+    >>> from sqlalchemy.dialects.mysql import insert
+
+    >>> insert_stmt = insert(my_table).values(
+    ...     id="some_existing_id", data="inserted value"
+    ... )
+
+    >>> on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(
+    ...     data=insert_stmt.inserted.data, status="U"
+    ... )
+    >>> print(on_duplicate_key_stmt)
+    {printsql}INSERT INTO my_table (id, data) VALUES (%s, %s)
+    ON DUPLICATE KEY UPDATE data = VALUES(data), status = %s
+
+
+Unlike PostgreSQL's "ON CONFLICT" phrase, the "ON DUPLICATE KEY UPDATE"
+phrase will always match on any primary key or unique key, and will always
+perform an UPDATE if there's a match; there are no options for it to raise
+an error or to skip performing an UPDATE.
+
+``ON DUPLICATE KEY UPDATE`` is used to perform an update of the already
+existing row, using any combination of new values as well as values
+from the proposed insertion.   These values are normally specified using
+keyword arguments passed to the
+:meth:`_mysql.Insert.on_duplicate_key_update`
+given column key values (usually the name of the column, unless it
+specifies :paramref:`_schema.Column.key`
+) as keys and literal or SQL expressions
+as values:
+
+.. sourcecode:: pycon+sql
+
+    >>> insert_stmt = insert(my_table).values(
+    ...     id="some_existing_id", data="inserted value"
+    ... )
+
+    >>> on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(
+    ...     data="some data",
+    ...     updated_at=func.current_timestamp(),
+    ... )
+
+    >>> print(on_duplicate_key_stmt)
+    {printsql}INSERT INTO my_table (id, data) VALUES (%s, %s)
+    ON DUPLICATE KEY UPDATE data = %s, updated_at = CURRENT_TIMESTAMP
+
+In a manner similar to that of :meth:`.UpdateBase.values`, other parameter
+forms are accepted, including a single dictionary:
+
+.. sourcecode:: pycon+sql
+
+    >>> on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(
+    ...     {"data": "some data", "updated_at": func.current_timestamp()},
+    ... )
+
+as well as a list of 2-tuples, which will automatically provide
+a parameter-ordered UPDATE statement in a manner similar to that described
+at :ref:`tutorial_parameter_ordered_updates`.  Unlike the :class:`_expression.Update`
+object,
+no special flag is needed to specify the intent since the argument form in
+this context is unambiguous:
+
+.. sourcecode:: pycon+sql
+
+    >>> on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(
+    ...     [
+    ...         ("data", "some data"),
+    ...         ("updated_at", func.current_timestamp()),
+    ...     ]
+    ... )
+
+    >>> print(on_duplicate_key_stmt)
+    {printsql}INSERT INTO my_table (id, data) VALUES (%s, %s)
+    ON DUPLICATE KEY UPDATE data = %s, updated_at = CURRENT_TIMESTAMP
+
+.. versionchanged:: 1.3 support for parameter-ordered UPDATE clause within
+   MySQL ON DUPLICATE KEY UPDATE
+
+.. warning::
+
+    The :meth:`_mysql.Insert.on_duplicate_key_update`
+    method does **not** take into
+    account Python-side default UPDATE values or generation functions,
+    e.g. those specified using :paramref:`_schema.Column.onupdate`.
+    These values will not be exercised for an ON DUPLICATE KEY style of UPDATE,
+    unless they are manually specified explicitly in the parameters.
+
+
+
+In order to refer to the proposed insertion row, the special alias
+:attr:`_mysql.Insert.inserted` is available as an attribute on
+the :class:`_mysql.Insert` object; this object is a
+:class:`_expression.ColumnCollection` which contains all columns of the target
+table:
+
+.. sourcecode:: pycon+sql
+
+    >>> stmt = insert(my_table).values(
+    ...     id="some_id", data="inserted value", author="jlh"
+    ... )
+
+    >>> do_update_stmt = stmt.on_duplicate_key_update(
+    ...     data="updated value", author=stmt.inserted.author
+    ... )
+
+    >>> print(do_update_stmt)
+    {printsql}INSERT INTO my_table (id, data, author) VALUES (%s, %s, %s)
+    ON DUPLICATE KEY UPDATE data = %s, author = VALUES(author)
+
+When rendered, the "inserted" namespace will produce the expression
+``VALUES(<columnname>)``.
+
+.. versionadded:: 1.2 Added support for MySQL ON DUPLICATE KEY UPDATE clause
+
+
+
+rowcount Support
+----------------
+
+SQLAlchemy standardizes the DBAPI ``cursor.rowcount`` attribute to be the
+usual definition of "number of rows matched by an UPDATE or DELETE" statement.
+This is in contradiction to the default setting on most MySQL DBAPI drivers,
+which is "number of rows actually modified/deleted".  For this reason, the
+SQLAlchemy MySQL dialects always add the ``constants.CLIENT.FOUND_ROWS``
+flag, or whatever is equivalent for the target dialect, upon connection.
+This setting is currently hardcoded.
+
+.. seealso::
+
+    :attr:`_engine.CursorResult.rowcount`
+
+
+.. _mysql_indexes:
+
+MySQL / MariaDB-Specific Index Options
+-----------------------------------------
+
+MySQL and MariaDB-specific extensions to the :class:`.Index` construct are available.
+
+Index Length
+~~~~~~~~~~~~~
+
+MySQL and MariaDB both provide an option to create index entries with a certain length, where
+"length" refers to the number of characters or bytes in each value which will
+become part of the index. SQLAlchemy provides this feature via the
+``mysql_length`` and/or ``mariadb_length`` parameters::
+
+    Index("my_index", my_table.c.data, mysql_length=10, mariadb_length=10)
+
+    Index("a_b_idx", my_table.c.a, my_table.c.b, mysql_length={"a": 4, "b": 9})
+
+    Index(
+        "a_b_idx", my_table.c.a, my_table.c.b, mariadb_length={"a": 4, "b": 9}
+    )
+
+Prefix lengths are given in characters for nonbinary string types and in bytes
+for binary string types. The value passed to the keyword argument *must* be
+either an integer (and, thus, specify the same prefix length value for all
+columns of the index) or a dict in which keys are column names and values are
+prefix length values for corresponding columns. MySQL and MariaDB only allow a
+length for a column of an index if it is for a CHAR, VARCHAR, TEXT, BINARY,
+VARBINARY and BLOB.
+
+Index Prefixes
+~~~~~~~~~~~~~~
+
+MySQL storage engines permit you to specify an index prefix when creating
+an index. SQLAlchemy provides this feature via the
+``mysql_prefix`` parameter on :class:`.Index`::
+
+    Index("my_index", my_table.c.data, mysql_prefix="FULLTEXT")
+
+The value passed to the keyword argument will be simply passed through to the
+underlying CREATE INDEX, so it *must* be a valid index prefix for your MySQL
+storage engine.
+
+.. seealso::
+
+    `CREATE INDEX <https://dev.mysql.com/doc/refman/5.0/en/create-index.html>`_ - MySQL documentation
+
+Index Types
+~~~~~~~~~~~~~
+
+Some MySQL storage engines permit you to specify an index type when creating
+an index or primary key constraint. SQLAlchemy provides this feature via the
+``mysql_using`` parameter on :class:`.Index`::
+
+    Index(
+        "my_index", my_table.c.data, mysql_using="hash", mariadb_using="hash"
+    )
+
+As well as the ``mysql_using`` parameter on :class:`.PrimaryKeyConstraint`::
+
+    PrimaryKeyConstraint("data", mysql_using="hash", mariadb_using="hash")
+
+The value passed to the keyword argument will be simply passed through to the
+underlying CREATE INDEX or PRIMARY KEY clause, so it *must* be a valid index
+type for your MySQL storage engine.
+
+More information can be found at:
+
+https://dev.mysql.com/doc/refman/5.0/en/create-index.html
+
+https://dev.mysql.com/doc/refman/5.0/en/create-table.html
+
+Index Parsers
+~~~~~~~~~~~~~
+
+CREATE FULLTEXT INDEX in MySQL also supports a "WITH PARSER" option.  This
+is available using the keyword argument ``mysql_with_parser``::
+
+    Index(
+        "my_index",
+        my_table.c.data,
+        mysql_prefix="FULLTEXT",
+        mysql_with_parser="ngram",
+        mariadb_prefix="FULLTEXT",
+        mariadb_with_parser="ngram",
+    )
+
+.. versionadded:: 1.3
+
+
+.. _mysql_foreign_keys:
+
+MySQL / MariaDB Foreign Keys
+-----------------------------
+
+MySQL and MariaDB's behavior regarding foreign keys has some important caveats.
+
+Foreign Key Arguments to Avoid
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Neither MySQL nor MariaDB support the foreign key arguments "DEFERRABLE", "INITIALLY",
+or "MATCH".  Using the ``deferrable`` or ``initially`` keyword argument with
+:class:`_schema.ForeignKeyConstraint` or :class:`_schema.ForeignKey`
+will have the effect of
+these keywords being rendered in a DDL expression, which will then raise an
+error on MySQL or MariaDB.  In order to use these keywords on a foreign key while having
+them ignored on a MySQL / MariaDB backend, use a custom compile rule::
+
+    from sqlalchemy.ext.compiler import compiles
+    from sqlalchemy.schema import ForeignKeyConstraint
+
+
+    @compiles(ForeignKeyConstraint, "mysql", "mariadb")
+    def process(element, compiler, **kw):
+        element.deferrable = element.initially = None
+        return compiler.visit_foreign_key_constraint(element, **kw)
+
+The "MATCH" keyword is in fact more insidious, and is explicitly disallowed
+by SQLAlchemy in conjunction with the MySQL or MariaDB backends.  This argument is
+silently ignored by MySQL / MariaDB, but in addition has the effect of ON UPDATE and ON
+DELETE options also being ignored by the backend.   Therefore MATCH should
+never be used with the MySQL / MariaDB backends; as is the case with DEFERRABLE and
+INITIALLY, custom compilation rules can be used to correct a
+ForeignKeyConstraint at DDL definition time.
+
+Reflection of Foreign Key Constraints
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Not all MySQL / MariaDB storage engines support foreign keys.  When using the
+very common ``MyISAM`` MySQL storage engine, the information loaded by table
+reflection will not include foreign keys.  For these tables, you may supply a
+:class:`~sqlalchemy.ForeignKeyConstraint` at reflection time::
+
+  Table(
+      "mytable",
+      metadata,
+      ForeignKeyConstraint(["other_id"], ["othertable.other_id"]),
+      autoload_with=engine,
+  )
+
+.. seealso::
+
+    :ref:`mysql_storage_engines`
+
+.. _mysql_unique_constraints:
+
+MySQL / MariaDB Unique Constraints and Reflection
+----------------------------------------------------
+
+SQLAlchemy supports both the :class:`.Index` construct with the
+flag ``unique=True``, indicating a UNIQUE index, as well as the
+:class:`.UniqueConstraint` construct, representing a UNIQUE constraint.
+Both objects/syntaxes are supported by MySQL / MariaDB when emitting DDL to create
+these constraints.  However, MySQL / MariaDB does not have a unique constraint
+construct that is separate from a unique index; that is, the "UNIQUE"
+constraint on MySQL / MariaDB is equivalent to creating a "UNIQUE INDEX".
+
+When reflecting these constructs, the
+:meth:`_reflection.Inspector.get_indexes`
+and the :meth:`_reflection.Inspector.get_unique_constraints`
+methods will **both**
+return an entry for a UNIQUE index in MySQL / MariaDB.  However, when performing
+full table reflection using ``Table(..., autoload_with=engine)``,
+the :class:`.UniqueConstraint` construct is
+**not** part of the fully reflected :class:`_schema.Table` construct under any
+circumstances; this construct is always represented by a :class:`.Index`
+with the ``unique=True`` setting present in the :attr:`_schema.Table.indexes`
+collection.
+
+
+TIMESTAMP / DATETIME issues
+---------------------------
+
+.. _mysql_timestamp_onupdate:
+
+Rendering ON UPDATE CURRENT TIMESTAMP for MySQL / MariaDB's explicit_defaults_for_timestamp
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+MySQL / MariaDB have historically expanded the DDL for the :class:`_types.TIMESTAMP`
+datatype into the phrase "TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE
+CURRENT_TIMESTAMP", which includes non-standard SQL that automatically updates
+the column with the current timestamp when an UPDATE occurs, eliminating the
+usual need to use a trigger in such a case where server-side update changes are
+desired.
+
+MySQL 5.6 introduced a new flag `explicit_defaults_for_timestamp
+<https://dev.mysql.com/doc/refman/5.6/en/server-system-variables.html
+#sysvar_explicit_defaults_for_timestamp>`_ which disables the above behavior,
+and in MySQL 8 this flag defaults to true, meaning in order to get a MySQL
+"on update timestamp" without changing this flag, the above DDL must be
+rendered explicitly.   Additionally, the same DDL is valid for use of the
+``DATETIME`` datatype as well.
+
+SQLAlchemy's MySQL dialect does not yet have an option to generate
+MySQL's "ON UPDATE CURRENT_TIMESTAMP" clause, noting that this is not a general
+purpose "ON UPDATE" as there is no such syntax in standard SQL.  SQLAlchemy's
+:paramref:`_schema.Column.server_onupdate` parameter is currently not related
+to this special MySQL behavior.
+
+To generate this DDL, make use of the :paramref:`_schema.Column.server_default`
+parameter and pass a textual clause that also includes the ON UPDATE clause::
+
+    from sqlalchemy import Table, MetaData, Column, Integer, String, TIMESTAMP
+    from sqlalchemy import text
+
+    metadata = MetaData()
+
+    mytable = Table(
+        "mytable",
+        metadata,
+        Column("id", Integer, primary_key=True),
+        Column("data", String(50)),
+        Column(
+            "last_updated",
+            TIMESTAMP,
+            server_default=text(
+                "CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP"
+            ),
+        ),
+    )
+
+The same instructions apply to use of the :class:`_types.DateTime` and
+:class:`_types.DATETIME` datatypes::
+
+    from sqlalchemy import DateTime
+
+    mytable = Table(
+        "mytable",
+        metadata,
+        Column("id", Integer, primary_key=True),
+        Column("data", String(50)),
+        Column(
+            "last_updated",
+            DateTime,
+            server_default=text(
+                "CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP"
+            ),
+        ),
+    )
+
+Even though the :paramref:`_schema.Column.server_onupdate` feature does not
+generate this DDL, it still may be desirable to signal to the ORM that this
+updated value should be fetched.  This syntax looks like the following::
+
+    from sqlalchemy.schema import FetchedValue
+
+
+    class MyClass(Base):
+        __tablename__ = "mytable"
+
+        id = Column(Integer, primary_key=True)
+        data = Column(String(50))
+        last_updated = Column(
+            TIMESTAMP,
+            server_default=text(
+                "CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP"
+            ),
+            server_onupdate=FetchedValue(),
+        )
+
+.. _mysql_timestamp_null:
+
+TIMESTAMP Columns and NULL
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+MySQL historically enforces that a column which specifies the
+TIMESTAMP datatype implicitly includes a default value of
+CURRENT_TIMESTAMP, even though this is not stated, and additionally
+sets the column as NOT NULL, the opposite behavior vs. that of all
+other datatypes:
+
+.. sourcecode:: text
+
+    mysql> CREATE TABLE ts_test (
+        -> a INTEGER,
+        -> b INTEGER NOT NULL,
+        -> c TIMESTAMP,
+        -> d TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        -> e TIMESTAMP NULL);
+    Query OK, 0 rows affected (0.03 sec)
+
+    mysql> SHOW CREATE TABLE ts_test;
+    +---------+-----------------------------------------------------
+    | Table   | Create Table
+    +---------+-----------------------------------------------------
+    | ts_test | CREATE TABLE `ts_test` (
+      `a` int(11) DEFAULT NULL,
+      `b` int(11) NOT NULL,
+      `c` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+      `d` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
+      `e` timestamp NULL DEFAULT NULL
+    ) ENGINE=MyISAM DEFAULT CHARSET=latin1
+
+Above, we see that an INTEGER column defaults to NULL, unless it is specified
+with NOT NULL.   But when the column is of type TIMESTAMP, an implicit
+default of CURRENT_TIMESTAMP is generated which also coerces the column
+to be a NOT NULL, even though we did not specify it as such.
+
+This behavior of MySQL can be changed on the MySQL side using the
+`explicit_defaults_for_timestamp
+<https://dev.mysql.com/doc/refman/5.6/en/server-system-variables.html
+#sysvar_explicit_defaults_for_timestamp>`_ configuration flag introduced in
+MySQL 5.6.  With this server setting enabled, TIMESTAMP columns behave like
+any other datatype on the MySQL side with regards to defaults and nullability.
+
+However, to accommodate the vast majority of MySQL databases that do not
+specify this new flag, SQLAlchemy emits the "NULL" specifier explicitly with
+any TIMESTAMP column that does not specify ``nullable=False``.   In order to
+accommodate newer databases that specify ``explicit_defaults_for_timestamp``,
+SQLAlchemy also emits NOT NULL for TIMESTAMP columns that do specify
+``nullable=False``.   The following example illustrates::
+
+    from sqlalchemy import MetaData, Integer, Table, Column, text
+    from sqlalchemy.dialects.mysql import TIMESTAMP
+
+    m = MetaData()
+    t = Table(
+        "ts_test",
+        m,
+        Column("a", Integer),
+        Column("b", Integer, nullable=False),
+        Column("c", TIMESTAMP),
+        Column("d", TIMESTAMP, nullable=False),
+    )
+
+
+    from sqlalchemy import create_engine
+
+    e = create_engine("mysql+mysqldb://scott:tiger@localhost/test", echo=True)
+    m.create_all(e)
+
+output:
+
+.. sourcecode:: sql
+
+    CREATE TABLE ts_test (
+        a INTEGER,
+        b INTEGER NOT NULL,
+        c TIMESTAMP NULL,
+        d TIMESTAMP NOT NULL
+    )
+
+"""  # noqa
+from __future__ import annotations
+
+from array import array as _array
+from collections import defaultdict
+from itertools import compress
+import re
+from typing import cast
+
+from . import reflection as _reflection
+from .enumerated import ENUM
+from .enumerated import SET
+from .json import JSON
+from .json import JSONIndexType
+from .json import JSONPathType
+from .reserved_words import RESERVED_WORDS_MARIADB
+from .reserved_words import RESERVED_WORDS_MYSQL
+from .types import _FloatType
+from .types import _IntegerType
+from .types import _MatchType
+from .types import _NumericType
+from .types import _StringType
+from .types import BIGINT
+from .types import BIT
+from .types import CHAR
+from .types import DATETIME
+from .types import DECIMAL
+from .types import DOUBLE
+from .types import FLOAT
+from .types import INTEGER
+from .types import LONGBLOB
+from .types import LONGTEXT
+from .types import MEDIUMBLOB
+from .types import MEDIUMINT
+from .types import MEDIUMTEXT
+from .types import NCHAR
+from .types import NUMERIC
+from .types import NVARCHAR
+from .types import REAL
+from .types import SMALLINT
+from .types import TEXT
+from .types import TIME
+from .types import TIMESTAMP
+from .types import TINYBLOB
+from .types import TINYINT
+from .types import TINYTEXT
+from .types import VARCHAR
+from .types import YEAR
+from ... import exc
+from ... import literal_column
+from ... import log
+from ... import schema as sa_schema
+from ... import sql
+from ... import util
+from ...engine import cursor as _cursor
+from ...engine import default
+from ...engine import reflection
+from ...engine.reflection import ReflectionDefaults
+from ...sql import coercions
+from ...sql import compiler
+from ...sql import elements
+from ...sql import functions
+from ...sql import operators
+from ...sql import roles
+from ...sql import sqltypes
+from ...sql import util as sql_util
+from ...sql import visitors
+from ...sql.compiler import InsertmanyvaluesSentinelOpts
+from ...sql.compiler import SQLCompiler
+from ...sql.schema import SchemaConst
+from ...types import BINARY
+from ...types import BLOB
+from ...types import BOOLEAN
+from ...types import DATE
+from ...types import UUID
+from ...types import VARBINARY
+from ...util import topological
+
+
+# Matches the start of a "SET [GLOBAL | SESSION] <variable>" statement,
+# case-insensitively, with optional leading whitespace.
+# NOTE(review): the consumer of this pattern is not visible in this chunk;
+# presumably used to recognize SET commands in executed SQL — confirm usage.
+SET_RE = re.compile(
+    r"\s*SET\s+(?:(?:GLOBAL|SESSION)\s+)?\w", re.I | re.UNICODE
+)
+
+# old names
+# Legacy "MS"-prefixed aliases for the MySQL type objects, retained so that
+# code written against historical versions of this dialect keeps importing;
+# new code should use the uppercase type names directly.
+MSTime = TIME
+MSSet = SET
+MSEnum = ENUM
+MSLongBlob = LONGBLOB
+MSMediumBlob = MEDIUMBLOB
+MSTinyBlob = TINYBLOB
+MSBlob = BLOB
+MSBinary = BINARY
+MSVarBinary = VARBINARY
+MSNChar = NCHAR
+MSNVarChar = NVARCHAR
+MSChar = CHAR
+MSString = VARCHAR
+MSLongText = LONGTEXT
+MSMediumText = MEDIUMTEXT
+MSTinyText = TINYTEXT
+MSText = TEXT
+MSYear = YEAR
+MSTimeStamp = TIMESTAMP
+MSBit = BIT
+MSSmallInteger = SMALLINT
+MSTinyInteger = TINYINT
+MSMediumInteger = MEDIUMINT
+MSBigInteger = BIGINT
+MSNumeric = NUMERIC
+MSDecimal = DECIMAL
+MSDouble = DOUBLE
+MSReal = REAL
+MSFloat = FLOAT
+MSInteger = INTEGER
+
+# Maps generic SQLAlchemy types to the MySQL-specific type implementations
+# that this dialect substitutes for them at compile/result time.
+colspecs = {
+    _IntegerType: _IntegerType,
+    _NumericType: _NumericType,
+    _FloatType: _FloatType,
+    sqltypes.Numeric: NUMERIC,
+    sqltypes.Float: FLOAT,
+    sqltypes.Double: DOUBLE,
+    sqltypes.Time: TIME,
+    sqltypes.Enum: ENUM,
+    sqltypes.MatchType: _MatchType,
+    sqltypes.JSON: JSON,
+    sqltypes.JSON.JSONIndexType: JSONIndexType,
+    sqltypes.JSON.JSONPathType: JSONPathType,
+}
+
+# Everything 3.23 through 5.1 excepting OpenGIS types.
+# Maps the lower-cased type-name strings as they appear in MySQL/MariaDB
+# DDL and information_schema to the corresponding SQLAlchemy type classes.
+ischema_names = {
+    "bigint": BIGINT,
+    "binary": BINARY,
+    "bit": BIT,
+    "blob": BLOB,
+    "boolean": BOOLEAN,
+    "char": CHAR,
+    "date": DATE,
+    "datetime": DATETIME,
+    "decimal": DECIMAL,
+    "double": DOUBLE,
+    "enum": ENUM,
+    "fixed": DECIMAL,
+    "float": FLOAT,
+    "int": INTEGER,
+    "integer": INTEGER,
+    "json": JSON,
+    "longblob": LONGBLOB,
+    "longtext": LONGTEXT,
+    "mediumblob": MEDIUMBLOB,
+    "mediumint": MEDIUMINT,
+    "mediumtext": MEDIUMTEXT,
+    "nchar": NCHAR,
+    "nvarchar": NVARCHAR,
+    "numeric": NUMERIC,
+    "set": SET,
+    "smallint": SMALLINT,
+    "text": TEXT,
+    "time": TIME,
+    "timestamp": TIMESTAMP,
+    "tinyblob": TINYBLOB,
+    "tinyint": TINYINT,
+    "tinytext": TINYTEXT,
+    "uuid": UUID,
+    "varbinary": VARBINARY,
+    "varchar": VARCHAR,
+    "year": YEAR,
+}
+
+
+class MySQLExecutionContext(default.DefaultExecutionContext):
+    """Execution-context customizations shared by the MySQL/MariaDB DBAPIs."""
+
+    def post_exec(self):
+        # Manufacture an empty result when DELETE..RETURNING matched no rows
+        # and the driver produced no cursor.description (issue #10505).
+        if (
+            self.isdelete
+            and cast(SQLCompiler, self.compiled).effective_returning
+            and not self.cursor.description
+        ):
+            # All MySQL/mariadb drivers appear to not include
+            # cursor.description for DELETE..RETURNING with no rows if the
+            # WHERE criteria is a straight "false" condition such as our EMPTY
+            # IN condition. manufacture an empty result in this case (issue
+            # #10505)
+            #
+            # taken from cx_Oracle implementation
+            self.cursor_fetch_strategy = (
+                _cursor.FullyBufferedCursorFetchStrategy(
+                    self.cursor,
+                    [
+                        (entry.keyname, None)
+                        for entry in cast(
+                            SQLCompiler, self.compiled
+                        )._result_columns
+                    ],
+                    [],
+                )
+            )
+
+    def create_server_side_cursor(self):
+        """Return a driver server-side cursor.
+
+        Raises ``NotImplementedError`` when the dialect does not support
+        server-side cursors.
+        """
+        if self.dialect.supports_server_side_cursors:
+            return self._dbapi_connection.cursor(self.dialect._sscursor)
+        else:
+            raise NotImplementedError()
+
+    def fire_sequence(self, seq, type_):
+        """Fetch the next value of a sequence via ``select nextval(<seq>)``."""
+        return self._execute_scalar(
+            (
+                "select nextval(%s)"
+                % self.identifier_preparer.format_sequence(seq)
+            ),
+            type_,
+        )
+
+
+class MySQLCompiler(compiler.SQLCompiler):
+    render_table_with_column_in_update_from = True
+    """Overridden from base SQLCompiler value"""
+
+    extract_map = compiler.SQLCompiler.extract_map.copy()
+    extract_map.update({"milliseconds": "millisecond"})
+
+    def default_from(self):
+        """Called when a ``SELECT`` statement has no froms,
+        and no ``FROM`` clause is to be appended.
+
+        """
+        if self.stack:
+            stmt = self.stack[-1]["selectable"]
+            if stmt._where_criteria:
+                # a SELECT with a WHERE clause but no tables needs a dummy
+                # FROM on MySQL; DUAL serves as that dummy table
+                return " FROM DUAL"
+
+        return ""
+
+    def visit_random_func(self, fn, **kw):
+        """Render the generic ``random()`` function as MySQL's ``rand()``."""
+        return "rand%s" % self.function_argspec(fn)
+
+    def visit_rollup_func(self, fn, **kw):
+        """Render ``rollup(a, b, ...)`` as ``a, b, ... WITH ROLLUP``."""
+        clause = ", ".join(
+            elem._compiler_dispatch(self, **kw) for elem in fn.clauses
+        )
+        return f"{clause} WITH ROLLUP"
+
+    def visit_aggregate_strings_func(self, fn, **kw):
+        """Render ``aggregate_strings`` as ``group_concat(expr SEPARATOR d)``."""
+        # exactly two clauses expected: the expression and the separator
+        expr, delimeter = (
+            elem._compiler_dispatch(self, **kw) for elem in fn.clauses
+        )
+        return f"group_concat({expr} SEPARATOR {delimeter})"
+
+    def visit_sequence(self, seq, **kw):
+        """Render sequence access as ``nextval(<sequence name>)``."""
+        return "nextval(%s)" % self.preparer.format_sequence(seq)
+
+    def visit_sysdate_func(self, fn, **kw):
+        """Render the ``sysdate`` function as MySQL's ``SYSDATE()``."""
+        return "SYSDATE()"
+
+    def _render_json_extract_from_binary(self, binary, operator, **kw):
+        """Render a JSON element/path access as ``JSON_EXTRACT(...)``.
+
+        When the expression's result type is not JSON, the extraction is
+        wrapped in a CASE that maps the JSON ``'null'`` literal to SQL NULL
+        and CASTs/coerces the value according to the target type affinity.
+        """
+        # note we are intentionally calling upon the process() calls in the
+        # order in which they appear in the SQL String as this is used
+        # by positional parameter rendering
+
+        if binary.type._type_affinity is sqltypes.JSON:
+            return "JSON_EXTRACT(%s, %s)" % (
+                self.process(binary.left, **kw),
+                self.process(binary.right, **kw),
+            )
+
+        # for non-JSON, MySQL doesn't handle JSON null at all so it has to
+        # be explicit
+        case_expression = "CASE JSON_EXTRACT(%s, %s) WHEN 'null' THEN NULL" % (
+            self.process(binary.left, **kw),
+            self.process(binary.right, **kw),
+        )
+
+        if binary.type._type_affinity is sqltypes.Integer:
+            type_expression = (
+                "ELSE CAST(JSON_EXTRACT(%s, %s) AS SIGNED INTEGER)"
+                % (
+                    self.process(binary.left, **kw),
+                    self.process(binary.right, **kw),
+                )
+            )
+        elif binary.type._type_affinity is sqltypes.Numeric:
+            if (
+                binary.type.scale is not None
+                and binary.type.precision is not None
+            ):
+                # using DECIMAL here because MySQL does not recognize NUMERIC
+                type_expression = (
+                    "ELSE CAST(JSON_EXTRACT(%s, %s) AS DECIMAL(%s, %s))"
+                    % (
+                        self.process(binary.left, **kw),
+                        self.process(binary.right, **kw),
+                        binary.type.precision,
+                        binary.type.scale,
+                    )
+                )
+            else:
+                # FLOAT / REAL not added in MySQL til 8.0.17
+                type_expression = (
+                    "ELSE JSON_EXTRACT(%s, %s)+0.0000000000000000000000"
+                    % (
+                        self.process(binary.left, **kw),
+                        self.process(binary.right, **kw),
+                    )
+                )
+        elif binary.type._type_affinity is sqltypes.Boolean:
+            # the NULL handling is particularly weird with boolean, so
+            # explicitly return true/false constants
+            type_expression = "WHEN true THEN true ELSE false"
+        elif binary.type._type_affinity is sqltypes.String:
+            # (gord): this fails with a JSON value that's a four byte unicode
+            # string.  SQLite has the same problem at the moment
+            # (zzzeek): I'm not really sure.  let's take a look at a test case
+            # that hits each backend and maybe make a requires rule for it?
+            type_expression = "ELSE JSON_UNQUOTE(JSON_EXTRACT(%s, %s))" % (
+                self.process(binary.left, **kw),
+                self.process(binary.right, **kw),
+            )
+        else:
+            # other affinity....this is not expected right now
+            type_expression = "ELSE JSON_EXTRACT(%s, %s)" % (
+                self.process(binary.left, **kw),
+                self.process(binary.right, **kw),
+            )
+
+        return case_expression + " " + type_expression + " END"
+
+    def visit_json_getitem_op_binary(self, binary, operator, **kw):
+        """Render JSON index access via ``JSON_EXTRACT``."""
+        return self._render_json_extract_from_binary(binary, operator, **kw)
+
+    def visit_json_path_getitem_op_binary(self, binary, operator, **kw):
+        """Render JSON path access via ``JSON_EXTRACT``."""
+        return self._render_json_extract_from_binary(binary, operator, **kw)
+
+    def visit_on_duplicate_key_update(self, on_duplicate, **kw):
+        """Render the ``ON DUPLICATE KEY UPDATE`` clause of a MySQL INSERT.
+
+        Emits either the legacy ``VALUES(col)`` form or, when the dialect
+        requires it (MySQL 8's row-alias syntax), an ``AS <alias>`` prefix
+        with alias-qualified column references.
+        """
+        statement = self.current_executable
+
+        if on_duplicate._parameter_ordering:
+            parameter_ordering = [
+                coercions.expect(roles.DMLColumnRole, key)
+                for key in on_duplicate._parameter_ordering
+            ]
+            ordered_keys = set(parameter_ordering)
+            # explicitly ordered columns first, remaining table columns after
+            cols = [
+                statement.table.c[key]
+                for key in parameter_ordering
+                if key in statement.table.c
+            ] + [c for c in statement.table.c if c.key not in ordered_keys]
+        else:
+            cols = statement.table.c
+
+        clauses = []
+
+        requires_mysql8_alias = statement.select is None and (
+            self.dialect._requires_alias_for_on_duplicate_key
+        )
+
+        if requires_mysql8_alias:
+            # avoid colliding with a table actually named "new"
+            if statement.table.name.lower() == "new":
+                _on_dup_alias_name = "new_1"
+            else:
+                _on_dup_alias_name = "new"
+
+        on_duplicate_update = {
+            coercions.expect_as_key(roles.DMLColumnRole, key): value
+            for key, value in on_duplicate.update.items()
+        }
+
+        # traverses through all table columns to preserve table column order
+        for column in (col for col in cols if col.key in on_duplicate_update):
+            val = on_duplicate_update[column.key]
+
+            # TODO: this coercion should be up front.  we can't cache
+            # SQL constructs with non-bound literals buried in them
+            if coercions._is_literal(val):
+                val = elements.BindParameter(None, val, type_=column.type)
+                value_text = self.process(val.self_group(), use_schema=False)
+            else:
+
+                def replace(obj):
+                    if (
+                        isinstance(obj, elements.BindParameter)
+                        and obj.type._isnull
+                    ):
+                        # give typeless bind params the target column's type
+                        obj = obj._clone()
+                        obj.type = column.type
+                        return obj
+                    elif (
+                        isinstance(obj, elements.ColumnClause)
+                        and obj.table is on_duplicate.inserted_alias
+                    ):
+                        # references to the "inserted" row render as either
+                        # the MySQL 8 alias form or legacy VALUES()
+                        if requires_mysql8_alias:
+                            column_literal_clause = (
+                                f"{_on_dup_alias_name}."
+                                f"{self.preparer.quote(obj.name)}"
+                            )
+                        else:
+                            column_literal_clause = (
+                                f"VALUES({self.preparer.quote(obj.name)})"
+                            )
+                        return literal_column(column_literal_clause)
+                    else:
+                        # element is not replaced
+                        return None
+
+                val = visitors.replacement_traverse(val, {}, replace)
+                value_text = self.process(val.self_group(), use_schema=False)
+
+            name_text = self.preparer.quote(column.name)
+            clauses.append("%s = %s" % (name_text, value_text))
+
+        # warn about update keys that match no column in the target table
+        non_matching = set(on_duplicate_update) - {c.key for c in cols}
+        if non_matching:
+            util.warn(
+                "Additional column names not matching "
+                "any column keys in table '%s': %s"
+                % (
+                    self.statement.table.name,
+                    (", ".join("'%s'" % c for c in non_matching)),
+                )
+            )
+
+        if requires_mysql8_alias:
+            return (
+                f"AS {_on_dup_alias_name} "
+                f"ON DUPLICATE KEY UPDATE {', '.join(clauses)}"
+            )
+        else:
+            return f"ON DUPLICATE KEY UPDATE {', '.join(clauses)}"
+
+    def visit_concat_op_expression_clauselist(
+        self, clauselist, operator, **kw
+    ):
+        """Render a multi-element string concatenation as ``concat(...)``."""
+        return "concat(%s)" % (
+            ", ".join(self.process(elem, **kw) for elem in clauselist.clauses)
+        )
+
+    def visit_concat_op_binary(self, binary, operator, **kw):
+        """Render a two-element string concatenation as ``concat(a, b)``."""
+        return "concat(%s, %s)" % (
+            self.process(binary.left, **kw),
+            self.process(binary.right, **kw),
+        )
+
+    # Allowed combinations of the three MATCH modifier flags; any other
+    # combination raises CompileError in visit_match_op_binary.
+    _match_valid_flag_combinations = frozenset(
+        (
+            # (boolean_mode, natural_language, query_expansion)
+            (False, False, False),
+            (True, False, False),
+            (False, True, False),
+            (False, False, True),
+            (False, True, True),
+        )
+    )
+
+    # SQL text for each flag, positionally aligned with the tuples above.
+    _match_flag_expressions = (
+        "IN BOOLEAN MODE",
+        "IN NATURAL LANGUAGE MODE",
+        "WITH QUERY EXPANSION",
+    )
+
+    def visit_mysql_match(self, element, **kw):
+        """Render the dialect-specific ``match()`` construct."""
+        return self.visit_match_op_binary(element, element.operator, **kw)
+
+    def visit_match_op_binary(self, binary, operator, **kw):
+        """
+        Note that `mysql_boolean_mode` is enabled by default because of
+        backward compatibility
+        """
+
+        modifiers = binary.modifiers
+
+        boolean_mode = modifiers.get("mysql_boolean_mode", True)
+        natural_language = modifiers.get("mysql_natural_language", False)
+        query_expansion = modifiers.get("mysql_query_expansion", False)
+
+        flag_combination = (boolean_mode, natural_language, query_expansion)
+
+        if flag_combination not in self._match_valid_flag_combinations:
+            flags = (
+                "in_boolean_mode=%s" % boolean_mode,
+                "in_natural_language_mode=%s" % natural_language,
+                "with_query_expansion=%s" % query_expansion,
+            )
+
+            flags = ", ".join(flags)
+
+            raise exc.CompileError("Invalid MySQL match flags: %s" % flags)
+
+        match_clause = binary.left
+        match_clause = self.process(match_clause, **kw)
+        against_clause = self.process(binary.right, **kw)
+
+        if any(flag_combination):
+            # compress() keeps only the flag strings whose corresponding
+            # boolean in flag_combination is True
+            flag_expressions = compress(
+                self._match_flag_expressions,
+                flag_combination,
+            )
+
+            against_clause = [against_clause]
+            against_clause.extend(flag_expressions)
+
+            against_clause = " ".join(against_clause)
+
+        return "MATCH (%s) AGAINST (%s)" % (match_clause, against_clause)
+
+    def get_from_hint_text(self, table, text):
+        """Return the FROM-clause hint text verbatim (e.g. index hints)."""
+        return text
+
+    def visit_typeclause(self, typeclause, type_=None, **kw):
+        """Return the type name usable inside ``CAST(expr AS <type>)``.
+
+        Returns ``None`` for datatypes that MySQL's CAST does not support;
+        ``visit_cast`` then skips the CAST entirely.
+        """
+        if type_ is None:
+            type_ = typeclause.type.dialect_impl(self.dialect)
+        if isinstance(type_, sqltypes.TypeDecorator):
+            # unwrap TypeDecorator and dispatch on the underlying impl
+            return self.visit_typeclause(typeclause, type_.impl, **kw)
+        elif isinstance(type_, sqltypes.Integer):
+            if getattr(type_, "unsigned", False):
+                return "UNSIGNED INTEGER"
+            else:
+                return "SIGNED INTEGER"
+        elif isinstance(type_, sqltypes.TIMESTAMP):
+            return "DATETIME"
+        elif isinstance(
+            type_,
+            (
+                sqltypes.DECIMAL,
+                sqltypes.DateTime,
+                sqltypes.Date,
+                sqltypes.Time,
+            ),
+        ):
+            return self.dialect.type_compiler_instance.process(type_)
+        elif isinstance(type_, sqltypes.String) and not isinstance(
+            type_, (ENUM, SET)
+        ):
+            # strings are cast as CHAR with adjusted length/charset
+            adapted = CHAR._adapt_string_for_cast(type_)
+            return self.dialect.type_compiler_instance.process(adapted)
+        elif isinstance(type_, sqltypes._Binary):
+            return "BINARY"
+        elif isinstance(type_, sqltypes.JSON):
+            return "JSON"
+        elif isinstance(type_, sqltypes.NUMERIC):
+            # CAST recognizes DECIMAL but not NUMERIC
+            return self.dialect.type_compiler_instance.process(type_).replace(
+                "NUMERIC", "DECIMAL"
+            )
+        elif (
+            isinstance(type_, sqltypes.Float)
+            and self.dialect._support_float_cast
+        ):
+            return self.dialect.type_compiler_instance.process(type_)
+        else:
+            return None
+
+    def visit_cast(self, cast, **kw):
+        """Render CAST, or skip it with a warning for unsupported types."""
+        type_ = self.process(cast.typeclause)
+        if type_ is None:
+            util.warn(
+                "Datatype %s does not support CAST on MySQL/MariaDb; "
+                "the CAST will be skipped."
+                % self.dialect.type_compiler_instance.process(
+                    cast.typeclause.type
+                )
+            )
+            # render the bare (grouped) expression instead of a CAST
+            return self.process(cast.clause.self_group(), **kw)
+
+        return "CAST(%s AS %s)" % (self.process(cast.clause, **kw), type_)
+
+    def render_literal_value(self, value, type_):
+        """Render a literal, doubling backslashes when the server treats
+        backslash as an escape character."""
+        value = super().render_literal_value(value, type_)
+        if self.dialect._backslash_escapes:
+            value = value.replace("\\", "\\\\")
+        return value
+
+    # override native_boolean=False behavior here, as
+    # MySQL still supports native boolean
+    def visit_true(self, element, **kw):
+        """Render the boolean literal ``true``."""
+        return "true"
+
+    def visit_false(self, element, **kw):
+        """Render the boolean literal ``false``."""
+        return "false"
+
+    def get_select_precolumns(self, select, **kw):
+        """Add special MySQL keywords in place of DISTINCT.
+
+        .. deprecated:: 1.4  This usage is deprecated.
+           :meth:`_expression.Select.prefix_with` should be used for special
+           keywords at the start of a SELECT.
+
+        """
+        if isinstance(select._distinct, str):
+            util.warn_deprecated(
+                "Sending string values for 'distinct' is deprecated in the "
+                "MySQL dialect and will be removed in a future release.  "
+                "Please use :meth:`.Select.prefix_with` for special keywords "
+                "at the start of a SELECT statement",
+                version="1.4",
+            )
+            # legacy path: emit the raw string upper-cased as the prefix
+            return select._distinct.upper() + " "
+
+        return super().get_select_precolumns(select, **kw)
+
+    def visit_join(self, join, asfrom=False, from_linter=None, **kwargs):
+        """Render a JOIN as INNER / LEFT OUTER / FULL OUTER JOIN."""
+        if from_linter:
+            # record the join edge so the linter can detect cartesian products
+            from_linter.edges.add((join.left, join.right))
+
+        if join.full:
+            join_type = " FULL OUTER JOIN "
+        elif join.isouter:
+            join_type = " LEFT OUTER JOIN "
+        else:
+            join_type = " INNER JOIN "
+
+        return "".join(
+            (
+                self.process(
+                    join.left, asfrom=True, from_linter=from_linter, **kwargs
+                ),
+                join_type,
+                self.process(
+                    join.right, asfrom=True, from_linter=from_linter, **kwargs
+                ),
+                " ON ",
+                self.process(join.onclause, from_linter=from_linter, **kwargs),
+            )
+        )
+
+    def for_update_clause(self, select, **kw):
+        """Render the FOR UPDATE clause, using MySQL's read-lock syntax
+        ``LOCK IN SHARE MODE`` for read locks, plus optional OF / NOWAIT /
+        SKIP LOCKED modifiers."""
+        if select._for_update_arg.read:
+            tmp = " LOCK IN SHARE MODE"
+        else:
+            tmp = " FOR UPDATE"
+
+        if select._for_update_arg.of and self.dialect.supports_for_update_of:
+            # collect distinct tables referenced by the "of" expressions
+            tables = util.OrderedSet()
+            for c in select._for_update_arg.of:
+                tables.update(sql_util.surface_selectables_only(c))
+
+            tmp += " OF " + ", ".join(
+                self.process(table, ashint=True, use_schema=False, **kw)
+                for table in tables
+            )
+
+        if select._for_update_arg.nowait:
+            tmp += " NOWAIT"
+
+        if select._for_update_arg.skip_locked:
+            tmp += " SKIP LOCKED"
+
+        return tmp
+
+    def limit_clause(self, select, **kw):
+        """Render MySQL's ``LIMIT [offset,] limit`` clause."""
+        # MySQL supports:
+        #   LIMIT <limit>
+        #   LIMIT <offset>, <limit>
+        # and in server versions > 3.3:
+        #   LIMIT <limit> OFFSET <offset>
+        # The latter is more readable for offsets but we're stuck with the
+        # former until we can refine dialects by server revision.
+
+        limit_clause, offset_clause = (
+            select._limit_clause,
+            select._offset_clause,
+        )
+
+        if limit_clause is None and offset_clause is None:
+            return ""
+        elif offset_clause is not None:
+            # As suggested by the MySQL docs, need to apply an
+            # artificial limit if one wasn't provided
+            # https://dev.mysql.com/doc/refman/5.0/en/select.html
+            if limit_clause is None:
+                # TODO: remove ??
+                # hardwire the upper limit.  Currently
+                # needed consistent with the usage of the upper
+                # bound as part of MySQL's "syntax" for OFFSET with
+                # no LIMIT.
+                # 18446744073709551615 == 2**64 - 1, max BIGINT UNSIGNED
+                return " \n LIMIT %s, %s" % (
+                    self.process(offset_clause, **kw),
+                    "18446744073709551615",
+                )
+            else:
+                return " \n LIMIT %s, %s" % (
+                    self.process(offset_clause, **kw),
+                    self.process(limit_clause, **kw),
+                )
+        else:
+            # No offset provided, so just use the limit
+            return " \n LIMIT %s" % (self.process(limit_clause, **kw),)
+
+    def update_limit_clause(self, update_stmt):
+        """Render ``LIMIT n`` for UPDATE from the ``mysql_limit`` /
+        ``mariadb_limit`` dialect keyword, or None if not given."""
+        limit = update_stmt.kwargs.get("%s_limit" % self.dialect.name, None)
+        if limit is not None:
+            # int() ensures only an integer value is interpolated
+            return f"LIMIT {int(limit)}"
+        else:
+            return None
+
+    def delete_limit_clause(self, delete_stmt):
+        """Render ``LIMIT n`` for DELETE from the ``mysql_limit`` /
+        ``mariadb_limit`` dialect keyword, or None if not given."""
+        limit = delete_stmt.kwargs.get("%s_limit" % self.dialect.name, None)
+        if limit is not None:
+            # int() ensures only an integer value is interpolated
+            return f"LIMIT {int(limit)}"
+        else:
+            return None
+
+    def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw):
+        kw["asfrom"] = True
+        return ", ".join(
+            t._compiler_dispatch(self, **kw)
+            for t in [from_table] + list(extra_froms)
+        )
+
+    def update_from_clause(
+        self, update_stmt, from_table, extra_froms, from_hints, **kw
+    ):
+        return None
+
+    def delete_table_clause(self, delete_stmt, from_table, extra_froms, **kw):
+        """If we have extra froms make sure we render any alias as hint."""
+        ashint = False
+        if extra_froms:
+            ashint = True
+        return from_table._compiler_dispatch(
+            self, asfrom=True, iscrud=True, ashint=ashint, **kw
+        )
+
+    def delete_extra_from_clause(
+        self, delete_stmt, from_table, extra_froms, from_hints, **kw
+    ):
+        """Render the DELETE .. USING clause specific to MySQL."""
+        kw["asfrom"] = True
+        return "USING " + ", ".join(
+            t._compiler_dispatch(self, fromhints=from_hints, **kw)
+            for t in [from_table] + extra_froms
+        )
+
+    def visit_empty_set_expr(self, element_types, **kw):
+        return (
+            "SELECT %(outer)s FROM (SELECT %(inner)s) "
+            "as _empty_set WHERE 1!=1"
+            % {
+                "inner": ", ".join(
+                    "1 AS _in_%s" % idx
+                    for idx, type_ in enumerate(element_types)
+                ),
+                "outer": ", ".join(
+                    "_in_%s" % idx for idx, type_ in enumerate(element_types)
+                ),
+            }
+        )
+
+    def visit_is_distinct_from_binary(self, binary, operator, **kw):
+        return "NOT (%s <=> %s)" % (
+            self.process(binary.left),
+            self.process(binary.right),
+        )
+
+    def visit_is_not_distinct_from_binary(self, binary, operator, **kw):
+        return "%s <=> %s" % (
+            self.process(binary.left),
+            self.process(binary.right),
+        )
+
+    def _mariadb_regexp_flags(self, flags, pattern, **kw):
+        return "CONCAT('(?', %s, ')', %s)" % (
+            self.render_literal_value(flags, sqltypes.STRINGTYPE),
+            self.process(pattern, **kw),
+        )
+
+    def _regexp_match(self, op_string, binary, operator, **kw):
+        flags = binary.modifiers["flags"]
+        if flags is None:
+            return self._generate_generic_binary(binary, op_string, **kw)
+        elif self.dialect.is_mariadb:
+            return "%s%s%s" % (
+                self.process(binary.left, **kw),
+                op_string,
+                self._mariadb_regexp_flags(flags, binary.right),
+            )
+        else:
+            text = "REGEXP_LIKE(%s, %s, %s)" % (
+                self.process(binary.left, **kw),
+                self.process(binary.right, **kw),
+                self.render_literal_value(flags, sqltypes.STRINGTYPE),
+            )
+            if op_string == " NOT REGEXP ":
+                return "NOT %s" % text
+            else:
+                return text
+
+    def visit_regexp_match_op_binary(self, binary, operator, **kw):
+        return self._regexp_match(" REGEXP ", binary, operator, **kw)
+
+    def visit_not_regexp_match_op_binary(self, binary, operator, **kw):
+        return self._regexp_match(" NOT REGEXP ", binary, operator, **kw)
+
+    def visit_regexp_replace_op_binary(self, binary, operator, **kw):
+        flags = binary.modifiers["flags"]
+        if flags is None:
+            return "REGEXP_REPLACE(%s, %s)" % (
+                self.process(binary.left, **kw),
+                self.process(binary.right, **kw),
+            )
+        elif self.dialect.is_mariadb:
+            return "REGEXP_REPLACE(%s, %s, %s)" % (
+                self.process(binary.left, **kw),
+                self._mariadb_regexp_flags(flags, binary.right.clauses[0]),
+                self.process(binary.right.clauses[1], **kw),
+            )
+        else:
+            return "REGEXP_REPLACE(%s, %s, %s)" % (
+                self.process(binary.left, **kw),
+                self.process(binary.right, **kw),
+                self.render_literal_value(flags, sqltypes.STRINGTYPE),
+            )
+
+
class MySQLDDLCompiler(compiler.DDLCompiler):
    """DDL rendering for MySQL/MariaDB: column specifications, CREATE
    TABLE options, index DDL, constraint drops and COMMENT DDL."""

    def get_column_specification(self, column, **kw):
        """Builds column DDL."""
        # MariaDB requires computed columns without an explicit NULL/NOT
        # NULL to render as nullable
        if (
            self.dialect.is_mariadb is True
            and column.computed is not None
            and column._user_defined_nullable is SchemaConst.NULL_UNSPECIFIED
        ):
            column.nullable = True
        colspec = [
            self.preparer.format_column(column),
            self.dialect.type_compiler_instance.process(
                column.type, type_expression=column
            ),
        ]

        if column.computed is not None:
            colspec.append(self.process(column.computed))

        is_timestamp = isinstance(
            column.type._unwrapped_dialect_impl(self.dialect),
            sqltypes.TIMESTAMP,
        )

        if not column.nullable:
            colspec.append("NOT NULL")

        # TIMESTAMP columns need an explicit NULL to opt out of MySQL's
        # implicit NOT NULL / auto-update behavior
        # see: https://docs.sqlalchemy.org/en/latest/dialects/mysql.html#mysql_timestamp_null  # noqa
        elif column.nullable and is_timestamp:
            colspec.append("NULL")

        comment = column.comment
        if comment is not None:
            literal = self.sql_compiler.render_literal_value(
                comment, sqltypes.String()
            )
            colspec.append("COMMENT " + literal)

        # AUTO_INCREMENT applies only to the designated autoincrement
        # column, and is suppressed when a non-Identity server default or
        # a real (non-optional) Sequence takes precedence
        if (
            column.table is not None
            and column is column.table._autoincrement_column
            and (
                column.server_default is None
                or isinstance(column.server_default, sa_schema.Identity)
            )
            and not (
                self.dialect.supports_sequences
                and isinstance(column.default, sa_schema.Sequence)
                and not column.default.optional
            )
        ):
            colspec.append("AUTO_INCREMENT")
        else:
            default = self.get_column_default_string(column)
            if default is not None:
                # functional defaults render parenthesized where the
                # server supports expression defaults
                if (
                    isinstance(
                        column.server_default.arg, functions.FunctionElement
                    )
                    and self.dialect._support_default_function
                ):
                    colspec.append(f"DEFAULT ({default})")
                else:
                    colspec.append("DEFAULT " + default)
        return " ".join(colspec)

    def post_create_table(self, table):
        """Build table-level CREATE options like ENGINE and COLLATE."""

        table_opts = []

        # collect "mysql_*" / "mariadb_*" table kwargs, upper-cased and
        # stripped of the dialect prefix
        opts = {
            k[len(self.dialect.name) + 1 :].upper(): v
            for k, v in table.kwargs.items()
            if k.startswith("%s_" % self.dialect.name)
        }

        if table.comment is not None:
            opts["COMMENT"] = table.comment

        partition_options = [
            "PARTITION_BY",
            "PARTITIONS",
            "SUBPARTITIONS",
            "SUBPARTITION_BY",
        ]

        nonpart_options = set(opts).difference(partition_options)
        part_options = set(opts).intersection(partition_options)

        # charset options must precede COLLATE; topological.sort enforces
        # the ordering constraints
        for opt in topological.sort(
            [
                ("DEFAULT_CHARSET", "COLLATE"),
                ("DEFAULT_CHARACTER_SET", "COLLATE"),
                ("CHARSET", "COLLATE"),
                ("CHARACTER_SET", "COLLATE"),
            ],
            nonpart_options,
        ):
            arg = opts[opt]
            if opt in _reflection._options_of_type_string:
                arg = self.sql_compiler.render_literal_value(
                    arg, sqltypes.String()
                )

            if opt in (
                "DATA_DIRECTORY",
                "INDEX_DIRECTORY",
                "DEFAULT_CHARACTER_SET",
                "CHARACTER_SET",
                "DEFAULT_CHARSET",
                "DEFAULT_COLLATE",
            ):
                opt = opt.replace("_", " ")

            joiner = "="
            if opt in (
                "TABLESPACE",
                "DEFAULT CHARACTER SET",
                "CHARACTER SET",
                "COLLATE",
            ):
                joiner = " "

            table_opts.append(joiner.join((opt, arg)))

        # partition options have their own required ordering:
        # PARTITION BY .. PARTITIONS .. SUBPARTITION BY .. SUBPARTITIONS
        for opt in topological.sort(
            [
                ("PARTITION_BY", "PARTITIONS"),
                ("PARTITION_BY", "SUBPARTITION_BY"),
                ("PARTITION_BY", "SUBPARTITIONS"),
                ("PARTITIONS", "SUBPARTITIONS"),
                ("PARTITIONS", "SUBPARTITION_BY"),
                ("SUBPARTITION_BY", "SUBPARTITIONS"),
            ],
            part_options,
        ):
            arg = opts[opt]
            if opt in _reflection._options_of_type_string:
                arg = self.sql_compiler.render_literal_value(
                    arg, sqltypes.String()
                )

            opt = opt.replace("_", " ")
            joiner = " "

            table_opts.append(joiner.join((opt, arg)))

        return " ".join(table_opts)

    def visit_create_index(self, create, **kw):
        """Render CREATE INDEX with MySQL extensions: index prefix
        keywords, per-column prefix lengths, WITH PARSER and USING."""
        index = create.element
        self._verify_index_table(index)
        preparer = self.preparer
        table = preparer.format_table(index.table)

        # expressions (as opposed to plain columns) must be parenthesized
        columns = [
            self.sql_compiler.process(
                (
                    elements.Grouping(expr)
                    if (
                        isinstance(expr, elements.BinaryExpression)
                        or (
                            isinstance(expr, elements.UnaryExpression)
                            and expr.modifier
                            not in (operators.desc_op, operators.asc_op)
                        )
                        or isinstance(expr, functions.FunctionElement)
                    )
                    else expr
                ),
                include_table=False,
                literal_binds=True,
            )
            for expr in index.expressions
        ]

        name = self._prepared_index_name(index)

        text = "CREATE "
        if index.unique:
            text += "UNIQUE "

        # e.g. FULLTEXT / SPATIAL, via the mysql_prefix kwarg
        index_prefix = index.kwargs.get("%s_prefix" % self.dialect.name, None)
        if index_prefix:
            text += index_prefix + " "

        text += "INDEX "
        if create.if_not_exists:
            text += "IF NOT EXISTS "
        text += "%s ON %s " % (name, table)

        length = index.dialect_options[self.dialect.name]["length"]
        if length is not None:
            if isinstance(length, dict):
                # length value can be a (column_name --> integer value)
                # mapping specifying the prefix length for each column of the
                # index
                columns = ", ".join(
                    (
                        "%s(%d)" % (expr, length[col.name])
                        if col.name in length
                        else (
                            "%s(%d)" % (expr, length[expr])
                            if expr in length
                            else "%s" % expr
                        )
                    )
                    for col, expr in zip(index.expressions, columns)
                )
            else:
                # or can be an integer value specifying the same
                # prefix length for all columns of the index
                columns = ", ".join(
                    "%s(%d)" % (col, length) for col in columns
                )
        else:
            columns = ", ".join(columns)
        text += "(%s)" % columns

        parser = index.dialect_options["mysql"]["with_parser"]
        if parser is not None:
            text += " WITH PARSER %s" % (parser,)

        using = index.dialect_options["mysql"]["using"]
        if using is not None:
            text += " USING %s" % (preparer.quote(using))

        return text

    def visit_primary_key_constraint(self, constraint, **kw):
        """Render the PRIMARY KEY clause, appending the optional
        mysql_using index-type hint."""
        text = super().visit_primary_key_constraint(constraint)
        using = constraint.dialect_options["mysql"]["using"]
        if using:
            text += " USING %s" % (self.preparer.quote(using))
        return text

    def visit_drop_index(self, drop, **kw):
        """Render DROP INDEX; MySQL requires the ON <table> clause."""
        index = drop.element
        text = "\nDROP INDEX "
        if drop.if_exists:
            text += "IF EXISTS "

        return text + "%s ON %s" % (
            self._prepared_index_name(index, include_schema=False),
            self.preparer.format_table(index.table),
        )

    def visit_drop_constraint(self, drop, **kw):
        """Render ALTER TABLE .. DROP for a constraint; MySQL needs the
        constraint kind spelled out (FOREIGN KEY / PRIMARY KEY / INDEX /
        CHECK)."""
        constraint = drop.element
        if isinstance(constraint, sa_schema.ForeignKeyConstraint):
            qual = "FOREIGN KEY "
            const = self.preparer.format_constraint(constraint)
        elif isinstance(constraint, sa_schema.PrimaryKeyConstraint):
            qual = "PRIMARY KEY "
            const = ""
        elif isinstance(constraint, sa_schema.UniqueConstraint):
            qual = "INDEX "
            const = self.preparer.format_constraint(constraint)
        elif isinstance(constraint, sa_schema.CheckConstraint):
            # MariaDB spells it DROP CONSTRAINT; MySQL uses DROP CHECK
            if self.dialect.is_mariadb:
                qual = "CONSTRAINT "
            else:
                qual = "CHECK "
            const = self.preparer.format_constraint(constraint)
        else:
            qual = ""
            const = self.preparer.format_constraint(constraint)
        return "ALTER TABLE %s DROP %s%s" % (
            self.preparer.format_table(constraint.table),
            qual,
            const,
        )

    def define_constraint_match(self, constraint):
        """Reject MATCH on foreign keys: MySQL parses but ignores it and
        then also ignores ON UPDATE/ON DELETE, so fail loudly instead."""
        if constraint.match is not None:
            raise exc.CompileError(
                "MySQL ignores the 'MATCH' keyword while at the same time "
                "causes ON UPDATE/ON DELETE clauses to be ignored."
            )
        return ""

    def visit_set_table_comment(self, create, **kw):
        """Render ALTER TABLE .. COMMENT to set a table comment."""
        return "ALTER TABLE %s COMMENT %s" % (
            self.preparer.format_table(create.element),
            self.sql_compiler.render_literal_value(
                create.element.comment, sqltypes.String()
            ),
        )

    def visit_drop_table_comment(self, create, **kw):
        """Render ALTER TABLE .. COMMENT '' — MySQL drops a table comment
        by setting it to the empty string."""
        return "ALTER TABLE %s COMMENT ''" % (
            self.preparer.format_table(create.element)
        )

    def visit_set_column_comment(self, create, **kw):
        """Render ALTER TABLE .. CHANGE, re-emitting the full column
        specification — MySQL has no comment-only column DDL."""
        return "ALTER TABLE %s CHANGE %s %s" % (
            self.preparer.format_table(create.element.table),
            self.preparer.format_column(create.element),
            self.get_column_specification(create.element),
        )
+
+
class MySQLTypeCompiler(compiler.GenericTypeCompiler):
    """Type DDL rendering for MySQL/MariaDB, adding UNSIGNED/ZEROFILL
    numeric modifiers, CHARACTER SET/COLLATE string modifiers, display
    widths, and MySQL-only types (ENUM, SET, the TINY/MEDIUM/LONG
    text and blob variants)."""

    def _extend_numeric(self, type_, spec):
        "Extend a numeric-type declaration with MySQL specific extensions."

        # only MySQL-flavored types carry unsigned/zerofill attributes
        if not self._mysql_type(type_):
            return spec

        if type_.unsigned:
            spec += " UNSIGNED"
        if type_.zerofill:
            spec += " ZEROFILL"
        return spec

    def _extend_string(self, type_, defaults, spec):
        """Extend a string-type declaration with standard SQL CHARACTER SET /
        COLLATE annotations and MySQL specific extensions.

        """

        # attribute lookup with per-call fallbacks (e.g. national=True
        # for NCHAR/NVARCHAR)
        def attr(name):
            return getattr(type_, name, defaults.get(name))

        if attr("charset"):
            charset = "CHARACTER SET %s" % attr("charset")
        elif attr("ascii"):
            charset = "ASCII"
        elif attr("unicode"):
            charset = "UNICODE"
        else:
            charset = None

        if attr("collation"):
            collation = "COLLATE %s" % type_.collation
        elif attr("binary"):
            collation = "BINARY"
        else:
            collation = None

        if attr("national"):
            # NATIONAL (aka NCHAR/NVARCHAR) trumps charsets.
            return " ".join(
                [c for c in ("NATIONAL", spec, collation) if c is not None]
            )
        return " ".join(
            [c for c in (spec, charset, collation) if c is not None]
        )

    def _mysql_type(self, type_):
        # True for types declared in this dialect (carrying MySQL-only
        # attributes), False for generic SQLAlchemy types
        return isinstance(type_, (_StringType, _NumericType))

    def visit_NUMERIC(self, type_, **kw):
        if type_.precision is None:
            return self._extend_numeric(type_, "NUMERIC")
        elif type_.scale is None:
            return self._extend_numeric(
                type_,
                "NUMERIC(%(precision)s)" % {"precision": type_.precision},
            )
        else:
            return self._extend_numeric(
                type_,
                "NUMERIC(%(precision)s, %(scale)s)"
                % {"precision": type_.precision, "scale": type_.scale},
            )

    def visit_DECIMAL(self, type_, **kw):
        if type_.precision is None:
            return self._extend_numeric(type_, "DECIMAL")
        elif type_.scale is None:
            return self._extend_numeric(
                type_,
                "DECIMAL(%(precision)s)" % {"precision": type_.precision},
            )
        else:
            return self._extend_numeric(
                type_,
                "DECIMAL(%(precision)s, %(scale)s)"
                % {"precision": type_.precision, "scale": type_.scale},
            )

    def visit_DOUBLE(self, type_, **kw):
        # MySQL requires precision and scale together for DOUBLE
        if type_.precision is not None and type_.scale is not None:
            return self._extend_numeric(
                type_,
                "DOUBLE(%(precision)s, %(scale)s)"
                % {"precision": type_.precision, "scale": type_.scale},
            )
        else:
            return self._extend_numeric(type_, "DOUBLE")

    def visit_REAL(self, type_, **kw):
        if type_.precision is not None and type_.scale is not None:
            return self._extend_numeric(
                type_,
                "REAL(%(precision)s, %(scale)s)"
                % {"precision": type_.precision, "scale": type_.scale},
            )
        else:
            return self._extend_numeric(type_, "REAL")

    def visit_FLOAT(self, type_, **kw):
        if (
            self._mysql_type(type_)
            and type_.scale is not None
            and type_.precision is not None
        ):
            return self._extend_numeric(
                type_, "FLOAT(%s, %s)" % (type_.precision, type_.scale)
            )
        elif type_.precision is not None:
            return self._extend_numeric(
                type_, "FLOAT(%s)" % (type_.precision,)
            )
        else:
            return self._extend_numeric(type_, "FLOAT")

    def visit_INTEGER(self, type_, **kw):
        # display_width renders as INTEGER(n); a deprecated MySQL feature
        if self._mysql_type(type_) and type_.display_width is not None:
            return self._extend_numeric(
                type_,
                "INTEGER(%(display_width)s)"
                % {"display_width": type_.display_width},
            )
        else:
            return self._extend_numeric(type_, "INTEGER")

    def visit_BIGINT(self, type_, **kw):
        if self._mysql_type(type_) and type_.display_width is not None:
            return self._extend_numeric(
                type_,
                "BIGINT(%(display_width)s)"
                % {"display_width": type_.display_width},
            )
        else:
            return self._extend_numeric(type_, "BIGINT")

    def visit_MEDIUMINT(self, type_, **kw):
        if self._mysql_type(type_) and type_.display_width is not None:
            return self._extend_numeric(
                type_,
                "MEDIUMINT(%(display_width)s)"
                % {"display_width": type_.display_width},
            )
        else:
            return self._extend_numeric(type_, "MEDIUMINT")

    def visit_TINYINT(self, type_, **kw):
        if self._mysql_type(type_) and type_.display_width is not None:
            return self._extend_numeric(
                type_, "TINYINT(%s)" % type_.display_width
            )
        else:
            return self._extend_numeric(type_, "TINYINT")

    def visit_SMALLINT(self, type_, **kw):
        if self._mysql_type(type_) and type_.display_width is not None:
            return self._extend_numeric(
                type_,
                "SMALLINT(%(display_width)s)"
                % {"display_width": type_.display_width},
            )
        else:
            return self._extend_numeric(type_, "SMALLINT")

    def visit_BIT(self, type_, **kw):
        if type_.length is not None:
            return "BIT(%s)" % type_.length
        else:
            return "BIT"

    def visit_DATETIME(self, type_, **kw):
        # fsp = fractional seconds precision, e.g. DATETIME(6)
        if getattr(type_, "fsp", None):
            return "DATETIME(%d)" % type_.fsp
        else:
            return "DATETIME"

    def visit_DATE(self, type_, **kw):
        return "DATE"

    def visit_TIME(self, type_, **kw):
        if getattr(type_, "fsp", None):
            return "TIME(%d)" % type_.fsp
        else:
            return "TIME"

    def visit_TIMESTAMP(self, type_, **kw):
        if getattr(type_, "fsp", None):
            return "TIMESTAMP(%d)" % type_.fsp
        else:
            return "TIMESTAMP"

    def visit_YEAR(self, type_, **kw):
        if type_.display_width is None:
            return "YEAR"
        else:
            return "YEAR(%s)" % type_.display_width

    def visit_TEXT(self, type_, **kw):
        if type_.length is not None:
            return self._extend_string(type_, {}, "TEXT(%d)" % type_.length)
        else:
            return self._extend_string(type_, {}, "TEXT")

    def visit_TINYTEXT(self, type_, **kw):
        return self._extend_string(type_, {}, "TINYTEXT")

    def visit_MEDIUMTEXT(self, type_, **kw):
        return self._extend_string(type_, {}, "MEDIUMTEXT")

    def visit_LONGTEXT(self, type_, **kw):
        return self._extend_string(type_, {}, "LONGTEXT")

    def visit_VARCHAR(self, type_, **kw):
        if type_.length is not None:
            return self._extend_string(type_, {}, "VARCHAR(%d)" % type_.length)
        else:
            raise exc.CompileError(
                "VARCHAR requires a length on dialect %s" % self.dialect.name
            )

    def visit_CHAR(self, type_, **kw):
        if type_.length is not None:
            return self._extend_string(
                type_, {}, "CHAR(%(length)s)" % {"length": type_.length}
            )
        else:
            return self._extend_string(type_, {}, "CHAR")

    def visit_NVARCHAR(self, type_, **kw):
        # We'll actually generate the equiv. "NATIONAL VARCHAR" instead
        # of "NVARCHAR".
        if type_.length is not None:
            return self._extend_string(
                type_,
                {"national": True},
                "VARCHAR(%(length)s)" % {"length": type_.length},
            )
        else:
            raise exc.CompileError(
                "NVARCHAR requires a length on dialect %s" % self.dialect.name
            )

    def visit_NCHAR(self, type_, **kw):
        # We'll actually generate the equiv.
        # "NATIONAL CHAR" instead of "NCHAR".
        if type_.length is not None:
            return self._extend_string(
                type_,
                {"national": True},
                "CHAR(%(length)s)" % {"length": type_.length},
            )
        else:
            return self._extend_string(type_, {"national": True}, "CHAR")

    def visit_UUID(self, type_, **kw):
        return "UUID"

    def visit_VARBINARY(self, type_, **kw):
        return "VARBINARY(%d)" % type_.length

    def visit_JSON(self, type_, **kw):
        return "JSON"

    def visit_large_binary(self, type_, **kw):
        return self.visit_BLOB(type_)

    def visit_enum(self, type_, **kw):
        # non-native enums fall back to the generic VARCHAR rendering
        if not type_.native_enum:
            return super().visit_enum(type_)
        else:
            return self._visit_enumerated_values("ENUM", type_, type_.enums)

    def visit_BLOB(self, type_, **kw):
        if type_.length is not None:
            return "BLOB(%d)" % type_.length
        else:
            return "BLOB"

    def visit_TINYBLOB(self, type_, **kw):
        return "TINYBLOB"

    def visit_MEDIUMBLOB(self, type_, **kw):
        return "MEDIUMBLOB"

    def visit_LONGBLOB(self, type_, **kw):
        return "LONGBLOB"

    def _visit_enumerated_values(self, name, type_, enumerated_values):
        """Render ENUM(...) / SET(...) with each value quoted; escapes
        single quotes and, when the paramstyle requires it, percent
        signs."""
        quoted_enums = []
        for e in enumerated_values:
            if self.dialect.identifier_preparer._double_percents:
                e = e.replace("%", "%%")
            quoted_enums.append("'%s'" % e.replace("'", "''"))
        return self._extend_string(
            type_, {}, "%s(%s)" % (name, ",".join(quoted_enums))
        )

    def visit_ENUM(self, type_, **kw):
        return self._visit_enumerated_values("ENUM", type_, type_.enums)

    def visit_SET(self, type_, **kw):
        return self._visit_enumerated_values("SET", type_, type_.values)

    def visit_BOOLEAN(self, type_, **kw):
        # MySQL's BOOL is an alias for TINYINT(1)
        return "BOOL"
+
+
class MySQLIdentifierPreparer(compiler.IdentifierPreparer):
    """Identifier quoting for MySQL: backticks by default, double quotes
    when the server runs in ANSI_QUOTES SQL mode."""

    reserved_words = RESERVED_WORDS_MYSQL

    def __init__(self, dialect, server_ansiquotes=False, **kw):
        quote = '"' if server_ansiquotes else "`"
        super().__init__(dialect, initial_quote=quote, escape_quote=quote)

    def _quote_free_identifiers(self, *ids):
        """Unilaterally identifier-quote any number of strings."""
        return tuple(
            self.quote_identifier(ident) for ident in ids if ident is not None
        )
+
+
class MariaDBIdentifierPreparer(MySQLIdentifierPreparer):
    # Same quoting behavior as MySQL, but using MariaDB's reserved-word
    # list.
    reserved_words = RESERVED_WORDS_MARIADB
+
+
@log.class_logger
class MySQLDialect(default.DefaultDialect):
    """Details of the MySQL dialect.
    Not used directly in application code.
    """

    name = "mysql"
    supports_statement_cache = True

    supports_alter = True

    # MySQL has no true "boolean" type; we
    # allow for the "true" and "false" keywords, however
    supports_native_boolean = False

    # identifiers are 64, however aliases can be 255...
    max_identifier_length = 255
    max_index_name_length = 64
    max_constraint_name_length = 64

    # MySQL's "/" is true division; integer floor division is "DIV"
    div_is_floordiv = False

    supports_native_enum = True

    returns_native_bytes = True

    supports_sequences = False  # default for MySQL ...
    # ... may be updated to True for MariaDB 10.3+ in initialize()

    sequences_optional = False

    supports_for_update_of = False  # default for MySQL ...
    # ... may be updated to True for MySQL 8+ in initialize()

    _requires_alias_for_on_duplicate_key = False  # Only available ...
    # ... in MySQL 8+

    # MySQL doesn't support "DEFAULT VALUES" but *does* support
    # "VALUES (DEFAULT)"
    supports_default_values = False
    supports_default_metavalue = True

    use_insertmanyvalues: bool = True
    insertmanyvalues_implicit_sentinel = (
        InsertmanyvaluesSentinelOpts.ANY_AUTOINCREMENT
    )

    supports_sane_rowcount = True
    supports_sane_multi_rowcount = False
    supports_multivalues_insert = True
    insert_null_pk_still_autoincrements = True

    supports_comments = True
    inline_comments = True
    default_paramstyle = "format"
    colspecs = colspecs

    cte_follows_insert = True

    # compiler / preparer classes wired to the MySQL variants defined
    # above
    statement_compiler = MySQLCompiler
    ddl_compiler = MySQLDDLCompiler
    type_compiler_cls = MySQLTypeCompiler
    ischema_names = ischema_names
    preparer = MySQLIdentifierPreparer

    is_mariadb = False
    _mariadb_normalized_version_info = None

    # default SQL compilation settings -
    # these are modified upon initialize(),
    # i.e. first connect
    _backslash_escapes = True
    _server_ansiquotes = False

    # dialect-specific keyword arguments accepted on these constructs
    construct_arguments = [
        (sa_schema.Table, {"*": None}),
        (sql.Update, {"limit": None}),
        (sql.Delete, {"limit": None}),
        (sa_schema.PrimaryKeyConstraint, {"using": None}),
        (
            sa_schema.Index,
            {
                "using": None,
                "length": None,
                "prefix": None,
                "with_parser": None,
            },
        ),
    ]
+
+    def __init__(
+        self,
+        json_serializer=None,
+        json_deserializer=None,
+        is_mariadb=None,
+        **kwargs,
+    ):
+        kwargs.pop("use_ansiquotes", None)  # legacy
+        default.DefaultDialect.__init__(self, **kwargs)
+        self._json_serializer = json_serializer
+        self._json_deserializer = json_deserializer
+        self._set_mariadb(is_mariadb, None)
+
+    def get_isolation_level_values(self, dbapi_conn):
+        return (
+            "SERIALIZABLE",
+            "READ UNCOMMITTED",
+            "READ COMMITTED",
+            "REPEATABLE READ",
+        )
+
+    def set_isolation_level(self, dbapi_connection, level):
+        cursor = dbapi_connection.cursor()
+        cursor.execute(f"SET SESSION TRANSACTION ISOLATION LEVEL {level}")
+        cursor.execute("COMMIT")
+        cursor.close()
+
+    def get_isolation_level(self, dbapi_connection):
+        cursor = dbapi_connection.cursor()
+        if self._is_mysql and self.server_version_info >= (5, 7, 20):
+            cursor.execute("SELECT @@transaction_isolation")
+        else:
+            cursor.execute("SELECT @@tx_isolation")
+        row = cursor.fetchone()
+        if row is None:
+            util.warn(
+                "Could not retrieve transaction isolation level for MySQL "
+                "connection."
+            )
+            raise NotImplementedError()
+        val = row[0]
+        cursor.close()
+        if isinstance(val, bytes):
+            val = val.decode()
+        return val.upper().replace("-", " ")
+
+    @classmethod
+    def _is_mariadb_from_url(cls, url):
+        dbapi = cls.import_dbapi()
+        dialect = cls(dbapi=dbapi)
+
+        cargs, cparams = dialect.create_connect_args(url)
+        conn = dialect.connect(*cargs, **cparams)
+        try:
+            cursor = conn.cursor()
+            cursor.execute("SELECT VERSION() LIKE '%MariaDB%'")
+            val = cursor.fetchone()[0]
+        except:
+            raise
+        else:
+            return bool(val)
+        finally:
+            conn.close()
+
+    def _get_server_version_info(self, connection):
+        # get database server version info explicitly over the wire
+        # to avoid proxy servers like MaxScale getting in the
+        # way with their own values, see #4205
+        dbapi_con = connection.connection
+        cursor = dbapi_con.cursor()
+        cursor.execute("SELECT VERSION()")
+        val = cursor.fetchone()[0]
+        cursor.close()
+        if isinstance(val, bytes):
+            val = val.decode()
+
+        return self._parse_server_version(val)
+
+    def _parse_server_version(self, val):
+        version = []
+        is_mariadb = False
+
+        r = re.compile(r"[.\-+]")
+        tokens = r.split(val)
+        for token in tokens:
+            parsed_token = re.match(
+                r"^(?:(\d+)(?:a|b|c)?|(MariaDB\w*))$", token
+            )
+            if not parsed_token:
+                continue
+            elif parsed_token.group(2):
+                self._mariadb_normalized_version_info = tuple(version[-3:])
+                is_mariadb = True
+            else:
+                digit = int(parsed_token.group(1))
+                version.append(digit)
+
+        server_version_info = tuple(version)
+
+        self._set_mariadb(
+            server_version_info and is_mariadb, server_version_info
+        )
+
+        if not is_mariadb:
+            self._mariadb_normalized_version_info = server_version_info
+
+        if server_version_info < (5, 0, 2):
+            raise NotImplementedError(
+                "the MySQL/MariaDB dialect supports server "
+                "version info 5.0.2 and above."
+            )
+
+        # setting it here to help with the test suite
+        self.server_version_info = server_version_info
+        return server_version_info
+
+    def _set_mariadb(self, is_mariadb, server_version_info):
+        if is_mariadb is None:
+            return
+
+        if not is_mariadb and self.is_mariadb:
+            raise exc.InvalidRequestError(
+                "MySQL version %s is not a MariaDB variant."
+                % (".".join(map(str, server_version_info)),)
+            )
+        if is_mariadb:
+            self.preparer = MariaDBIdentifierPreparer
+            # this would have been set by the default dialect already,
+            # so set it again
+            self.identifier_preparer = self.preparer(self)
+
+            # this will be updated on first connect in initialize()
+            # if using older mariadb version
+            self.delete_returning = True
+            self.insert_returning = True
+
+        self.is_mariadb = is_mariadb
+
+    def do_begin_twophase(self, connection, xid):
+        connection.execute(sql.text("XA BEGIN :xid"), dict(xid=xid))
+
+    def do_prepare_twophase(self, connection, xid):
+        connection.execute(sql.text("XA END :xid"), dict(xid=xid))
+        connection.execute(sql.text("XA PREPARE :xid"), dict(xid=xid))
+
+    def do_rollback_twophase(
+        self, connection, xid, is_prepared=True, recover=False
+    ):
+        if not is_prepared:
+            connection.execute(sql.text("XA END :xid"), dict(xid=xid))
+        connection.execute(sql.text("XA ROLLBACK :xid"), dict(xid=xid))
+
+    def do_commit_twophase(
+        self, connection, xid, is_prepared=True, recover=False
+    ):
+        if not is_prepared:
+            self.do_prepare_twophase(connection, xid)
+        connection.execute(sql.text("XA COMMIT :xid"), dict(xid=xid))
+
+    def do_recover_twophase(self, connection):
+        resultset = connection.exec_driver_sql("XA RECOVER")
+        return [row["data"][0 : row["gtrid_length"]] for row in resultset]
+
+    def is_disconnect(self, e, connection, cursor):
+        if isinstance(
+            e,
+            (
+                self.dbapi.OperationalError,
+                self.dbapi.ProgrammingError,
+                self.dbapi.InterfaceError,
+            ),
+        ) and self._extract_error_code(e) in (
+            1927,
+            2006,
+            2013,
+            2014,
+            2045,
+            2055,
+            4031,
+        ):
+            return True
+        elif isinstance(
+            e, (self.dbapi.InterfaceError, self.dbapi.InternalError)
+        ):
+            # if underlying connection is closed,
+            # this is the error you get
+            return "(0, '')" in str(e)
+        else:
+            return False
+
+    def _compat_fetchall(self, rp, charset=None):
+        """Proxy result rows to smooth over MySQL-Python driver
+        inconsistencies."""
+
+        return [_DecodingRow(row, charset) for row in rp.fetchall()]
+
+    def _compat_fetchone(self, rp, charset=None):
+        """Proxy a result row to smooth over MySQL-Python driver
+        inconsistencies."""
+
+        row = rp.fetchone()
+        if row:
+            return _DecodingRow(row, charset)
+        else:
+            return None
+
+    def _compat_first(self, rp, charset=None):
+        """Proxy a result row to smooth over MySQL-Python driver
+        inconsistencies."""
+
+        row = rp.first()
+        if row:
+            return _DecodingRow(row, charset)
+        else:
+            return None
+
+    def _extract_error_code(self, exception):
+        raise NotImplementedError()
+
+    def _get_default_schema_name(self, connection):
+        return connection.exec_driver_sql("SELECT DATABASE()").scalar()
+
+    @reflection.cache
+    def has_table(self, connection, table_name, schema=None, **kw):
+        self._ensure_has_table_connection(connection)
+
+        if schema is None:
+            schema = self.default_schema_name
+
+        assert schema is not None
+
+        full_name = ".".join(
+            self.identifier_preparer._quote_free_identifiers(
+                schema, table_name
+            )
+        )
+
+        # DESCRIBE *must* be used because there is no information schema
+        # table that returns information on temp tables that is consistently
+        # available on MariaDB / MySQL / engine-agnostic etc.
+        # therefore we have no choice but to use DESCRIBE and an error catch
+        # to detect "False".  See issue #9058
+
+        try:
+            with connection.exec_driver_sql(
+                f"DESCRIBE {full_name}",
+                execution_options={"skip_user_error_events": True},
+            ) as rs:
+                return rs.fetchone() is not None
+        except exc.DBAPIError as e:
+            # https://dev.mysql.com/doc/mysql-errors/8.0/en/server-error-reference.html  # noqa: E501
+            # there are a lot of codes that *may* pop up here at some point
+            # but we continue to be fairly conservative.  We include:
+            # 1146: Table '%s.%s' doesn't exist - what every MySQL has emitted
+            # for decades
+            #
+            # mysql 8 suddenly started emitting:
+            # 1049: Unknown database '%s'  - for nonexistent schema
+            #
+            # also added:
+            # 1051: Unknown table '%s' - not known to emit
+            #
+            # there's more "doesn't exist" kinds of messages but they are
+            # less clear if mysql 8 would suddenly start using one of those
+            if self._extract_error_code(e.orig) in (1146, 1049, 1051):
+                return False
+            raise
+
+    @reflection.cache
+    def has_sequence(self, connection, sequence_name, schema=None, **kw):
+        if not self.supports_sequences:
+            self._sequences_not_supported()
+        if not schema:
+            schema = self.default_schema_name
+        # MariaDB implements sequences as a special type of table
+        #
+        cursor = connection.execute(
+            sql.text(
+                "SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES "
+                "WHERE TABLE_TYPE='SEQUENCE' and TABLE_NAME=:name AND "
+                "TABLE_SCHEMA=:schema_name"
+            ),
+            dict(
+                name=str(sequence_name),
+                schema_name=str(schema),
+            ),
+        )
+        return cursor.first() is not None
+
+    def _sequences_not_supported(self):
+        raise NotImplementedError(
+            "Sequences are supported only by the "
+            "MariaDB series 10.3 or greater"
+        )
+
+    @reflection.cache
+    def get_sequence_names(self, connection, schema=None, **kw):
+        if not self.supports_sequences:
+            self._sequences_not_supported()
+        if not schema:
+            schema = self.default_schema_name
+        # MariaDB implements sequences as a special type of table
+        cursor = connection.execute(
+            sql.text(
+                "SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES "
+                "WHERE TABLE_TYPE='SEQUENCE' and TABLE_SCHEMA=:schema_name"
+            ),
+            dict(schema_name=schema),
+        )
+        return [
+            row[0]
+            for row in self._compat_fetchall(
+                cursor, charset=self._connection_charset
+            )
+        ]
+
+    def initialize(self, connection):
+        # this is driver-based, does not need server version info
+        # and is fairly critical for even basic SQL operations
+        self._connection_charset = self._detect_charset(connection)
+
+        # call super().initialize() because we need to have
+        # server_version_info set up.  in 1.4 under python 2 only this does the
+        # "check unicode returns" thing, which is the one area that some
+        # SQL gets compiled within initialize() currently
+        default.DefaultDialect.initialize(self, connection)
+
+        self._detect_sql_mode(connection)
+        self._detect_ansiquotes(connection)  # depends on sql mode
+        self._detect_casing(connection)
+        if self._server_ansiquotes:
+            # if ansiquotes == True, build a new IdentifierPreparer
+            # with the new setting
+            self.identifier_preparer = self.preparer(
+                self, server_ansiquotes=self._server_ansiquotes
+            )
+
+        self.supports_sequences = (
+            self.is_mariadb and self.server_version_info >= (10, 3)
+        )
+
+        self.supports_for_update_of = (
+            self._is_mysql and self.server_version_info >= (8,)
+        )
+
+        self._needs_correct_for_88718_96365 = (
+            not self.is_mariadb and self.server_version_info >= (8,)
+        )
+
+        self.delete_returning = (
+            self.is_mariadb and self.server_version_info >= (10, 0, 5)
+        )
+
+        self.insert_returning = (
+            self.is_mariadb and self.server_version_info >= (10, 5)
+        )
+
+        self._requires_alias_for_on_duplicate_key = (
+            self._is_mysql and self.server_version_info >= (8, 0, 20)
+        )
+
+        self._warn_for_known_db_issues()
+
+    def _warn_for_known_db_issues(self):
+        if self.is_mariadb:
+            mdb_version = self._mariadb_normalized_version_info
+            if mdb_version > (10, 2) and mdb_version < (10, 2, 9):
+                util.warn(
+                    "MariaDB %r before 10.2.9 has known issues regarding "
+                    "CHECK constraints, which impact handling of NULL values "
+                    "with SQLAlchemy's boolean datatype (MDEV-13596). An "
+                    "additional issue prevents proper migrations of columns "
+                    "with CHECK constraints (MDEV-11114).  Please upgrade to "
+                    "MariaDB 10.2.9 or greater, or use the MariaDB 10.1 "
+                    "series, to avoid these issues." % (mdb_version,)
+                )
+
+    @property
+    def _support_float_cast(self):
+        if not self.server_version_info:
+            return False
+        elif self.is_mariadb:
+            # ref https://mariadb.com/kb/en/mariadb-1045-release-notes/
+            return self.server_version_info >= (10, 4, 5)
+        else:
+            # ref https://dev.mysql.com/doc/relnotes/mysql/8.0/en/news-8-0-17.html#mysqld-8-0-17-feature  # noqa
+            return self.server_version_info >= (8, 0, 17)
+
+    @property
+    def _support_default_function(self):
+        if not self.server_version_info:
+            return False
+        elif self.is_mariadb:
+            # ref https://mariadb.com/kb/en/mariadb-1021-release-notes/
+            return self.server_version_info >= (10, 2, 1)
+        else:
+            # ref https://dev.mysql.com/doc/refman/8.0/en/data-type-defaults.html # noqa
+            return self.server_version_info >= (8, 0, 13)
+
+    @property
+    def _is_mariadb(self):
+        return self.is_mariadb
+
+    @property
+    def _is_mysql(self):
+        return not self.is_mariadb
+
+    @property
+    def _is_mariadb_102(self):
+        return self.is_mariadb and self._mariadb_normalized_version_info > (
+            10,
+            2,
+        )
+
+    @reflection.cache
+    def get_schema_names(self, connection, **kw):
+        rp = connection.exec_driver_sql("SHOW schemas")
+        return [r[0] for r in rp]
+
+    @reflection.cache
+    def get_table_names(self, connection, schema=None, **kw):
+        """Return a Unicode SHOW TABLES from a given schema."""
+        if schema is not None:
+            current_schema = schema
+        else:
+            current_schema = self.default_schema_name
+
+        charset = self._connection_charset
+
+        rp = connection.exec_driver_sql(
+            "SHOW FULL TABLES FROM %s"
+            % self.identifier_preparer.quote_identifier(current_schema)
+        )
+
+        return [
+            row[0]
+            for row in self._compat_fetchall(rp, charset=charset)
+            if row[1] == "BASE TABLE"
+        ]
+
+    @reflection.cache
+    def get_view_names(self, connection, schema=None, **kw):
+        if schema is None:
+            schema = self.default_schema_name
+        charset = self._connection_charset
+        rp = connection.exec_driver_sql(
+            "SHOW FULL TABLES FROM %s"
+            % self.identifier_preparer.quote_identifier(schema)
+        )
+        return [
+            row[0]
+            for row in self._compat_fetchall(rp, charset=charset)
+            if row[1] in ("VIEW", "SYSTEM VIEW")
+        ]
+
+    @reflection.cache
+    def get_table_options(self, connection, table_name, schema=None, **kw):
+        parsed_state = self._parsed_state_or_create(
+            connection, table_name, schema, **kw
+        )
+        if parsed_state.table_options:
+            return parsed_state.table_options
+        else:
+            return ReflectionDefaults.table_options()
+
+    @reflection.cache
+    def get_columns(self, connection, table_name, schema=None, **kw):
+        parsed_state = self._parsed_state_or_create(
+            connection, table_name, schema, **kw
+        )
+        if parsed_state.columns:
+            return parsed_state.columns
+        else:
+            return ReflectionDefaults.columns()
+
+    @reflection.cache
+    def get_pk_constraint(self, connection, table_name, schema=None, **kw):
+        parsed_state = self._parsed_state_or_create(
+            connection, table_name, schema, **kw
+        )
+        for key in parsed_state.keys:
+            if key["type"] == "PRIMARY":
+                # There can be only one.
+                cols = [s[0] for s in key["columns"]]
+                return {"constrained_columns": cols, "name": None}
+        return ReflectionDefaults.pk_constraint()
+
+    @reflection.cache
+    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
+        parsed_state = self._parsed_state_or_create(
+            connection, table_name, schema, **kw
+        )
+        default_schema = None
+
+        fkeys = []
+
+        for spec in parsed_state.fk_constraints:
+            ref_name = spec["table"][-1]
+            ref_schema = len(spec["table"]) > 1 and spec["table"][-2] or schema
+
+            if not ref_schema:
+                if default_schema is None:
+                    default_schema = connection.dialect.default_schema_name
+                if schema == default_schema:
+                    ref_schema = schema
+
+            loc_names = spec["local"]
+            ref_names = spec["foreign"]
+
+            con_kw = {}
+            for opt in ("onupdate", "ondelete"):
+                if spec.get(opt, False) not in ("NO ACTION", None):
+                    con_kw[opt] = spec[opt]
+
+            fkey_d = {
+                "name": spec["name"],
+                "constrained_columns": loc_names,
+                "referred_schema": ref_schema,
+                "referred_table": ref_name,
+                "referred_columns": ref_names,
+                "options": con_kw,
+            }
+            fkeys.append(fkey_d)
+
+        if self._needs_correct_for_88718_96365:
+            self._correct_for_mysql_bugs_88718_96365(fkeys, connection)
+
+        return fkeys if fkeys else ReflectionDefaults.foreign_keys()
+
+    def _correct_for_mysql_bugs_88718_96365(self, fkeys, connection):
+        # Foreign key is always in lower case (MySQL 8.0)
+        # https://bugs.mysql.com/bug.php?id=88718
+        # issue #4344 for SQLAlchemy
+
+        # table name also for MySQL 8.0
+        # https://bugs.mysql.com/bug.php?id=96365
+        # issue #4751 for SQLAlchemy
+
+        # for lower_case_table_names=2, information_schema.columns
+        # preserves the original table/schema casing, but SHOW CREATE
+        # TABLE does not.   this problem is not in lower_case_table_names=1,
+        # but use case-insensitive matching for these two modes in any case.
+
+        if self._casing in (1, 2):
+
+            def lower(s):
+                return s.lower()
+
+        else:
+            # if case sensitive, there can be two tables referenced
+            # with the same name but different casing, so we need to use
+            # case-sensitive matching.
+            def lower(s):
+                return s
+
+        default_schema_name = connection.dialect.default_schema_name
+
+        # NOTE: using (table_schema, table_name, lower(column_name)) in (...)
+        # is very slow since mysql does not seem able to properly use indexes.
+        # Unpack the where condition instead.
+        schema_by_table_by_column = defaultdict(lambda: defaultdict(list))
+        for rec in fkeys:
+            sch = lower(rec["referred_schema"] or default_schema_name)
+            tbl = lower(rec["referred_table"])
+            for col_name in rec["referred_columns"]:
+                schema_by_table_by_column[sch][tbl].append(col_name)
+
+        if schema_by_table_by_column:
+
+            condition = sql.or_(
+                *(
+                    sql.and_(
+                        _info_columns.c.table_schema == schema,
+                        sql.or_(
+                            *(
+                                sql.and_(
+                                    _info_columns.c.table_name == table,
+                                    sql.func.lower(
+                                        _info_columns.c.column_name
+                                    ).in_(columns),
+                                )
+                                for table, columns in tables.items()
+                            )
+                        ),
+                    )
+                    for schema, tables in schema_by_table_by_column.items()
+                )
+            )
+
+            select = sql.select(
+                _info_columns.c.table_schema,
+                _info_columns.c.table_name,
+                _info_columns.c.column_name,
+            ).where(condition)
+
+            correct_for_wrong_fk_case = connection.execute(select)
+
+            # in casing=0, table name and schema name come back in their
+            # exact case.
+            # in casing=1, table name and schema name come back in lower
+            # case.
+            # in casing=2, table name and schema name come back from the
+            # information_schema.columns view in the case
+            # that was used in CREATE DATABASE and CREATE TABLE, but
+            # SHOW CREATE TABLE converts them to *lower case*, therefore
+            # not matching.  So for this case, case-insensitive lookup
+            # is necessary
+            d = defaultdict(dict)
+            for schema, tname, cname in correct_for_wrong_fk_case:
+                d[(lower(schema), lower(tname))]["SCHEMANAME"] = schema
+                d[(lower(schema), lower(tname))]["TABLENAME"] = tname
+                d[(lower(schema), lower(tname))][cname.lower()] = cname
+
+            for fkey in fkeys:
+                rec = d[
+                    (
+                        lower(fkey["referred_schema"] or default_schema_name),
+                        lower(fkey["referred_table"]),
+                    )
+                ]
+
+                fkey["referred_table"] = rec["TABLENAME"]
+                if fkey["referred_schema"] is not None:
+                    fkey["referred_schema"] = rec["SCHEMANAME"]
+
+                fkey["referred_columns"] = [
+                    rec[col.lower()] for col in fkey["referred_columns"]
+                ]
+
+    @reflection.cache
+    def get_check_constraints(self, connection, table_name, schema=None, **kw):
+        parsed_state = self._parsed_state_or_create(
+            connection, table_name, schema, **kw
+        )
+
+        cks = [
+            {"name": spec["name"], "sqltext": spec["sqltext"]}
+            for spec in parsed_state.ck_constraints
+        ]
+        cks.sort(key=lambda d: d["name"] or "~")  # sort None as last
+        return cks if cks else ReflectionDefaults.check_constraints()
+
+    @reflection.cache
+    def get_table_comment(self, connection, table_name, schema=None, **kw):
+        parsed_state = self._parsed_state_or_create(
+            connection, table_name, schema, **kw
+        )
+        comment = parsed_state.table_options.get(f"{self.name}_comment", None)
+        if comment is not None:
+            return {"text": comment}
+        else:
+            return ReflectionDefaults.table_comment()
+
+    @reflection.cache
+    def get_indexes(self, connection, table_name, schema=None, **kw):
+        parsed_state = self._parsed_state_or_create(
+            connection, table_name, schema, **kw
+        )
+
+        indexes = []
+
+        for spec in parsed_state.keys:
+            dialect_options = {}
+            unique = False
+            flavor = spec["type"]
+            if flavor == "PRIMARY":
+                continue
+            if flavor == "UNIQUE":
+                unique = True
+            elif flavor in ("FULLTEXT", "SPATIAL"):
+                dialect_options["%s_prefix" % self.name] = flavor
+            elif flavor is None:
+                pass
+            else:
+                self.logger.info(
+                    "Converting unknown KEY type %s to a plain KEY", flavor
+                )
+                pass
+
+            if spec["parser"]:
+                dialect_options["%s_with_parser" % (self.name)] = spec[
+                    "parser"
+                ]
+
+            index_d = {}
+
+            index_d["name"] = spec["name"]
+            index_d["column_names"] = [s[0] for s in spec["columns"]]
+            mysql_length = {
+                s[0]: s[1] for s in spec["columns"] if s[1] is not None
+            }
+            if mysql_length:
+                dialect_options["%s_length" % self.name] = mysql_length
+
+            index_d["unique"] = unique
+            if flavor:
+                index_d["type"] = flavor
+
+            if dialect_options:
+                index_d["dialect_options"] = dialect_options
+
+            indexes.append(index_d)
+        indexes.sort(key=lambda d: d["name"] or "~")  # sort None as last
+        return indexes if indexes else ReflectionDefaults.indexes()
+
+    @reflection.cache
+    def get_unique_constraints(
+        self, connection, table_name, schema=None, **kw
+    ):
+        parsed_state = self._parsed_state_or_create(
+            connection, table_name, schema, **kw
+        )
+
+        ucs = [
+            {
+                "name": key["name"],
+                "column_names": [col[0] for col in key["columns"]],
+                "duplicates_index": key["name"],
+            }
+            for key in parsed_state.keys
+            if key["type"] == "UNIQUE"
+        ]
+        ucs.sort(key=lambda d: d["name"] or "~")  # sort None as last
+        if ucs:
+            return ucs
+        else:
+            return ReflectionDefaults.unique_constraints()
+
+    @reflection.cache
+    def get_view_definition(self, connection, view_name, schema=None, **kw):
+        charset = self._connection_charset
+        full_name = ".".join(
+            self.identifier_preparer._quote_free_identifiers(schema, view_name)
+        )
+        sql = self._show_create_table(
+            connection, None, charset, full_name=full_name
+        )
+        if sql.upper().startswith("CREATE TABLE"):
+            # it's a table, not a view
+            raise exc.NoSuchTableError(full_name)
+        return sql
+
+    def _parsed_state_or_create(
+        self, connection, table_name, schema=None, **kw
+    ):
+        return self._setup_parser(
+            connection,
+            table_name,
+            schema,
+            info_cache=kw.get("info_cache", None),
+        )
+
+    @util.memoized_property
+    def _tabledef_parser(self):
+        """return the MySQLTableDefinitionParser, generate if needed.
+
+        The deferred creation ensures that the dialect has
+        retrieved server version information first.
+
+        """
+        preparer = self.identifier_preparer
+        return _reflection.MySQLTableDefinitionParser(self, preparer)
+
+    @reflection.cache
+    def _setup_parser(self, connection, table_name, schema=None, **kw):
+        charset = self._connection_charset
+        parser = self._tabledef_parser
+        full_name = ".".join(
+            self.identifier_preparer._quote_free_identifiers(
+                schema, table_name
+            )
+        )
+        sql = self._show_create_table(
+            connection, None, charset, full_name=full_name
+        )
+        if parser._check_view(sql):
+            # Adapt views to something table-like.
+            columns = self._describe_table(
+                connection, None, charset, full_name=full_name
+            )
+            sql = parser._describe_to_create(table_name, columns)
+        return parser.parse(sql, charset)
+
+    def _fetch_setting(self, connection, setting_name):
+        charset = self._connection_charset
+
+        if self.server_version_info and self.server_version_info < (5, 6):
+            sql = "SHOW VARIABLES LIKE '%s'" % setting_name
+            fetch_col = 1
+        else:
+            sql = "SELECT @@%s" % setting_name
+            fetch_col = 0
+
+        show_var = connection.exec_driver_sql(sql)
+        row = self._compat_first(show_var, charset=charset)
+        if not row:
+            return None
+        else:
+            return row[fetch_col]
+
+    def _detect_charset(self, connection):
+        raise NotImplementedError()
+
+    def _detect_casing(self, connection):
+        """Sniff out identifier case sensitivity.
+
+        Cached per-connection. This value can not change without a server
+        restart.
+
+        """
+        # https://dev.mysql.com/doc/refman/en/identifier-case-sensitivity.html
+
+        setting = self._fetch_setting(connection, "lower_case_table_names")
+        if setting is None:
+            cs = 0
+        else:
+            # 4.0.15 returns OFF or ON according to [ticket:489]
+            # 3.23 doesn't, 4.0.27 doesn't..
+            if setting == "OFF":
+                cs = 0
+            elif setting == "ON":
+                cs = 1
+            else:
+                cs = int(setting)
+        self._casing = cs
+        return cs
+
+    def _detect_collations(self, connection):
+        """Pull the active COLLATIONS list from the server.
+
+        Cached per-connection.
+        """
+
+        collations = {}
+        charset = self._connection_charset
+        rs = connection.exec_driver_sql("SHOW COLLATION")
+        for row in self._compat_fetchall(rs, charset):
+            collations[row[0]] = row[1]
+        return collations
+
+    def _detect_sql_mode(self, connection):
+        setting = self._fetch_setting(connection, "sql_mode")
+
+        if setting is None:
+            util.warn(
+                "Could not retrieve SQL_MODE; please ensure the "
+                "MySQL user has permissions to SHOW VARIABLES"
+            )
+            self._sql_mode = ""
+        else:
+            self._sql_mode = setting or ""
+
+    def _detect_ansiquotes(self, connection):
+        """Detect and adjust for the ANSI_QUOTES sql mode."""
+
+        mode = self._sql_mode
+        if not mode:
+            mode = ""
+        elif mode.isdigit():
+            mode_no = int(mode)
+            mode = (mode_no | 4 == mode_no) and "ANSI_QUOTES" or ""
+
+        self._server_ansiquotes = "ANSI_QUOTES" in mode
+
+        # as of MySQL 5.0.1
+        self._backslash_escapes = "NO_BACKSLASH_ESCAPES" not in mode
+
+    def _show_create_table(
+        self, connection, table, charset=None, full_name=None
+    ):
+        """Run SHOW CREATE TABLE for a ``Table``."""
+
+        if full_name is None:
+            full_name = self.identifier_preparer.format_table(table)
+        st = "SHOW CREATE TABLE %s" % full_name
+
+        rp = None
+        try:
+            rp = connection.execution_options(
+                skip_user_error_events=True
+            ).exec_driver_sql(st)
+        except exc.DBAPIError as e:
+            if self._extract_error_code(e.orig) == 1146:
+                raise exc.NoSuchTableError(full_name) from e
+            else:
+                raise
+        row = self._compat_first(rp, charset=charset)
+        if not row:
+            raise exc.NoSuchTableError(full_name)
+        return row[1].strip()
+
+    def _describe_table(self, connection, table, charset=None, full_name=None):
+        """Run DESCRIBE for a ``Table`` and return processed rows."""
+
+        if full_name is None:
+            full_name = self.identifier_preparer.format_table(table)
+        st = "DESCRIBE %s" % full_name
+
+        rp, rows = None, None
+        try:
+            try:
+                rp = connection.execution_options(
+                    skip_user_error_events=True
+                ).exec_driver_sql(st)
+            except exc.DBAPIError as e:
+                code = self._extract_error_code(e.orig)
+                if code == 1146:
+                    raise exc.NoSuchTableError(full_name) from e
+
+                elif code == 1356:
+                    raise exc.UnreflectableTableError(
+                        "Table or view named %s could not be "
+                        "reflected: %s" % (full_name, e)
+                    ) from e
+
+                else:
+                    raise
+            rows = self._compat_fetchall(rp, charset=charset)
+        finally:
+            if rp:
+                rp.close()
+        return rows
+
+
+class _DecodingRow:
+    """Return unicode-decoded values based on type inspection.
+
+    Smooth over data type issues (esp. with alpha driver versions) and
+    normalize strings as Unicode regardless of user-configured driver
+    encoding settings.
+
+    """
+
+    # Some MySQL-python versions can return some columns as
+    # sets.Set(['value']) (seriously) but thankfully that doesn't
+    # seem to come up in DDL queries.
+
+    _encoding_compat = {
+        "koi8r": "koi8_r",
+        "koi8u": "koi8_u",
+        "utf16": "utf-16-be",  # MySQL's uft16 is always bigendian
+        "utf8mb4": "utf8",  # real utf8
+        "utf8mb3": "utf8",  # real utf8; saw this happen on CI but I cannot
+        # reproduce, possibly mariadb10.6 related
+        "eucjpms": "ujis",
+    }
+
+    def __init__(self, rowproxy, charset):
+        self.rowproxy = rowproxy
+        self.charset = self._encoding_compat.get(charset, charset)
+
+    def __getitem__(self, index):
+        item = self.rowproxy[index]
+        if isinstance(item, _array):
+            item = item.tostring()
+
+        if self.charset and isinstance(item, bytes):
+            return item.decode(self.charset)
+        else:
+            return item
+
+    def __getattr__(self, attr):
+        item = getattr(self.rowproxy, attr)
+        if isinstance(item, _array):
+            item = item.tostring()
+        if self.charset and isinstance(item, bytes):
+            return item.decode(self.charset)
+        else:
+            return item
+
+
+_info_columns = sql.table(
+    "columns",
+    sql.column("table_schema", VARCHAR(64)),
+    sql.column("table_name", VARCHAR(64)),
+    sql.column("column_name", VARCHAR(64)),
+    schema="information_schema",
+)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/cymysql.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/cymysql.py
new file mode 100644
index 00000000..5c00ada9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/cymysql.py
@@ -0,0 +1,84 @@
+# dialects/mysql/cymysql.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+r"""
+
+.. dialect:: mysql+cymysql
+    :name: CyMySQL
+    :dbapi: cymysql
+    :connectstring: mysql+cymysql://<username>:<password>@<host>/<dbname>[?<options>]
+    :url: https://github.com/nakagami/CyMySQL
+
+.. note::
+
+    The CyMySQL dialect is **not tested as part of SQLAlchemy's continuous
+    integration** and may have unresolved issues.  The recommended MySQL
+    dialects are mysqlclient and PyMySQL.
+
+"""  # noqa
+
+from .base import BIT
+from .base import MySQLDialect
+from .mysqldb import MySQLDialect_mysqldb
+from ... import util
+
+
+class _cymysqlBIT(BIT):
+    def result_processor(self, dialect, coltype):
+        """Convert MySQL's 64 bit, variable length binary string to an integer."""
+
+        def process(value):
+            if value is not None:
+                v = 0
+                for i in iter(value):
+                    v = v << 8 | i
+                return v
+            return value
+
+        return process
+
+
+class MySQLDialect_cymysql(MySQLDialect_mysqldb):
+    driver = "cymysql"
+    supports_statement_cache = True
+
+    description_encoding = None
+    supports_sane_rowcount = True
+    supports_sane_multi_rowcount = False
+    supports_unicode_statements = True
+
+    colspecs = util.update_copy(MySQLDialect.colspecs, {BIT: _cymysqlBIT})
+
+    @classmethod
+    def import_dbapi(cls):
+        return __import__("cymysql")
+
+    def _detect_charset(self, connection):
+        return connection.connection.charset
+
+    def _extract_error_code(self, exception):
+        return exception.errno
+
+    def is_disconnect(self, e, connection, cursor):
+        if isinstance(e, self.dbapi.OperationalError):
+            return self._extract_error_code(e) in (
+                2006,
+                2013,
+                2014,
+                2045,
+                2055,
+            )
+        elif isinstance(e, self.dbapi.InterfaceError):
+            # if underlying connection is closed,
+            # this is the error you get
+            return True
+        else:
+            return False
+
+
+dialect = MySQLDialect_cymysql
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/dml.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/dml.py
new file mode 100644
index 00000000..cceb0818
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/dml.py
@@ -0,0 +1,225 @@
+# dialects/mysql/dml.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+from __future__ import annotations
+
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Tuple
+from typing import Union
+
+from ... import exc
+from ... import util
+from ...sql._typing import _DMLTableArgument
+from ...sql.base import _exclusive_against
+from ...sql.base import _generative
+from ...sql.base import ColumnCollection
+from ...sql.base import ReadOnlyColumnCollection
+from ...sql.dml import Insert as StandardInsert
+from ...sql.elements import ClauseElement
+from ...sql.elements import KeyedColumnElement
+from ...sql.expression import alias
+from ...sql.selectable import NamedFromClause
+from ...util.typing import Self
+
+
+__all__ = ("Insert", "insert")
+
+
+def insert(table: _DMLTableArgument) -> Insert:
+    """Construct a MySQL/MariaDB-specific variant :class:`_mysql.Insert`
+    construct.
+
+    .. container:: inherited_member
+
+        The :func:`sqlalchemy.dialects.mysql.insert` function creates
+        a :class:`sqlalchemy.dialects.mysql.Insert`.  This class is based
+        on the dialect-agnostic :class:`_sql.Insert` construct which may
+        be constructed using the :func:`_sql.insert` function in
+        SQLAlchemy Core.
+
+    The :class:`_mysql.Insert` construct includes additional methods
+    :meth:`_mysql.Insert.on_duplicate_key_update`.
+
+    """
+    return Insert(table)
+
+
+class Insert(StandardInsert):
+    """MySQL-specific implementation of INSERT.
+
+    Adds methods for MySQL-specific syntaxes such as ON DUPLICATE KEY UPDATE.
+
+    The :class:`~.mysql.Insert` object is created using the
+    :func:`sqlalchemy.dialects.mysql.insert` function.
+
+    .. versionadded:: 1.2
+
+    """
+
+    stringify_dialect = "mysql"
+    inherit_cache = False
+
+    @property
+    def inserted(
+        self,
+    ) -> ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]:
+        """Provide the "inserted" namespace for an ON DUPLICATE KEY UPDATE
+        statement
+
+        MySQL's ON DUPLICATE KEY UPDATE clause allows reference to the row
+        that would be inserted, via a special function called ``VALUES()``.
+        This attribute provides all columns in this row to be referenceable
+        such that they will render within a ``VALUES()`` function inside the
+        ON DUPLICATE KEY UPDATE clause.    The attribute is named ``.inserted``
+        so as not to conflict with the existing
+        :meth:`_expression.Insert.values` method.
+
+        .. tip::  The :attr:`_mysql.Insert.inserted` attribute is an instance
+            of :class:`_expression.ColumnCollection`, which provides an
+            interface the same as that of the :attr:`_schema.Table.c`
+            collection described at :ref:`metadata_tables_and_columns`.
+            With this collection, ordinary names are accessible like attributes
+            (e.g. ``stmt.inserted.some_column``), but special names and
+            dictionary method names should be accessed using indexed access,
+            such as ``stmt.inserted["column name"]`` or
+            ``stmt.inserted["values"]``.  See the docstring for
+            :class:`_expression.ColumnCollection` for further examples.
+
+        .. seealso::
+
+            :ref:`mysql_insert_on_duplicate_key_update` - example of how
+            to use :attr:`_expression.Insert.inserted`
+
+        """
+        return self.inserted_alias.columns
+
+    @util.memoized_property
+    def inserted_alias(self) -> NamedFromClause:
+        return alias(self.table, name="inserted")
+
+    @_generative
+    @_exclusive_against(
+        "_post_values_clause",
+        msgs={
+            "_post_values_clause": "This Insert construct already "
+            "has an ON DUPLICATE KEY clause present"
+        },
+    )
+    def on_duplicate_key_update(self, *args: _UpdateArg, **kw: Any) -> Self:
+        r"""
+        Specifies the ON DUPLICATE KEY UPDATE clause.
+
+        :param \**kw:  Column keys linked to UPDATE values.  The
+         values may be any SQL expression or supported literal Python
+         values.
+
+        .. warning:: This dictionary does **not** take into account
+           Python-specified default UPDATE values or generation functions,
+           e.g. those specified using :paramref:`_schema.Column.onupdate`.
+           These values will not be exercised for an ON DUPLICATE KEY UPDATE
+           style of UPDATE, unless values are manually specified here.
+
+        :param \*args: As an alternative to passing key/value parameters,
+         a dictionary or list of 2-tuples can be passed as a single positional
+         argument.
+
+         Passing a single dictionary is equivalent to the keyword argument
+         form::
+
+            insert().on_duplicate_key_update({"name": "some name"})
+
+         Passing a list of 2-tuples indicates that the parameter assignments
+         in the UPDATE clause should be ordered as sent, in a manner similar
+         to that described for the :class:`_expression.Update`
+         construct overall
+         in :ref:`tutorial_parameter_ordered_updates`::
+
+            insert().on_duplicate_key_update(
+                [
+                    ("name", "some name"),
+                    ("value", "some value"),
+                ]
+            )
+
+         .. versionchanged:: 1.3 parameters can be specified as a dictionary
+            or list of 2-tuples; the latter form provides for parameter
+            ordering.
+
+
+        .. versionadded:: 1.2
+
+        .. seealso::
+
+            :ref:`mysql_insert_on_duplicate_key_update`
+
+        """
+        if args and kw:
+            raise exc.ArgumentError(
+                "Can't pass kwargs and positional arguments simultaneously"
+            )
+
+        if args:
+            if len(args) > 1:
+                raise exc.ArgumentError(
+                    "Only a single dictionary or list of tuples "
+                    "is accepted positionally."
+                )
+            values = args[0]
+        else:
+            values = kw
+
+        self._post_values_clause = OnDuplicateClause(
+            self.inserted_alias, values
+        )
+        return self
+
+
+class OnDuplicateClause(ClauseElement):
+    __visit_name__ = "on_duplicate_key_update"
+
+    _parameter_ordering: Optional[List[str]] = None
+
+    update: Dict[str, Any]
+    stringify_dialect = "mysql"
+
+    def __init__(
+        self, inserted_alias: NamedFromClause, update: _UpdateArg
+    ) -> None:
+        self.inserted_alias = inserted_alias
+
+        # auto-detect that parameters should be ordered.   This is copied from
+        # Update._process_colparams(), however we don't look for a special flag
+        # in this case since we are not disambiguating from other use cases as
+        # we are in Update.values().
+        if isinstance(update, list) and (
+            update and isinstance(update[0], tuple)
+        ):
+            self._parameter_ordering = [key for key, value in update]
+            update = dict(update)
+
+        if isinstance(update, dict):
+            if not update:
+                raise ValueError(
+                    "update parameter dictionary must not be empty"
+                )
+        elif isinstance(update, ColumnCollection):
+            update = dict(update)
+        else:
+            raise ValueError(
+                "update parameter must be a non-empty dictionary "
+                "or a ColumnCollection such as the `.c.` collection "
+                "of a Table object"
+            )
+        self.update = update
+
+
+_UpdateArg = Union[
+    Mapping[Any, Any], List[Tuple[str, Any]], ColumnCollection[Any, Any]
+]
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/enumerated.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/enumerated.py
new file mode 100644
index 00000000..6745cae5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/enumerated.py
@@ -0,0 +1,243 @@
+# dialects/mysql/enumerated.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+import re
+
+from .types import _StringType
+from ... import exc
+from ... import sql
+from ... import util
+from ...sql import sqltypes
+
+
+class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum, _StringType):
+    """MySQL ENUM type."""
+
+    __visit_name__ = "ENUM"
+
+    native_enum = True
+
+    def __init__(self, *enums, **kw):
+        """Construct an ENUM.
+
+        E.g.::
+
+          Column("myenum", ENUM("foo", "bar", "baz"))
+
+        :param enums: The range of valid values for this ENUM.  Values in
+          enums are not quoted, they will be escaped and surrounded by single
+          quotes when generating the schema.  This object may also be a
+          PEP-435-compliant enumerated type.
+
+          .. versionadded:: 1.1 added support for PEP-435-compliant enumerated
+             types.
+
+        :param strict: This flag has no effect.
+
+         .. versionchanged:: The MySQL ENUM type as well as the base Enum
+            type now validates all Python data values.
+
+        :param charset: Optional, a column-level character set for this string
+          value.  Takes precedence over 'ascii' or 'unicode' short-hand.
+
+        :param collation: Optional, a column-level collation for this string
+          value.  Takes precedence over 'binary' short-hand.
+
+        :param ascii: Defaults to False: short-hand for the ``latin1``
+          character set, generates ASCII in schema.
+
+        :param unicode: Defaults to False: short-hand for the ``ucs2``
+          character set, generates UNICODE in schema.
+
+        :param binary: Defaults to False: short-hand, pick the binary
+          collation type that matches the column's character set.  Generates
+          BINARY in schema.  This does not affect the type of data stored,
+          only the collation of character data.
+
+        """
+        kw.pop("strict", None)
+        self._enum_init(enums, kw)
+        _StringType.__init__(self, length=self.length, **kw)
+
+    @classmethod
+    def adapt_emulated_to_native(cls, impl, **kw):
+        """Produce a MySQL native :class:`.mysql.ENUM` from plain
+        :class:`.Enum`.
+
+        """
+        kw.setdefault("validate_strings", impl.validate_strings)
+        kw.setdefault("values_callable", impl.values_callable)
+        kw.setdefault("omit_aliases", impl._omit_aliases)
+        return cls(**kw)
+
+    def _object_value_for_elem(self, elem):
+        # mysql sends back a blank string for any value that
+        # was persisted that was not in the enums; that is, it does no
+        # validation on the incoming data, it "truncates" it to be
+        # the blank string.  Return it straight.
+        if elem == "":
+            return elem
+        else:
+            return super()._object_value_for_elem(elem)
+
+    def __repr__(self):
+        return util.generic_repr(
+            self, to_inspect=[ENUM, _StringType, sqltypes.Enum]
+        )
+
+
+class SET(_StringType):
+    """MySQL SET type."""
+
+    __visit_name__ = "SET"
+
+    def __init__(self, *values, **kw):
+        """Construct a SET.
+
+        E.g.::
+
+          Column("myset", SET("foo", "bar", "baz"))
+
+        The list of potential values is required in the case that this
+        set will be used to generate DDL for a table, or if the
+        :paramref:`.SET.retrieve_as_bitwise` flag is set to True.
+
+        :param values: The range of valid values for this SET. The values
+          are not quoted, they will be escaped and surrounded by single
+          quotes when generating the schema.
+
+        :param convert_unicode: Same flag as that of
+         :paramref:`.String.convert_unicode`.
+
+        :param collation: same as that of :paramref:`.String.collation`
+
+        :param charset: same as that of :paramref:`.VARCHAR.charset`.
+
+        :param ascii: same as that of :paramref:`.VARCHAR.ascii`.
+
+        :param unicode: same as that of :paramref:`.VARCHAR.unicode`.
+
+        :param binary: same as that of :paramref:`.VARCHAR.binary`.
+
+        :param retrieve_as_bitwise: if True, the data for the set type will be
+          persisted and selected using an integer value, where a set is coerced
+          into a bitwise mask for persistence.  MySQL allows this mode which
+          has the advantage of being able to store values unambiguously,
+          such as the blank string ``''``.   The datatype will appear
+          as the expression ``col + 0`` in a SELECT statement, so that the
+          value is coerced into an integer value in result sets.
+          This flag is required if one wishes
+          to persist a set that can store the blank string ``''`` as a value.
+
+          .. warning::
+
+            When using :paramref:`.mysql.SET.retrieve_as_bitwise`, it is
+            essential that the list of set values is expressed in the
+            **exact same order** as exists on the MySQL database.
+
+        """
+        self.retrieve_as_bitwise = kw.pop("retrieve_as_bitwise", False)
+        self.values = tuple(values)
+        if not self.retrieve_as_bitwise and "" in values:
+            raise exc.ArgumentError(
+                "Can't use the blank value '' in a SET without "
+                "setting retrieve_as_bitwise=True"
+            )
+        if self.retrieve_as_bitwise:
+            self._bitmap = {
+                value: 2**idx for idx, value in enumerate(self.values)
+            }
+            self._bitmap.update(
+                (2**idx, value) for idx, value in enumerate(self.values)
+            )
+        length = max([len(v) for v in values] + [0])
+        kw.setdefault("length", length)
+        super().__init__(**kw)
+
+    def column_expression(self, colexpr):
+        if self.retrieve_as_bitwise:
+            return sql.type_coerce(
+                sql.type_coerce(colexpr, sqltypes.Integer) + 0, self
+            )
+        else:
+            return colexpr
+
+    def result_processor(self, dialect, coltype):
+        if self.retrieve_as_bitwise:
+
+            def process(value):
+                if value is not None:
+                    value = int(value)
+
+                    return set(util.map_bits(self._bitmap.__getitem__, value))
+                else:
+                    return None
+
+        else:
+            super_convert = super().result_processor(dialect, coltype)
+
+            def process(value):
+                if isinstance(value, str):
+                    # MySQLdb returns a string, let's parse
+                    if super_convert:
+                        value = super_convert(value)
+                    return set(re.findall(r"[^,]+", value))
+                else:
+                    # mysql-connector-python does a naive
+                    # split(",") which throws in an empty string
+                    if value is not None:
+                        value.discard("")
+                    return value
+
+        return process
+
+    def bind_processor(self, dialect):
+        super_convert = super().bind_processor(dialect)
+        if self.retrieve_as_bitwise:
+
+            def process(value):
+                if value is None:
+                    return None
+                elif isinstance(value, (int, str)):
+                    if super_convert:
+                        return super_convert(value)
+                    else:
+                        return value
+                else:
+                    int_value = 0
+                    for v in value:
+                        int_value |= self._bitmap[v]
+                    return int_value
+
+        else:
+
+            def process(value):
+                # accept strings and int (actually bitflag) values directly
+                if value is not None and not isinstance(value, (int, str)):
+                    value = ",".join(value)
+
+                if super_convert:
+                    return super_convert(value)
+                else:
+                    return value
+
+        return process
+
+    def adapt(self, impltype, **kw):
+        kw["retrieve_as_bitwise"] = self.retrieve_as_bitwise
+        return util.constructor_copy(self, impltype, *self.values, **kw)
+
+    def __repr__(self):
+        return util.generic_repr(
+            self,
+            to_inspect=[SET, _StringType],
+            additional_kw=[
+                ("retrieve_as_bitwise", False),
+            ],
+        )
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/expression.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/expression.py
new file mode 100644
index 00000000..b60a0888
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/expression.py
@@ -0,0 +1,143 @@
+# dialects/mysql/expression.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+from ... import exc
+from ... import util
+from ...sql import coercions
+from ...sql import elements
+from ...sql import operators
+from ...sql import roles
+from ...sql.base import _generative
+from ...sql.base import Generative
+from ...util.typing import Self
+
+
+class match(Generative, elements.BinaryExpression):
+    """Produce a ``MATCH (X, Y) AGAINST ('TEXT')`` clause.
+
+    E.g.::
+
+        from sqlalchemy import desc
+        from sqlalchemy.dialects.mysql import match
+
+        match_expr = match(
+            users_table.c.firstname,
+            users_table.c.lastname,
+            against="Firstname Lastname",
+        )
+
+        stmt = (
+            select(users_table)
+            .where(match_expr.in_boolean_mode())
+            .order_by(desc(match_expr))
+        )
+
+    Would produce SQL resembling:
+
+    .. sourcecode:: sql
+
+        SELECT id, firstname, lastname
+        FROM user
+        WHERE MATCH(firstname, lastname) AGAINST (:param_1 IN BOOLEAN MODE)
+        ORDER BY MATCH(firstname, lastname) AGAINST (:param_2) DESC
+
+    The :func:`_mysql.match` function is a standalone version of the
+    :meth:`_sql.ColumnElement.match` method available on all
+    SQL expressions, as when :meth:`_expression.ColumnElement.match` is
+    used, but allows passing multiple columns
+
+    :param cols: column expressions to match against
+
+    :param against: expression to be compared towards
+
+    :param in_boolean_mode: boolean, set "boolean mode" to true
+
+    :param in_natural_language_mode: boolean, set "natural language" to true
+
+    :param with_query_expansion: boolean, set "query expansion" to true
+
+    .. versionadded:: 1.4.19
+
+    .. seealso::
+
+        :meth:`_expression.ColumnElement.match`
+
+    """
+
+    __visit_name__ = "mysql_match"
+
+    inherit_cache = True
+
+    def __init__(self, *cols, **kw):
+        if not cols:
+            raise exc.ArgumentError("columns are required")
+
+        against = kw.pop("against", None)
+
+        if against is None:
+            raise exc.ArgumentError("against is required")
+        against = coercions.expect(
+            roles.ExpressionElementRole,
+            against,
+        )
+
+        left = elements.BooleanClauseList._construct_raw(
+            operators.comma_op,
+            clauses=cols,
+        )
+        left.group = False
+
+        flags = util.immutabledict(
+            {
+                "mysql_boolean_mode": kw.pop("in_boolean_mode", False),
+                "mysql_natural_language": kw.pop(
+                    "in_natural_language_mode", False
+                ),
+                "mysql_query_expansion": kw.pop("with_query_expansion", False),
+            }
+        )
+
+        if kw:
+            raise exc.ArgumentError("unknown arguments: %s" % (", ".join(kw)))
+
+        super().__init__(left, against, operators.match_op, modifiers=flags)
+
+    @_generative
+    def in_boolean_mode(self) -> Self:
+        """Apply the "IN BOOLEAN MODE" modifier to the MATCH expression.
+
+        :return: a new :class:`_mysql.match` instance with modifications
+         applied.
+        """
+
+        self.modifiers = self.modifiers.union({"mysql_boolean_mode": True})
+        return self
+
+    @_generative
+    def in_natural_language_mode(self) -> Self:
+        """Apply the "IN NATURAL LANGUAGE MODE" modifier to the MATCH
+        expression.
+
+        :return: a new :class:`_mysql.match` instance with modifications
+         applied.
+        """
+
+        self.modifiers = self.modifiers.union({"mysql_natural_language": True})
+        return self
+
+    @_generative
+    def with_query_expansion(self) -> Self:
+        """Apply the "WITH QUERY EXPANSION" modifier to the MATCH expression.
+
+        :return: a new :class:`_mysql.match` instance with modifications
+         applied.
+        """
+
+        self.modifiers = self.modifiers.union({"mysql_query_expansion": True})
+        return self
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/json.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/json.py
new file mode 100644
index 00000000..8912af36
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/json.py
@@ -0,0 +1,81 @@
+# dialects/mysql/json.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+from ... import types as sqltypes
+
+
+class JSON(sqltypes.JSON):
+    """MySQL JSON type.
+
+    MySQL supports JSON as of version 5.7.
+    MariaDB supports JSON (as an alias for LONGTEXT) as of version 10.2.
+
+    :class:`_mysql.JSON` is used automatically whenever the base
+    :class:`_types.JSON` datatype is used against a MySQL or MariaDB backend.
+
+    .. seealso::
+
+        :class:`_types.JSON` - main documentation for the generic
+        cross-platform JSON datatype.
+
+    The :class:`.mysql.JSON` type supports persistence of JSON values
+    as well as the core index operations provided by :class:`_types.JSON`
+    datatype, by adapting the operations to render the ``JSON_EXTRACT``
+    function at the database level.
+
+    """
+
+    pass
+
+
+class _FormatTypeMixin:
+    def _format_value(self, value):
+        raise NotImplementedError()
+
+    def bind_processor(self, dialect):
+        super_proc = self.string_bind_processor(dialect)
+
+        def process(value):
+            value = self._format_value(value)
+            if super_proc:
+                value = super_proc(value)
+            return value
+
+        return process
+
+    def literal_processor(self, dialect):
+        super_proc = self.string_literal_processor(dialect)
+
+        def process(value):
+            value = self._format_value(value)
+            if super_proc:
+                value = super_proc(value)
+            return value
+
+        return process
+
+
+class JSONIndexType(_FormatTypeMixin, sqltypes.JSON.JSONIndexType):
+    def _format_value(self, value):
+        if isinstance(value, int):
+            value = "$[%s]" % value
+        else:
+            value = '$."%s"' % value
+        return value
+
+
+class JSONPathType(_FormatTypeMixin, sqltypes.JSON.JSONPathType):
+    def _format_value(self, value):
+        return "$%s" % (
+            "".join(
+                [
+                    "[%s]" % elem if isinstance(elem, int) else '."%s"' % elem
+                    for elem in value
+                ]
+            )
+        )
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mariadb.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mariadb.py
new file mode 100644
index 00000000..ac2cfbd1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mariadb.py
@@ -0,0 +1,61 @@
+# dialects/mysql/mariadb.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+from .base import MariaDBIdentifierPreparer
+from .base import MySQLDialect
+from .base import MySQLTypeCompiler
+from ...sql import sqltypes
+
+
+class INET4(sqltypes.TypeEngine[str]):
+    """INET4 column type for MariaDB
+
+    .. versionadded:: 2.0.37
+    """
+
+    __visit_name__ = "INET4"
+
+
+class INET6(sqltypes.TypeEngine[str]):
+    """INET6 column type for MariaDB
+
+    .. versionadded:: 2.0.37
+    """
+
+    __visit_name__ = "INET6"
+
+
+class MariaDBTypeCompiler(MySQLTypeCompiler):
+    def visit_INET4(self, type_, **kwargs) -> str:
+        return "INET4"
+
+    def visit_INET6(self, type_, **kwargs) -> str:
+        return "INET6"
+
+
+class MariaDBDialect(MySQLDialect):
+    is_mariadb = True
+    supports_statement_cache = True
+    name = "mariadb"
+    preparer = MariaDBIdentifierPreparer
+    type_compiler_cls = MariaDBTypeCompiler
+
+
+def loader(driver):
+    driver_mod = __import__(
+        "sqlalchemy.dialects.mysql.%s" % driver
+    ).dialects.mysql
+    driver_cls = getattr(driver_mod, driver).dialect
+
+    return type(
+        "MariaDBDialect_%s" % driver,
+        (
+            MariaDBDialect,
+            driver_cls,
+        ),
+        {"supports_statement_cache": True},
+    )
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mariadbconnector.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mariadbconnector.py
new file mode 100644
index 00000000..2d2ad199
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mariadbconnector.py
@@ -0,0 +1,277 @@
+# dialects/mysql/mariadbconnector.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+"""
+
+.. dialect:: mysql+mariadbconnector
+    :name: MariaDB Connector/Python
+    :dbapi: mariadb
+    :connectstring: mariadb+mariadbconnector://<user>:<password>@<host>[:<port>]/<dbname>
+    :url: https://pypi.org/project/mariadb/
+
+Driver Status
+-------------
+
+MariaDB Connector/Python enables Python programs to access MariaDB and MySQL
+databases using an API which is compliant with the Python DB API 2.0 (PEP-249).
+It is written in C and uses MariaDB Connector/C client library for client server
+communication.
+
+Note that the default driver for a ``mariadb://`` connection URI continues to
+be ``mysqldb``. ``mariadb+mariadbconnector://`` is required to use this driver.
+
+.. _mariadb: https://github.com/mariadb-corporation/mariadb-connector-python
+
+"""  # noqa
+import re
+from uuid import UUID as _python_UUID
+
+from .base import MySQLCompiler
+from .base import MySQLDialect
+from .base import MySQLExecutionContext
+from ... import sql
+from ... import util
+from ...sql import sqltypes
+
+
+mariadb_cpy_minimum_version = (1, 0, 1)
+
+
+class _MariaDBUUID(sqltypes.UUID[sqltypes._UUID_RETURN]):
+    # work around JIRA issue
+    # https://jira.mariadb.org/browse/CONPY-270.  When that issue is fixed,
+    # this type can be removed.
+    def result_processor(self, dialect, coltype):
+        if self.as_uuid:
+
+            def process(value):
+                if value is not None:
+                    if hasattr(value, "decode"):
+                        value = value.decode("ascii")
+                    value = _python_UUID(value)
+                return value
+
+            return process
+        else:
+
+            def process(value):
+                if value is not None:
+                    if hasattr(value, "decode"):
+                        value = value.decode("ascii")
+                    value = str(_python_UUID(value))
+                return value
+
+            return process
+
+
+class MySQLExecutionContext_mariadbconnector(MySQLExecutionContext):
+    """Execution context capturing rowcount/lastrowid for mariadbconnector."""
+
+    # snapshot of cursor.lastrowid taken in post_exec(); served by
+    # get_lastrowid()
+    _lastrowid = None
+
+    def create_server_side_cursor(self):
+        # unbuffered cursor: rows are streamed from the server
+        return self._dbapi_connection.cursor(buffered=False)
+
+    def create_default_cursor(self):
+        # buffered cursor: full result set fetched up front
+        return self._dbapi_connection.cursor(buffered=True)
+
+    def post_exec(self):
+        super().post_exec()
+
+        # snapshot rowcount immediately after execution
+        self._rowcount = self.cursor.rowcount
+
+        if self.isinsert and self.compiled.postfetch_lastrowid:
+            self._lastrowid = self.cursor.lastrowid
+
+    def get_lastrowid(self):
+        return self._lastrowid
+
+
+class MySQLCompiler_mariadbconnector(MySQLCompiler):
+    """Statement compiler for mariadbconnector; no overrides currently."""
+
+    pass
+
+
+class MySQLDialect_mariadbconnector(MySQLDialect):
+    driver = "mariadbconnector"
+    supports_statement_cache = True
+
+    # set this to True at the module level to prevent the driver from running
+    # against a backend that server detects as MySQL. currently this appears to
+    # be unnecessary as MariaDB client libraries have always worked against
+    # MySQL databases.   However, if this changes at some point, this can be
+    # adjusted, but PLEASE ADD A TEST in test/dialect/mysql/test_dialect.py if
+    # this change is made at some point to ensure the correct exception
+    # is raised at the correct point when running the driver against
+    # a MySQL backend.
+    # is_mariadb = True
+
+    supports_unicode_statements = True
+    encoding = "utf8mb4"
+    convert_unicode = True
+    supports_sane_rowcount = True
+    supports_sane_multi_rowcount = True
+    supports_native_decimal = True
+    default_paramstyle = "qmark"
+    execution_ctx_cls = MySQLExecutionContext_mariadbconnector
+    statement_compiler = MySQLCompiler_mariadbconnector
+
+    supports_server_side_cursors = True
+
+    colspecs = util.update_copy(
+        MySQLDialect.colspecs, {sqltypes.Uuid: _MariaDBUUID}
+    )
+
+    @util.memoized_property
+    def _dbapi_version(self):
+        if self.dbapi and hasattr(self.dbapi, "__version__"):
+            return tuple(
+                [
+                    int(x)
+                    for x in re.findall(
+                        r"(\d+)(?:[-\.]?|$)", self.dbapi.__version__
+                    )
+                ]
+            )
+        else:
+            return (99, 99, 99)
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        self.paramstyle = "qmark"
+        if self.dbapi is not None:
+            if self._dbapi_version < mariadb_cpy_minimum_version:
+                raise NotImplementedError(
+                    "The minimum required version for MariaDB "
+                    "Connector/Python is %s"
+                    % ".".join(str(x) for x in mariadb_cpy_minimum_version)
+                )
+
+    @classmethod
+    def import_dbapi(cls):
+        return __import__("mariadb")
+
+    def is_disconnect(self, e, connection, cursor):
+        if super().is_disconnect(e, connection, cursor):
+            return True
+        elif isinstance(e, self.dbapi.Error):
+            str_e = str(e).lower()
+            return "not connected" in str_e or "isn't valid" in str_e
+        else:
+            return False
+
+    def create_connect_args(self, url):
+        opts = url.translate_connect_args()
+        opts.update(url.query)
+
+        int_params = [
+            "connect_timeout",
+            "read_timeout",
+            "write_timeout",
+            "client_flag",
+            "port",
+            "pool_size",
+        ]
+        bool_params = [
+            "local_infile",
+            "ssl_verify_cert",
+            "ssl",
+            "pool_reset_connection",
+            "compress",
+        ]
+
+        for key in int_params:
+            util.coerce_kw_type(opts, key, int)
+        for key in bool_params:
+            util.coerce_kw_type(opts, key, bool)
+
+        # FOUND_ROWS must be set in CLIENT_FLAGS to enable
+        # supports_sane_rowcount.
+        client_flag = opts.get("client_flag", 0)
+        if self.dbapi is not None:
+            try:
+                CLIENT_FLAGS = __import__(
+                    self.dbapi.__name__ + ".constants.CLIENT"
+                ).constants.CLIENT
+                client_flag |= CLIENT_FLAGS.FOUND_ROWS
+            except (AttributeError, ImportError):
+                self.supports_sane_rowcount = False
+            opts["client_flag"] = client_flag
+        return [[], opts]
+
+    def _extract_error_code(self, exception):
+        try:
+            rc = exception.errno
+        except:
+            rc = -1
+        return rc
+
+    def _detect_charset(self, connection):
+        return "utf8mb4"
+
+    def get_isolation_level_values(self, dbapi_connection):
+        return (
+            "SERIALIZABLE",
+            "READ UNCOMMITTED",
+            "READ COMMITTED",
+            "REPEATABLE READ",
+            "AUTOCOMMIT",
+        )
+
+    def set_isolation_level(self, connection, level):
+        if level == "AUTOCOMMIT":
+            connection.autocommit = True
+        else:
+            connection.autocommit = False
+            super().set_isolation_level(connection, level)
+
+    def do_begin_twophase(self, connection, xid):
+        connection.execute(
+            sql.text("XA BEGIN :xid").bindparams(
+                sql.bindparam("xid", xid, literal_execute=True)
+            )
+        )
+
+    def do_prepare_twophase(self, connection, xid):
+        connection.execute(
+            sql.text("XA END :xid").bindparams(
+                sql.bindparam("xid", xid, literal_execute=True)
+            )
+        )
+        connection.execute(
+            sql.text("XA PREPARE :xid").bindparams(
+                sql.bindparam("xid", xid, literal_execute=True)
+            )
+        )
+
+    def do_rollback_twophase(
+        self, connection, xid, is_prepared=True, recover=False
+    ):
+        if not is_prepared:
+            connection.execute(
+                sql.text("XA END :xid").bindparams(
+                    sql.bindparam("xid", xid, literal_execute=True)
+                )
+            )
+        connection.execute(
+            sql.text("XA ROLLBACK :xid").bindparams(
+                sql.bindparam("xid", xid, literal_execute=True)
+            )
+        )
+
+    def do_commit_twophase(
+        self, connection, xid, is_prepared=True, recover=False
+    ):
+        if not is_prepared:
+            self.do_prepare_twophase(connection, xid)
+        connection.execute(
+            sql.text("XA COMMIT :xid").bindparams(
+                sql.bindparam("xid", xid, literal_execute=True)
+            )
+        )
+
+
+dialect = MySQLDialect_mariadbconnector
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mysqlconnector.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mysqlconnector.py
new file mode 100644
index 00000000..e88f8fd7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mysqlconnector.py
@@ -0,0 +1,180 @@
+# dialects/mysql/mysqlconnector.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+r"""
+.. dialect:: mysql+mysqlconnector
+    :name: MySQL Connector/Python
+    :dbapi: myconnpy
+    :connectstring: mysql+mysqlconnector://<user>:<password>@<host>[:<port>]/<dbname>
+    :url: https://pypi.org/project/mysql-connector-python/
+
+.. note::
+
+    The MySQL Connector/Python DBAPI has had many issues since its release,
+    some of which may remain unresolved, and the mysqlconnector dialect is
+    **not tested as part of SQLAlchemy's continuous integration**.
+    The recommended MySQL dialects are mysqlclient and PyMySQL.
+
+"""  # noqa
+
+import re
+
+from .base import BIT
+from .base import MySQLCompiler
+from .base import MySQLDialect
+from .base import MySQLIdentifierPreparer
+from ... import util
+
+
+class MySQLCompiler_mysqlconnector(MySQLCompiler):
+    def visit_mod_binary(self, binary, operator, **kw):
+        return (
+            self.process(binary.left, **kw)
+            + " % "
+            + self.process(binary.right, **kw)
+        )
+
+
+class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer):
+    """Preparer disabling percent-sign doubling for mysqlconnector."""
+
+    @property
+    def _double_percents(self):
+        # always report False: this driver needs no %-escaping
+        return False
+
+    @_double_percents.setter
+    def _double_percents(self, value):
+        # intentionally ignore writes so base-class assignments are no-ops
+        pass
+
+    def _escape_identifier(self, value):
+        # only quote-escaping is applied; no percent doubling
+        value = value.replace(self.escape_quote, self.escape_to_quote)
+        return value
+
+
+class _myconnpyBIT(BIT):
+    def result_processor(self, dialect, coltype):
+        """MySQL-connector already converts mysql bits, so."""
+
+        # returning None disables the base BIT conversion entirely
+        return None
+
+
+class MySQLDialect_mysqlconnector(MySQLDialect):
+    """MySQL dialect for the MySQL Connector/Python DBAPI."""
+
+    driver = "mysqlconnector"
+    supports_statement_cache = True
+
+    supports_sane_rowcount = True
+    supports_sane_multi_rowcount = True
+
+    supports_native_decimal = True
+
+    default_paramstyle = "format"
+    statement_compiler = MySQLCompiler_mysqlconnector
+
+    preparer = MySQLIdentifierPreparer_mysqlconnector
+
+    # BIT values are converted by the driver itself; see _myconnpyBIT
+    colspecs = util.update_copy(MySQLDialect.colspecs, {BIT: _myconnpyBIT})
+
+    @classmethod
+    def import_dbapi(cls):
+        from mysql import connector
+
+        return connector
+
+    def do_ping(self, dbapi_connection):
+        # ping(False): do not attempt an automatic reconnect
+        dbapi_connection.ping(False)
+        return True
+
+    def create_connect_args(self, url):
+        """Build (args, kwargs) for mysql.connector.connect() from a URL."""
+        opts = url.translate_connect_args(username="user")
+
+        opts.update(url.query)
+
+        # URL query values arrive as strings; coerce known keys
+        util.coerce_kw_type(opts, "allow_local_infile", bool)
+        util.coerce_kw_type(opts, "autocommit", bool)
+        util.coerce_kw_type(opts, "buffered", bool)
+        util.coerce_kw_type(opts, "client_flag", int)
+        util.coerce_kw_type(opts, "compress", bool)
+        util.coerce_kw_type(opts, "connection_timeout", int)
+        util.coerce_kw_type(opts, "connect_timeout", int)
+        util.coerce_kw_type(opts, "consume_results", bool)
+        util.coerce_kw_type(opts, "force_ipv6", bool)
+        util.coerce_kw_type(opts, "get_warnings", bool)
+        util.coerce_kw_type(opts, "pool_reset_session", bool)
+        util.coerce_kw_type(opts, "pool_size", int)
+        util.coerce_kw_type(opts, "raise_on_warnings", bool)
+        util.coerce_kw_type(opts, "raw", bool)
+        util.coerce_kw_type(opts, "ssl_verify_cert", bool)
+        util.coerce_kw_type(opts, "use_pure", bool)
+        util.coerce_kw_type(opts, "use_unicode", bool)
+
+        # unfortunately, MySQL/connector python refuses to release a
+        # cursor without reading fully, so non-buffered isn't an option
+        opts.setdefault("buffered", True)
+
+        # FOUND_ROWS must be set in ClientFlag to enable
+        # supports_sane_rowcount.
+        if self.dbapi is not None:
+            try:
+                from mysql.connector.constants import ClientFlag
+
+                client_flags = opts.get(
+                    "client_flags", ClientFlag.get_default()
+                )
+                client_flags |= ClientFlag.FOUND_ROWS
+                opts["client_flags"] = client_flags
+            except Exception:
+                # best-effort: proceed without FOUND_ROWS if constants
+                # cannot be imported
+                pass
+        return [[], opts]
+
+    @util.memoized_property
+    def _mysqlconnector_version_info(self):
+        # parsed DBAPI version tuple, or None if unparseable/absent
+        if self.dbapi and hasattr(self.dbapi, "__version__"):
+            m = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", self.dbapi.__version__)
+            if m:
+                return tuple(int(x) for x in m.group(1, 2, 3) if x is not None)
+
+    def _detect_charset(self, connection):
+        return connection.connection.charset
+
+    def _extract_error_code(self, exception):
+        return exception.errno
+
+    def is_disconnect(self, e, connection, cursor):
+        """Return True if the exception indicates a dropped connection."""
+        errnos = (2006, 2013, 2014, 2045, 2055, 2048)
+        exceptions = (self.dbapi.OperationalError, self.dbapi.InterfaceError)
+        if isinstance(e, exceptions):
+            return (
+                e.errno in errnos
+                or "MySQL Connection not available." in str(e)
+                or "Connection to MySQL is not available" in str(e)
+            )
+        else:
+            return False
+
+    def _compat_fetchall(self, rp, charset=None):
+        # driver rows need no charset post-processing
+        return rp.fetchall()
+
+    def _compat_fetchone(self, rp, charset=None):
+        return rp.fetchone()
+
+    _isolation_lookup = {
+        "SERIALIZABLE",
+        "READ UNCOMMITTED",
+        "READ COMMITTED",
+        "REPEATABLE READ",
+        "AUTOCOMMIT",
+    }
+
+    def _set_isolation_level(self, connection, level):
+        # AUTOCOMMIT is driver-level; other levels go through SQL
+        if level == "AUTOCOMMIT":
+            connection.autocommit = True
+        else:
+            connection.autocommit = False
+            super()._set_isolation_level(connection, level)
+
+
+dialect = MySQLDialect_mysqlconnector
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mysqldb.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mysqldb.py
new file mode 100644
index 00000000..3cf56c1f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/mysqldb.py
@@ -0,0 +1,305 @@
+# dialects/mysql/mysqldb.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+"""
+
+.. dialect:: mysql+mysqldb
+    :name: mysqlclient (maintained fork of MySQL-Python)
+    :dbapi: mysqldb
+    :connectstring: mysql+mysqldb://<user>:<password>@<host>[:<port>]/<dbname>
+    :url: https://pypi.org/project/mysqlclient/
+
+Driver Status
+-------------
+
+The mysqlclient DBAPI is a maintained fork of the
+`MySQL-Python <https://sourceforge.net/projects/mysql-python>`_ DBAPI
+that is no longer maintained.  `mysqlclient`_ supports Python 2 and Python 3
+and is very stable.
+
+.. _mysqlclient: https://github.com/PyMySQL/mysqlclient-python
+
+.. _mysqldb_unicode:
+
+Unicode
+-------
+
+Please see :ref:`mysql_unicode` for current recommendations on unicode
+handling.
+
+.. _mysqldb_ssl:
+
+SSL Connections
+----------------
+
+The mysqlclient and PyMySQL DBAPIs accept an additional dictionary under the
+key "ssl", which may be specified using the
+:paramref:`_sa.create_engine.connect_args` dictionary::
+
+    engine = create_engine(
+        "mysql+mysqldb://scott:tiger@192.168.0.134/test",
+        connect_args={
+            "ssl": {
+                "ca": "/home/gord/client-ssl/ca.pem",
+                "cert": "/home/gord/client-ssl/client-cert.pem",
+                "key": "/home/gord/client-ssl/client-key.pem",
+            }
+        },
+    )
+
+For convenience, the following keys may also be specified inline within the URL
+where they will be interpreted into the "ssl" dictionary automatically:
+"ssl_ca", "ssl_cert", "ssl_key", "ssl_capath", "ssl_cipher",
+"ssl_check_hostname". An example is as follows::
+
+    connection_uri = (
+        "mysql+mysqldb://scott:tiger@192.168.0.134/test"
+        "?ssl_ca=/home/gord/client-ssl/ca.pem"
+        "&ssl_cert=/home/gord/client-ssl/client-cert.pem"
+        "&ssl_key=/home/gord/client-ssl/client-key.pem"
+    )
+
+.. seealso::
+
+    :ref:`pymysql_ssl` in the PyMySQL dialect
+
+
+Using MySQLdb with Google Cloud SQL
+-----------------------------------
+
+Google Cloud SQL now recommends use of the MySQLdb dialect.  Connect
+using a URL like the following:
+
+.. sourcecode:: text
+
+    mysql+mysqldb://root@/<dbname>?unix_socket=/cloudsql/<projectid>:<instancename>
+
+Server Side Cursors
+-------------------
+
+The mysqldb dialect supports server-side cursors. See :ref:`mysql_ss_cursors`.
+
+"""
+
+import re
+
+from .base import MySQLCompiler
+from .base import MySQLDialect
+from .base import MySQLExecutionContext
+from .base import MySQLIdentifierPreparer
+from .base import TEXT
+from ... import sql
+from ... import util
+
+
+class MySQLExecutionContext_mysqldb(MySQLExecutionContext):
+    """Execution context for mysqlclient; no overrides currently."""
+
+    pass
+
+
+class MySQLCompiler_mysqldb(MySQLCompiler):
+    """Statement compiler for mysqlclient; no overrides currently."""
+
+    pass
+
+
+class MySQLDialect_mysqldb(MySQLDialect):
+    """MySQL dialect for the mysqlclient (MySQLdb fork) DBAPI."""
+
+    driver = "mysqldb"
+    supports_statement_cache = True
+    supports_unicode_statements = True
+    supports_sane_rowcount = True
+    supports_sane_multi_rowcount = True
+
+    supports_native_decimal = True
+
+    default_paramstyle = "format"
+    execution_ctx_cls = MySQLExecutionContext_mysqldb
+    statement_compiler = MySQLCompiler_mysqldb
+    preparer = MySQLIdentifierPreparer
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        # (0, 0, 0) when the DBAPI is absent or lacks __version__
+        self._mysql_dbapi_version = (
+            self._parse_dbapi_version(self.dbapi.__version__)
+            if self.dbapi is not None and hasattr(self.dbapi, "__version__")
+            else (0, 0, 0)
+        )
+
+    def _parse_dbapi_version(self, version):
+        """Parse 'X.Y' or 'X.Y.Z' into an int tuple; (0, 0, 0) on failure."""
+        m = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", version)
+        if m:
+            return tuple(int(x) for x in m.group(1, 2, 3) if x is not None)
+        else:
+            return (0, 0, 0)
+
+    @util.langhelpers.memoized_property
+    def supports_server_side_cursors(self):
+        # available only if MySQLdb provides an SSCursor class
+        try:
+            cursors = __import__("MySQLdb.cursors").cursors
+            self._sscursor = cursors.SSCursor
+            return True
+        except (ImportError, AttributeError):
+            return False
+
+    @classmethod
+    def import_dbapi(cls):
+        return __import__("MySQLdb")
+
+    def on_connect(self):
+        """Extend the base on_connect handler to issue SET NAMES."""
+        super_ = super().on_connect()
+
+        def on_connect(conn):
+            if super_ is not None:
+                super_(conn)
+
+            charset_name = conn.character_set_name()
+
+            if charset_name is not None:
+                cursor = conn.cursor()
+                cursor.execute("SET NAMES %s" % charset_name)
+                cursor.close()
+
+        return on_connect
+
+    def do_ping(self, dbapi_connection):
+        dbapi_connection.ping()
+        return True
+
+    def do_executemany(self, cursor, statement, parameters, context=None):
+        # propagate the aggregate rowcount onto the execution context
+        rowcount = cursor.executemany(statement, parameters)
+        if context is not None:
+            context._rowcount = rowcount
+
+    def _check_unicode_returns(self, connection):
+        # work around issue fixed in
+        # https://github.com/farcepest/MySQLdb1/commit/cd44524fef63bd3fcb71947392326e9742d520e8
+        # specific issue w/ the utf8mb4_bin collation and unicode returns
+
+        collation = connection.exec_driver_sql(
+            "show collation where %s = 'utf8mb4' and %s = 'utf8mb4_bin'"
+            % (
+                self.identifier_preparer.quote("Charset"),
+                self.identifier_preparer.quote("Collation"),
+            )
+        ).scalar()
+        has_utf8mb4_bin = self.server_version_info > (5,) and collation
+        if has_utf8mb4_bin:
+            additional_tests = [
+                sql.collate(
+                    sql.cast(
+                        sql.literal_column("'test collated returns'"),
+                        TEXT(charset="utf8mb4"),
+                    ),
+                    "utf8mb4_bin",
+                )
+            ]
+        else:
+            additional_tests = []
+        return super()._check_unicode_returns(connection, additional_tests)
+
+    def create_connect_args(self, url, _translate_args=None):
+        """Build (args, kwargs) for MySQLdb.connect() from a URL.
+
+        ``_translate_args`` allows subclasses (e.g. pymysql) to supply
+        different URL-field-to-kwarg name mappings.
+        """
+        if _translate_args is None:
+            _translate_args = dict(
+                database="db", username="user", password="passwd"
+            )
+
+        opts = url.translate_connect_args(**_translate_args)
+        opts.update(url.query)
+
+        util.coerce_kw_type(opts, "compress", bool)
+        util.coerce_kw_type(opts, "connect_timeout", int)
+        util.coerce_kw_type(opts, "read_timeout", int)
+        util.coerce_kw_type(opts, "write_timeout", int)
+        util.coerce_kw_type(opts, "client_flag", int)
+        util.coerce_kw_type(opts, "local_infile", bool)
+        # Note: using either of the below will cause all strings to be
+        # returned as Unicode, both in raw SQL operations and with column
+        # types like String and MSString.
+        util.coerce_kw_type(opts, "use_unicode", bool)
+        util.coerce_kw_type(opts, "charset", str)
+
+        # Rich values 'cursorclass' and 'conv' are not supported via
+        # query string.
+
+        # gather inline "ssl_*" query keys into the nested "ssl" dict the
+        # driver expects
+        ssl = {}
+        keys = [
+            ("ssl_ca", str),
+            ("ssl_key", str),
+            ("ssl_cert", str),
+            ("ssl_capath", str),
+            ("ssl_cipher", str),
+            ("ssl_check_hostname", bool),
+        ]
+        for key, kw_type in keys:
+            if key in opts:
+                ssl[key[4:]] = opts[key]
+                util.coerce_kw_type(ssl, key[4:], kw_type)
+                del opts[key]
+        if ssl:
+            opts["ssl"] = ssl
+
+        # FOUND_ROWS must be set in CLIENT_FLAGS to enable
+        # supports_sane_rowcount.
+        client_flag = opts.get("client_flag", 0)
+
+        client_flag_found_rows = self._found_rows_client_flag()
+        if client_flag_found_rows is not None:
+            client_flag |= client_flag_found_rows
+            opts["client_flag"] = client_flag
+        return [[], opts]
+
+    def _found_rows_client_flag(self):
+        """Return the driver's FOUND_ROWS flag value, or None if unknown."""
+        if self.dbapi is not None:
+            try:
+                CLIENT_FLAGS = __import__(
+                    self.dbapi.__name__ + ".constants.CLIENT"
+                ).constants.CLIENT
+            except (AttributeError, ImportError):
+                return None
+            else:
+                return CLIENT_FLAGS.FOUND_ROWS
+        else:
+            return None
+
+    def _extract_error_code(self, exception):
+        return exception.args[0]
+
+    def _detect_charset(self, connection):
+        """Sniff out the character set in use for connection results."""
+
+        try:
+            # note: the SQL here would be
+            # "SHOW VARIABLES LIKE 'character_set%%'"
+            cset_name = connection.connection.character_set_name
+        except AttributeError:
+            util.warn(
+                "No 'character_set_name' can be detected with "
+                "this MySQL-Python version; "
+                "please upgrade to a recent version of MySQL-Python.  "
+                "Assuming latin1."
+            )
+            return "latin1"
+        else:
+            return cset_name()
+
+    def get_isolation_level_values(self, dbapi_connection):
+        return (
+            "SERIALIZABLE",
+            "READ UNCOMMITTED",
+            "READ COMMITTED",
+            "REPEATABLE READ",
+            "AUTOCOMMIT",
+        )
+
+    def set_isolation_level(self, dbapi_connection, level):
+        # AUTOCOMMIT maps to the driver's autocommit() call; other levels
+        # go through SQL in the base class
+        if level == "AUTOCOMMIT":
+            dbapi_connection.autocommit(True)
+        else:
+            dbapi_connection.autocommit(False)
+            super().set_isolation_level(dbapi_connection, level)
+
+
+dialect = MySQLDialect_mysqldb
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/provision.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/provision.py
new file mode 100644
index 00000000..7807af40
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/provision.py
@@ -0,0 +1,110 @@
+# dialects/mysql/provision.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+from ... import exc
+from ...testing.provision import configure_follower
+from ...testing.provision import create_db
+from ...testing.provision import drop_db
+from ...testing.provision import generate_driver_url
+from ...testing.provision import temp_table_keyword_args
+from ...testing.provision import upsert
+
+
+@generate_driver_url.for_db("mysql", "mariadb")
+def generate_driver_url(url, driver, query_str):
+    """Rewrite a test URL to target the given driver, or None if unusable."""
+    backend = url.get_backend_name()
+
+    # NOTE: at the moment, tests are running mariadbconnector
+    # against both mariadb and mysql backends.   if we want this to be
+    # limited, do the decision making here to reject a "mysql+mariadbconnector"
+    # URL.  Optionally also re-enable the module level
+    # MySQLDialect_mariadbconnector.is_mariadb flag as well, which must
+    # include a unit and/or functional test.
+
+    # all the Jenkins tests have been running mysqlclient Python library
+    # built against mariadb client drivers for years against all MySQL /
+    # MariaDB versions going back to MySQL 5.6, currently they can talk
+    # to MySQL databases without problems.
+
+    if backend == "mysql":
+        dialect_cls = url.get_dialect()
+        if dialect_cls._is_mariadb_from_url(url):
+            backend = "mariadb"
+
+    new_url = url.set(
+        drivername="%s+%s" % (backend, driver)
+    ).update_query_string(query_str)
+
+    if driver == "mariadbconnector":
+        # mariadbconnector does not accept a "charset" connect argument
+        new_url = new_url.difference_update_query(["charset"])
+
+    try:
+        new_url.get_dialect()
+    except exc.NoSuchModuleError:
+        # requested driver is not installed/registered
+        return None
+    else:
+        return new_url
+
+
+@create_db.for_db("mysql", "mariadb")
+def _mysql_create_db(cfg, eng, ident):
+    """Create the main test database plus two companion schemas."""
+    with eng.begin() as conn:
+        # best-effort pre-drop of any leftover databases from a prior run
+        try:
+            _mysql_drop_db(cfg, conn, ident)
+        except Exception:
+            pass
+
+    with eng.begin() as conn:
+        conn.exec_driver_sql(
+            "CREATE DATABASE %s CHARACTER SET utf8mb4" % ident
+        )
+        conn.exec_driver_sql(
+            "CREATE DATABASE %s_test_schema CHARACTER SET utf8mb4" % ident
+        )
+        conn.exec_driver_sql(
+            "CREATE DATABASE %s_test_schema_2 CHARACTER SET utf8mb4" % ident
+        )
+
+
+@configure_follower.for_db("mysql", "mariadb")
+def _mysql_configure_follower(config, ident):
+    """Point the follower config at the per-ident companion schemas."""
+    config.test_schema = "%s_test_schema" % ident
+    config.test_schema_2 = "%s_test_schema_2" % ident
+
+
+@drop_db.for_db("mysql", "mariadb")
+def _mysql_drop_db(cfg, eng, ident):
+    """Drop the databases created by _mysql_create_db."""
+    with eng.begin() as conn:
+        conn.exec_driver_sql("DROP DATABASE %s_test_schema" % ident)
+        conn.exec_driver_sql("DROP DATABASE %s_test_schema_2" % ident)
+        conn.exec_driver_sql("DROP DATABASE %s" % ident)
+
+
+@temp_table_keyword_args.for_db("mysql", "mariadb")
+def _mysql_temp_table_keyword_args(cfg, eng):
+    """Temp tables are created with the TEMPORARY prefix on MySQL/MariaDB."""
+    return {"prefixes": ["TEMPORARY"]}
+
+
+@upsert.for_db("mariadb")
+def _upsert(
+    cfg, table, returning, *, set_lambda=None, sort_by_parameter_order=False
+):
+    """Build an INSERT ... ON DUPLICATE KEY UPDATE statement for tests."""
+    from sqlalchemy.dialects.mysql import insert
+
+    stmt = insert(table)
+
+    if set_lambda:
+        stmt = stmt.on_duplicate_key_update(**set_lambda(stmt.inserted))
+    else:
+        # no updates requested: assign the first PK column to itself so the
+        # statement is still a valid upsert
+        pk1 = table.primary_key.c[0]
+        stmt = stmt.on_duplicate_key_update({pk1.key: pk1})
+
+    stmt = stmt.returning(
+        *returning, sort_by_parameter_order=sort_by_parameter_order
+    )
+    return stmt
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/pymysql.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/pymysql.py
new file mode 100644
index 00000000..67cb4cdd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/pymysql.py
@@ -0,0 +1,136 @@
+# dialects/mysql/pymysql.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+r"""
+
+.. dialect:: mysql+pymysql
+    :name: PyMySQL
+    :dbapi: pymysql
+    :connectstring: mysql+pymysql://<username>:<password>@<host>/<dbname>[?<options>]
+    :url: https://pymysql.readthedocs.io/
+
+Unicode
+-------
+
+Please see :ref:`mysql_unicode` for current recommendations on unicode
+handling.
+
+.. _pymysql_ssl:
+
+SSL Connections
+------------------
+
+The PyMySQL DBAPI accepts the same SSL arguments as that of MySQLdb,
+described at :ref:`mysqldb_ssl`.   See that section for additional examples.
+
+If the server uses an automatically-generated certificate that is self-signed
+or does not match the host name (as seen from the client), it may also be
+necessary to indicate ``ssl_check_hostname=false`` in PyMySQL::
+
+    connection_uri = (
+        "mysql+pymysql://scott:tiger@192.168.0.134/test"
+        "?ssl_ca=/home/gord/client-ssl/ca.pem"
+        "&ssl_cert=/home/gord/client-ssl/client-cert.pem"
+        "&ssl_key=/home/gord/client-ssl/client-key.pem"
+        "&ssl_check_hostname=false"
+    )
+
+MySQL-Python Compatibility
+--------------------------
+
+The pymysql DBAPI is a pure Python port of the MySQL-python (MySQLdb) driver,
+and targets 100% compatibility.   Most behavioral notes for MySQL-python apply
+to the pymysql driver as well.
+
+"""  # noqa
+
+from .mysqldb import MySQLDialect_mysqldb
+from ...util import langhelpers
+
+
+class MySQLDialect_pymysql(MySQLDialect_mysqldb):
+    """MySQL dialect for the pure-Python PyMySQL DBAPI."""
+
+    driver = "pymysql"
+    supports_statement_cache = True
+
+    description_encoding = None
+
+    @langhelpers.memoized_property
+    def supports_server_side_cursors(self):
+        # available only if pymysql provides an SSCursor class
+        try:
+            cursors = __import__("pymysql.cursors").cursors
+            self._sscursor = cursors.SSCursor
+            return True
+        except (ImportError, AttributeError):
+            return False
+
+    @classmethod
+    def import_dbapi(cls):
+        return __import__("pymysql")
+
+    @langhelpers.memoized_property
+    def _send_false_to_ping(self):
+        """determine if pymysql has deprecated, changed the default of,
+        or removed the 'reconnect' argument of connection.ping().
+
+        See #10492 and
+        https://github.com/PyMySQL/mysqlclient/discussions/651#discussioncomment-7308971
+        for background.
+
+        """  # noqa: E501
+
+        try:
+            Connection = __import__(
+                "pymysql.connections"
+            ).connections.Connection
+        except (ImportError, AttributeError):
+            return True
+        else:
+            insp = langhelpers.get_callable_argspec(Connection.ping)
+            try:
+                # args[0] is self; args[1] would be 'reconnect' if present
+                reconnect_arg = insp.args[1]
+            except IndexError:
+                return False
+            else:
+                return reconnect_arg == "reconnect" and (
+                    not insp.defaults or insp.defaults[0] is not False
+                )
+
+    def do_ping(self, dbapi_connection):
+        if self._send_false_to_ping:
+            # explicitly disable auto-reconnect during the ping
+            dbapi_connection.ping(False)
+        else:
+            dbapi_connection.ping()
+
+        return True
+
+    def create_connect_args(self, url, _translate_args=None):
+        # pymysql uses "database"/"password" natively; only "user" differs
+        if _translate_args is None:
+            _translate_args = dict(username="user")
+        return super().create_connect_args(
+            url, _translate_args=_translate_args
+        )
+
+    def is_disconnect(self, e, connection, cursor):
+        """Return True if the exception indicates a dropped connection."""
+        if super().is_disconnect(e, connection, cursor):
+            return True
+        elif isinstance(e, self.dbapi.Error):
+            str_e = str(e).lower()
+            return (
+                "already closed" in str_e or "connection was killed" in str_e
+            )
+        else:
+            return False
+
+    def _extract_error_code(self, exception):
+        # pymysql may wrap the originating exception as args[0]; unwrap it
+        if isinstance(exception.args[0], Exception):
+            exception = exception.args[0]
+        return exception.args[0]
+
+
+dialect = MySQLDialect_pymysql
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/pyodbc.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/pyodbc.py
new file mode 100644
index 00000000..6d44bd38
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/pyodbc.py
@@ -0,0 +1,139 @@
+# dialects/mysql/pyodbc.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+r"""
+
+
+.. dialect:: mysql+pyodbc
+    :name: PyODBC
+    :dbapi: pyodbc
+    :connectstring: mysql+pyodbc://<username>:<password>@<dsnname>
+    :url: https://pypi.org/project/pyodbc/
+
+.. note::
+
+    The PyODBC for MySQL dialect is **not tested as part of
+    SQLAlchemy's continuous integration**.
+    The recommended MySQL dialects are mysqlclient and PyMySQL.
+    However, if you want to use the mysql+pyodbc dialect and require
+    full support for ``utf8mb4`` characters (including supplementary
+    characters like emoji) be sure to use a current release of
+    MySQL Connector/ODBC and specify the "ANSI" (**not** "Unicode")
+    version of the driver in your DSN or connection string.
+
+Pass through exact pyodbc connection string::
+
+    import urllib
+
+    connection_string = (
+        "DRIVER=MySQL ODBC 8.0 ANSI Driver;"
+        "SERVER=localhost;"
+        "PORT=3307;"
+        "DATABASE=mydb;"
+        "UID=root;"
+        "PWD=(whatever);"
+        "charset=utf8mb4;"
+    )
+    params = urllib.parse.quote_plus(connection_string)
+    connection_uri = "mysql+pyodbc:///?odbc_connect=%s" % params
+
+"""  # noqa
+
+import re
+
+from .base import MySQLDialect
+from .base import MySQLExecutionContext
+from .types import TIME
+from ... import exc
+from ... import util
+from ...connectors.pyodbc import PyODBCConnector
+from ...sql.sqltypes import Time
+
+
class _pyodbcTIME(TIME):
    """TIME variant for pyodbc, whose driver already returns
    ``datetime.time`` objects."""

    def result_processor(self, dialect, coltype):
        # pyodbc hands back datetime.time directly; no conversion needed,
        # so the processor is a simple pass-through.
        def passthrough(value):
            return value

        return passthrough
+
+
class MySQLExecutionContext_pyodbc(MySQLExecutionContext):
    """Execution context that retrieves the last inserted id via
    ``SELECT LAST_INSERT_ID()``."""

    def get_lastrowid(self):
        """Return the last inserted id for the current connection.

        The cursor is closed in a ``finally`` block so it is not leaked
        if ``execute()`` or ``fetchone()`` raises (the original code only
        closed it on the success path).
        """
        cursor = self.create_cursor()
        try:
            cursor.execute("SELECT LAST_INSERT_ID()")
            return cursor.fetchone()[0]
        finally:
            cursor.close()
+
+
class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect):
    """MySQL dialect running over the pyodbc DBAPI."""

    supports_statement_cache = True
    colspecs = util.update_copy(MySQLDialect.colspecs, {Time: _pyodbcTIME})
    supports_unicode_statements = True
    execution_ctx_cls = MySQLExecutionContext_pyodbc

    pyodbc_driver_name = "MySQL"

    def _detect_charset(self, connection):
        """Sniff out the character set in use for connection results."""

        # Prefer 'character_set_results' for the current connection over the
        # value in the driver.  SET NAMES or individual variable SETs will
        # change the charset without updating the driver's view of the world.
        #
        # If it's decided that issuing that sort of SQL leaves you SOL, then
        # this can prefer the driver value.

        # set this to None as _fetch_setting attempts to use it (None is OK)
        self._connection_charset = None
        try:
            value = self._fetch_setting(connection, "character_set_client")
            if value:
                return value
        except exc.DBAPIError:
            pass

        util.warn(
            "Could not detect the connection character set.  "
            "Assuming latin1."
        )
        return "latin1"

    def _get_server_version_info(self, connection):
        # Use the MySQL-specific version parsing from the base dialect
        # rather than the generic PyODBCConnector implementation.
        return MySQLDialect._get_server_version_info(self, connection)

    def _extract_error_code(self, exception):
        """Return the native MySQL error code embedded in *exception*.

        ODBC error text carries the native code as ``(NNNN)``; return it
        as an int, or ``None`` when no such code is present.  Previously
        a missing match raised ``AttributeError`` (``None.group``).
        """
        m = re.search(r"\((\d+)\)", str(exception.args))
        if m is None:
            return None
        return int(m.group(1))

    def on_connect(self):
        super_ = super().on_connect()

        def on_connect(conn):
            if super_ is not None:
                super_(conn)

            # declare Unicode encoding for pyodbc as per
            #   https://github.com/mkleehammer/pyodbc/wiki/Unicode
            pyodbc_SQL_CHAR = 1  # pyodbc.SQL_CHAR
            pyodbc_SQL_WCHAR = -8  # pyodbc.SQL_WCHAR
            conn.setdecoding(pyodbc_SQL_CHAR, encoding="utf-8")
            conn.setdecoding(pyodbc_SQL_WCHAR, encoding="utf-8")
            conn.setencoding(encoding="utf-8")

        return on_connect
+
+
# Entry point consumed by SQLAlchemy's dialect loader for mysql+pyodbc:// URLs.
dialect = MySQLDialect_pyodbc
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/reflection.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/reflection.py
new file mode 100644
index 00000000..3998be97
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/reflection.py
@@ -0,0 +1,677 @@
+# dialects/mysql/reflection.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+import re
+
+from .enumerated import ENUM
+from .enumerated import SET
+from .types import DATETIME
+from .types import TIME
+from .types import TIMESTAMP
+from ... import log
+from ... import types as sqltypes
+from ... import util
+
+
class ReflectedState:
    """Stores raw information about a SHOW CREATE TABLE statement."""

    def __init__(self):
        # parsed column dictionaries (name, type, default, comment, ...)
        self.columns = []
        # dialect-prefixed table options, e.g. "mysql_engine"
        self.table_options = {}
        # unquoted table name from the CREATE line
        self.table_name = None
        # parsed KEY / index specs
        self.keys = []
        # parsed FOREIGN KEY constraint specs
        self.fk_constraints = []
        # parsed CHECK constraint specs
        self.ck_constraints = []
        # connection charset; assigned by MySQLTableDefinitionParser.parse(),
        # initialized here so the attribute always exists on the instance
        self.charset = None
+
+
@log.class_logger
class MySQLTableDefinitionParser:
    """Parses the results of a SHOW CREATE TABLE statement."""

    def __init__(self, dialect, preparer):
        # dialect: owning MySQL dialect; supplies ``ischema_names`` and
        # ``name`` used throughout parsing.
        # preparer: identifier preparer used to quote/unquote identifiers.
        self.dialect = dialect
        self.preparer = preparer
        self._prep_regexes()

    def parse(self, show_create, charset):
        """Parse full SHOW CREATE TABLE output into a ReflectedState.

        :param show_create: complete text of a SHOW CREATE TABLE statement.
        :param charset: connection character set, recorded on the state.
        """
        state = ReflectedState()
        state.charset = charset
        for line in re.split(r"\r?\n", show_create):
            if line.startswith("  " + self.preparer.initial_quote):
                self._parse_column(line, state)
            # a regular table options line
            elif line.startswith(") "):
                self._parse_table_options(line, state)
            # an ANSI-mode table options line
            elif line == ")":
                pass
            elif line.startswith("CREATE "):
                self._parse_table_name(line, state)
            elif "PARTITION" in line:
                self._parse_partition_options(line, state)
            # Not present in real reflection, but may be if
            # loading from a file.
            elif not line:
                pass
            else:
                type_, spec = self._parse_constraints(line)
                if type_ is None:
                    util.warn("Unknown schema content: %r" % line)
                elif type_ == "key":
                    state.keys.append(spec)
                elif type_ == "fk_constraint":
                    state.fk_constraints.append(spec)
                elif type_ == "ck_constraint":
                    state.ck_constraints.append(spec)
                else:
                    # "partition" lines are recognized but not collected
                    pass
        return state

    def _check_view(self, sql: str) -> bool:
        """Return True if the given DDL text is a CREATE ... VIEW statement."""
        return bool(self._re_is_view.match(sql))

    def _parse_constraints(self, line):
        """Parse a KEY or CONSTRAINT line.

        :param line: A line of SHOW CREATE TABLE output

        :return: a ``(kind, spec)`` tuple where kind is one of
          "key", "fk_constraint", "ck_constraint", "partition",
          or ``None`` when the line is unrecognized.
        """

        # KEY
        m = self._re_key.match(line)
        if m:
            spec = m.groupdict()
            # convert columns into name, length pairs
            # NOTE: we may want to consider SHOW INDEX as the
            # format of indexes in MySQL becomes more complex
            spec["columns"] = self._parse_keyexprs(spec["columns"])
            if spec["version_sql"]:
                m2 = self._re_key_version_sql.match(spec["version_sql"])
                if m2 and m2.groupdict()["parser"]:
                    spec["parser"] = m2.groupdict()["parser"]
            if spec["parser"]:
                spec["parser"] = self.preparer.unformat_identifiers(
                    spec["parser"]
                )[0]
            return "key", spec

        # FOREIGN KEY CONSTRAINT
        m = self._re_fk_constraint.match(line)
        if m:
            spec = m.groupdict()
            spec["table"] = self.preparer.unformat_identifiers(spec["table"])
            spec["local"] = [c[0] for c in self._parse_keyexprs(spec["local"])]
            spec["foreign"] = [
                c[0] for c in self._parse_keyexprs(spec["foreign"])
            ]
            return "fk_constraint", spec

        # CHECK constraint
        m = self._re_ck_constraint.match(line)
        if m:
            spec = m.groupdict()
            return "ck_constraint", spec

        # PARTITION and SUBPARTITION
        m = self._re_partition.match(line)
        if m:
            # Punt!
            return "partition", line

        # No match.
        return (None, line)

    def _parse_table_name(self, line, state):
        """Extract the table name.

        :param line: The first line of SHOW CREATE TABLE
        """

        regex, cleanup = self._pr_name
        m = regex.match(line)
        if m:
            state.table_name = cleanup(m.group("name"))

    def _parse_table_options(self, line, state):
        """Build a dictionary of all reflected table-level options.

        :param line: The final line of SHOW CREATE TABLE output.
        """

        options = {}

        if line and line != ")":
            rest_of_line = line
            for regex, cleanup in self._pr_options:
                m = regex.search(rest_of_line)
                if not m:
                    continue
                directive, value = m.group("directive"), m.group("val")
                if cleanup:
                    value = cleanup(value)
                options[directive.lower()] = value
                # consume the matched option so later regexes don't re-match it
                rest_of_line = regex.sub("", rest_of_line)

        # these directives are parsed but deliberately not reflected
        for nope in ("auto_increment", "data directory", "index directory"):
            options.pop(nope, None)

        for opt, val in options.items():
            state.table_options["%s_%s" % (self.dialect.name, opt)] = val

    def _parse_partition_options(self, line, state):
        """Accumulate PARTITION/SUBPARTITION details into table_options.

        Definition lines are concatenated under
        ``<dialect>_partition_definitions`` /
        ``<dialect>_subpartition_definitions``; other partition directives
        are stored as individual ``<dialect>_<directive>`` options.
        """
        options = {}
        new_line = line[:]

        # strip leading parenthesis/whitespace from the raw line
        while new_line.startswith("(") or new_line.startswith(" "):
            new_line = new_line[1:]

        for regex, cleanup in self._pr_options:
            m = regex.search(new_line)
            # only PARTITION-family option regexes apply here
            if not m or "PARTITION" not in regex.pattern:
                continue

            directive = m.group("directive")
            directive = directive.lower()
            is_subpartition = directive == "subpartition"

            if directive == "partition" or is_subpartition:
                new_line = new_line.replace(") */", "")
                new_line = new_line.replace(",", "")
                if is_subpartition and new_line.endswith(")"):
                    new_line = new_line[:-1]
                if self.dialect.name == "mariadb" and new_line.endswith(")"):
                    if (
                        "MAXVALUE" in new_line
                        or "MINVALUE" in new_line
                        or "ENGINE" in new_line
                    ):
                        # final line of MariaDB partition endswith ")"
                        new_line = new_line[:-1]

                defs = "%s_%s_definitions" % (self.dialect.name, directive)
                options[defs] = new_line

            else:
                directive = directive.replace(" ", "_")
                value = m.group("val")
                if cleanup:
                    value = cleanup(value)
                options[directive] = value
            break

        for opt, val in options.items():
            part_def = "%s_partition_definitions" % (self.dialect.name)
            subpart_def = "%s_subpartition_definitions" % (self.dialect.name)
            if opt == part_def or opt == subpart_def:
                # builds a string of definitions
                if opt not in state.table_options:
                    state.table_options[opt] = val
                else:
                    state.table_options[opt] = "%s, %s" % (
                        state.table_options[opt],
                        val,
                    )
            else:
                state.table_options["%s_%s" % (self.dialect.name, opt)] = val

    def _parse_column(self, line, state):
        """Extract column details.

        Falls back to a 'minimal support' variant if full parse fails.

        :param line: Any column-bearing line from SHOW CREATE TABLE
        """

        spec = None
        m = self._re_column.match(line)
        if m:
            spec = m.groupdict()
            spec["full"] = True
        else:
            m = self._re_column_loose.match(line)
            if m:
                spec = m.groupdict()
                spec["full"] = False
        if not spec:
            util.warn("Unknown column definition %r" % line)
            return
        if not spec["full"]:
            util.warn("Incomplete reflection of column definition %r" % line)

        name, type_, args = spec["name"], spec["coltype"], spec["arg"]

        try:
            col_type = self.dialect.ischema_names[type_]
        except KeyError:
            util.warn(
                "Did not recognize type '%s' of column '%s'" % (type_, name)
            )
            col_type = sqltypes.NullType

        # Column type positional arguments eg. varchar(32)
        if args is None or args == "":
            type_args = []
        elif args[0] == "'" and args[-1] == "'":
            type_args = self._re_csv_str.findall(args)
        else:
            type_args = [int(v) for v in self._re_csv_int.findall(args)]

        # Column type keyword options
        type_kw = {}

        if issubclass(col_type, (DATETIME, TIME, TIMESTAMP)):
            if type_args:
                # fractional seconds precision, e.g. DATETIME(6)
                type_kw["fsp"] = type_args.pop(0)

        for kw in ("unsigned", "zerofill"):
            if spec.get(kw, False):
                type_kw[kw] = True
        for kw in ("charset", "collate"):
            if spec.get(kw, False):
                type_kw[kw] = spec[kw]
        if issubclass(col_type, (ENUM, SET)):
            type_args = _strip_values(type_args)

            if issubclass(col_type, SET) and "" in type_args:
                type_kw["retrieve_as_bitwise"] = True

        type_instance = col_type(*type_args, **type_kw)

        col_kw = {}

        # NOT NULL
        col_kw["nullable"] = True
        # this can be "NULL" in the case of TIMESTAMP
        if spec.get("notnull", False) == "NOT NULL":
            col_kw["nullable"] = False
        # For generated columns, the nullability is marked in a different place
        if spec.get("notnull_generated", False) == "NOT NULL":
            col_kw["nullable"] = False

        # AUTO_INCREMENT
        if spec.get("autoincr", False):
            col_kw["autoincrement"] = True
        elif issubclass(col_type, sqltypes.Integer):
            col_kw["autoincrement"] = False

        # DEFAULT
        default = spec.get("default", None)

        if default == "NULL":
            # eliminates the need to deal with this later.
            default = None

        comment = spec.get("comment", None)

        if comment is not None:
            comment = cleanup_text(comment)

        sqltext = spec.get("generated")
        if sqltext is not None:
            computed = dict(sqltext=sqltext)
            persisted = spec.get("persistence")
            if persisted is not None:
                computed["persisted"] = persisted == "STORED"
            col_kw["computed"] = computed

        col_d = dict(
            name=name, type=type_instance, default=default, comment=comment
        )
        col_d.update(col_kw)
        state.columns.append(col_d)

    def _describe_to_create(self, table_name, columns):
        """Re-format DESCRIBE output as a SHOW CREATE TABLE string.

        DESCRIBE is a much simpler reflection and is sufficient for
        reflecting views for runtime use.  This method formats DDL
        for columns only- keys are omitted.

        :param columns: A sequence of DESCRIBE or SHOW COLUMNS 6-tuples.
          SHOW FULL COLUMNS FROM rows must be rearranged for use with
          this function.
        """

        buffer = []
        for row in columns:
            (name, col_type, nullable, default, extra) = (
                row[i] for i in (0, 1, 2, 4, 5)
            )

            line = [" "]
            line.append(self.preparer.quote_identifier(name))
            line.append(col_type)
            if not nullable:
                line.append("NOT NULL")
            if default:
                if "auto_increment" in default:
                    pass
                elif col_type.startswith("timestamp") and default.startswith(
                    "C"
                ):
                    line.append("DEFAULT")
                    line.append(default)
                elif default == "NULL":
                    line.append("DEFAULT")
                    line.append(default)
                else:
                    line.append("DEFAULT")
                    line.append("'%s'" % default.replace("'", "''"))
            if extra:
                line.append(extra)

            buffer.append(" ".join(line))

        return "".join(
            [
                (
                    "CREATE TABLE %s (\n"
                    % self.preparer.quote_identifier(table_name)
                ),
                ",\n".join(buffer),
                "\n) ",
            ]
        )

    def _parse_keyexprs(self, identifiers):
        """Unpack '"col"(2),"col" ASC'-ish strings into components."""

        return [
            (colname, int(length) if length else None, modifiers)
            for colname, length, modifiers in self._re_keyexprs.findall(
                identifiers
            )
        ]

    def _prep_regexes(self):
        """Pre-compile regular expressions."""

        self._re_columns = []
        self._pr_options = []

        _final = self.preparer.final_quote

        quotes = dict(
            zip(
                ("iq", "fq", "esc_fq"),
                [
                    re.escape(s)
                    for s in (
                        self.preparer.initial_quote,
                        _final,
                        self.preparer._escape_identifier(_final),
                    )
                ],
            )
        )

        self._pr_name = _pr_compile(
            r"^CREATE (?:\w+ +)?TABLE +"
            r"%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +\($" % quotes,
            self.preparer._unescape_identifier,
        )

        self._re_is_view = _re_compile(r"^CREATE(?! TABLE)(\s.*)?\sVIEW")

        # `col`,`col2`(32),`col3`(15) DESC
        #
        self._re_keyexprs = _re_compile(
            r"(?:"
            r"(?:%(iq)s((?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)"
            r"(?:\((\d+)\))?(?: +(ASC|DESC))?(?=\,|$))+" % quotes
        )

        # 'foo' or 'foo','bar' or 'fo,o','ba''a''r'
        self._re_csv_str = _re_compile(r"\x27(?:\x27\x27|[^\x27])*\x27")

        # 123 or 123,456
        self._re_csv_int = _re_compile(r"\d+")

        # `colname` <type> [type opts]
        #  (NOT NULL | NULL)
        #   DEFAULT ('value' | CURRENT_TIMESTAMP...)
        #   COMMENT 'comment'
        #  COLUMN_FORMAT (FIXED|DYNAMIC|DEFAULT)
        #  STORAGE (DISK|MEMORY)
        self._re_column = _re_compile(
            r"  "
            r"%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +"
            r"(?P<coltype>\w+)"
            r"(?:\((?P<arg>(?:\d+|\d+,\d+|"
            r"(?:'(?:''|[^'])*',?)+))\))?"
            r"(?: +(?P<unsigned>UNSIGNED))?"
            r"(?: +(?P<zerofill>ZEROFILL))?"
            r"(?: +CHARACTER SET +(?P<charset>[\w_]+))?"
            r"(?: +COLLATE +(?P<collate>[\w_]+))?"
            r"(?: +(?P<notnull>(?:NOT )?NULL))?"
            r"(?: +DEFAULT +(?P<default>"
            r"(?:NULL|'(?:''|[^'])*'|[\-\w\.\(\)]+"
            r"(?: +ON UPDATE [\-\w\.\(\)]+)?)"
            r"))?"
            r"(?: +(?:GENERATED ALWAYS)? ?AS +(?P<generated>\("
            r".*\))? ?(?P<persistence>VIRTUAL|STORED)?"
            r"(?: +(?P<notnull_generated>(?:NOT )?NULL))?"
            r")?"
            r"(?: +(?P<autoincr>AUTO_INCREMENT))?"
            r"(?: +COMMENT +'(?P<comment>(?:''|[^'])*)')?"
            r"(?: +COLUMN_FORMAT +(?P<colfmt>\w+))?"
            r"(?: +STORAGE +(?P<storage>\w+))?"
            r"(?: +(?P<extra>.*))?"
            r",?$" % quotes
        )

        # Fallback, try to parse as little as possible
        self._re_column_loose = _re_compile(
            r"  "
            r"%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +"
            r"(?P<coltype>\w+)"
            r"(?:\((?P<arg>(?:\d+|\d+,\d+|\x27(?:\x27\x27|[^\x27])+\x27))\))?"
            r".*?(?P<notnull>(?:NOT )NULL)?" % quotes
        )

        # (PRIMARY|UNIQUE|FULLTEXT|SPATIAL) INDEX `name` (USING (BTREE|HASH))?
        # (`col` (ASC|DESC)?, `col` (ASC|DESC)?)
        # KEY_BLOCK_SIZE size | WITH PARSER name  /*!50100 WITH PARSER name */
        self._re_key = _re_compile(
            r"  "
            r"(?:(?P<type>\S+) )?KEY"
            r"(?: +%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)?"
            r"(?: +USING +(?P<using_pre>\S+))?"
            r" +\((?P<columns>.+?)\)"
            r"(?: +USING +(?P<using_post>\S+))?"
            r"(?: +KEY_BLOCK_SIZE *[ =]? *(?P<keyblock>\S+))?"
            r"(?: +WITH PARSER +(?P<parser>\S+))?"
            r"(?: +COMMENT +(?P<comment>(\x27\x27|\x27([^\x27])*?\x27)+))?"
            r"(?: +/\*(?P<version_sql>.+)\*/ *)?"
            r",?$" % quotes
        )

        # https://forums.mysql.com/read.php?20,567102,567111#msg-567111
        # It means if the MySQL version >= \d+, execute what's in the comment
        self._re_key_version_sql = _re_compile(
            r"\!\d+ " r"(?: *WITH PARSER +(?P<parser>\S+) *)?"
        )

        # CONSTRAINT `name` FOREIGN KEY (`local_col`)
        # REFERENCES `remote` (`remote_col`)
        # MATCH FULL | MATCH PARTIAL | MATCH SIMPLE
        # ON DELETE CASCADE ON UPDATE RESTRICT
        #
        # unique constraints come back as KEYs
        kw = quotes.copy()
        kw["on"] = "RESTRICT|CASCADE|SET NULL|NO ACTION|SET DEFAULT"
        self._re_fk_constraint = _re_compile(
            r"  "
            r"CONSTRAINT +"
            r"%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +"
            r"FOREIGN KEY +"
            r"\((?P<local>[^\)]+?)\) REFERENCES +"
            r"(?P<table>%(iq)s[^%(fq)s]+%(fq)s"
            r"(?:\.%(iq)s[^%(fq)s]+%(fq)s)?) +"
            r"\((?P<foreign>(?:%(iq)s[^%(fq)s]+%(fq)s(?: *, *)?)+)\)"
            r"(?: +(?P<match>MATCH \w+))?"
            r"(?: +ON DELETE (?P<ondelete>%(on)s))?"
            r"(?: +ON UPDATE (?P<onupdate>%(on)s))?" % kw
        )

        # CONSTRAINT `CONSTRAINT_1` CHECK (`x` > 5)'
        # testing on MariaDB 10.2 shows that the CHECK constraint
        # is returned on a line by itself, so to match without worrying
        # about parenthesis in the expression we go to the end of the line
        self._re_ck_constraint = _re_compile(
            r"  "
            r"CONSTRAINT +"
            r"%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +"
            r"CHECK +"
            r"\((?P<sqltext>.+)\),?" % kw
        )

        # PARTITION
        #
        # punt!
        self._re_partition = _re_compile(r"(?:.*)(?:SUB)?PARTITION(?:.*)")

        # Table-level options (COLLATE, ENGINE, etc.)
        # Do the string options first, since they have quoted
        # strings we need to get rid of.
        for option in _options_of_type_string:
            self._add_option_string(option)

        for option in (
            "ENGINE",
            "TYPE",
            "AUTO_INCREMENT",
            "AVG_ROW_LENGTH",
            "CHARACTER SET",
            "DEFAULT CHARSET",
            "CHECKSUM",
            "COLLATE",
            "DELAY_KEY_WRITE",
            "INSERT_METHOD",
            "MAX_ROWS",
            "MIN_ROWS",
            "PACK_KEYS",
            "ROW_FORMAT",
            "KEY_BLOCK_SIZE",
            "STATS_SAMPLE_PAGES",
        ):
            self._add_option_word(option)

        for option in (
            "PARTITION BY",
            "SUBPARTITION BY",
            "PARTITIONS",
            "SUBPARTITIONS",
            "PARTITION",
            "SUBPARTITION",
        ):
            self._add_partition_option_word(option)

        self._add_option_regex("UNION", r"\([^\)]+\)")
        self._add_option_regex("TABLESPACE", r".*? STORAGE DISK")
        self._add_option_regex(
            "RAID_TYPE",
            r"\w+\s+RAID_CHUNKS\s*\=\s*\w+RAID_CHUNKSIZE\s*=\s*\w+",
        )

    # separator between an option directive and its value: an optional
    # "=" with surrounding whitespace, or plain whitespace
    _optional_equals = r"(?:\s*(?:=\s*)|\s+)"

    def _add_option_string(self, directive):
        """Register a table option whose value is a single-quoted string;
        ``cleanup_text`` is applied to the captured value."""
        regex = r"(?P<directive>%s)%s" r"'(?P<val>(?:[^']|'')*?)'(?!')" % (
            re.escape(directive),
            self._optional_equals,
        )
        self._pr_options.append(_pr_compile(regex, cleanup_text))

    def _add_option_word(self, directive):
        """Register a table option whose value is a single bare word."""
        regex = r"(?P<directive>%s)%s" r"(?P<val>\w+)" % (
            re.escape(directive),
            self._optional_equals,
        )
        self._pr_options.append(_pr_compile(regex))

    def _add_partition_option_word(self, directive):
        """Register a PARTITION-family directive; the value pattern depends
        on which directive is being registered."""
        if directive == "PARTITION BY" or directive == "SUBPARTITION BY":
            regex = r"(?<!\S)(?P<directive>%s)%s" r"(?P<val>\w+.*)" % (
                re.escape(directive),
                self._optional_equals,
            )
        elif directive == "SUBPARTITIONS" or directive == "PARTITIONS":
            regex = r"(?<!\S)(?P<directive>%s)%s" r"(?P<val>\d+)" % (
                re.escape(directive),
                self._optional_equals,
            )
        else:
            regex = r"(?<!\S)(?P<directive>%s)(?!\S)" % (re.escape(directive),)
        self._pr_options.append(_pr_compile(regex))

    def _add_option_regex(self, directive, regex):
        """Register a table option whose value matches the given regex."""
        regex = r"(?P<directive>%s)%s" r"(?P<val>%s)" % (
            re.escape(directive),
            self._optional_equals,
            regex,
        )
        self._pr_options.append(_pr_compile(regex))
+
+
# Table options whose values are quoted strings; these are registered first
# in _prep_regexes so their quoted contents are stripped out before the
# word/regex option parsers run.
_options_of_type_string = (
    "COMMENT",
    "DATA DIRECTORY",
    "INDEX DIRECTORY",
    "PASSWORD",
    "CONNECTION",
)
+
+
def _pr_compile(regex, cleanup=None):
    """Prepare a 2-tuple of (compiled regex, cleanup callable or None)."""
    return _re_compile(regex), cleanup
+
+
+def _re_compile(regex):
+    """Compile a string to regex, I and UNICODE."""
+
+    return re.compile(regex, re.I | re.UNICODE)
+
+
+def _strip_values(values):
+    "Strip reflected values quotes"
+    strip_values = []
+    for a in values:
+        if a[0:1] == '"' or a[0:1] == "'":
+            # strip enclosing quotes and unquote interior
+            a = a[1:-1].replace(a[0] * 2, a[0])
+        strip_values.append(a)
+    return strip_values
+
+
def cleanup_text(raw_text: str) -> str:
    """Undo MySQL escaping in reflected option/comment text.

    Backslash escape sequences are mapped back to the control characters
    they represent, then doubled single quotes are collapsed.
    """
    if "\\" in raw_text:
        raw_text = _control_char_regexp.sub(
            lambda m: _control_char_map[m[0]], raw_text
        )
    return raw_text.replace("''", "'")
+
+
+_control_char_map = {
+    "\\\\": "\\",
+    "\\0": "\0",
+    "\\a": "\a",
+    "\\b": "\b",
+    "\\t": "\t",
+    "\\n": "\n",
+    "\\v": "\v",
+    "\\f": "\f",
+    "\\r": "\r",
+    # '\\e':'\e',
+}
+_control_char_regexp = re.compile(
+    "|".join(re.escape(k) for k in _control_char_map)
+)
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/reserved_words.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/reserved_words.py
new file mode 100644
index 00000000..34fecf42
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/reserved_words.py
@@ -0,0 +1,571 @@
+# dialects/mysql/reserved_words.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+# generated using:
+# https://gist.github.com/kkirsche/4f31f2153ed7a3248be1ec44ca6ddbc9
+#
+# https://mariadb.com/kb/en/reserved-words/
+# includes: Reserved Words, Oracle Mode (separate set unioned)
+# excludes: Exceptions, Function Names
+# mypy: ignore-errors
+
# Lowercased MariaDB reserved words; membership tests drive identifier
# quoting in the MySQL/MariaDB dialect.
RESERVED_WORDS_MARIADB = {
    "accessible",
    "add",
    "all",
    "alter",
    "analyze",
    "and",
    "as",
    "asc",
    "asensitive",
    "before",
    "between",
    "bigint",
    "binary",
    "blob",
    "both",
    "by",
    "call",
    "cascade",
    "case",
    "change",
    "char",
    "character",
    "check",
    "collate",
    "column",
    "condition",
    "constraint",
    "continue",
    "convert",
    "create",
    "cross",
    "current_date",
    "current_role",
    "current_time",
    "current_timestamp",
    "current_user",
    "cursor",
    "database",
    "databases",
    "day_hour",
    "day_microsecond",
    "day_minute",
    "day_second",
    "dec",
    "decimal",
    "declare",
    "default",
    "delayed",
    "delete",
    "desc",
    "describe",
    "deterministic",
    "distinct",
    "distinctrow",
    "div",
    "do_domain_ids",
    "double",
    "drop",
    "dual",
    "each",
    "else",
    "elseif",
    "enclosed",
    "escaped",
    "except",
    "exists",
    "exit",
    "explain",
    "false",
    "fetch",
    "float",
    "float4",
    "float8",
    "for",
    "force",
    "foreign",
    "from",
    "fulltext",
    "general",
    "grant",
    "group",
    "having",
    "high_priority",
    "hour_microsecond",
    "hour_minute",
    "hour_second",
    "if",
    "ignore",
    "ignore_domain_ids",
    "ignore_server_ids",
    "in",
    "index",
    "infile",
    "inner",
    "inout",
    "insensitive",
    "insert",
    "int",
    "int1",
    "int2",
    "int3",
    "int4",
    "int8",
    "integer",
    "intersect",
    "interval",
    "into",
    "is",
    "iterate",
    "join",
    "key",
    "keys",
    "kill",
    "leading",
    "leave",
    "left",
    "like",
    "limit",
    "linear",
    "lines",
    "load",
    "localtime",
    "localtimestamp",
    "lock",
    "long",
    "longblob",
    "longtext",
    "loop",
    "low_priority",
    "master_heartbeat_period",
    "master_ssl_verify_server_cert",
    "match",
    "maxvalue",
    "mediumblob",
    "mediumint",
    "mediumtext",
    "middleint",
    "minute_microsecond",
    "minute_second",
    "mod",
    "modifies",
    "natural",
    "no_write_to_binlog",
    "not",
    "null",
    "numeric",
    "offset",
    "on",
    "optimize",
    "option",
    "optionally",
    "or",
    "order",
    "out",
    "outer",
    "outfile",
    "over",
    "page_checksum",
    "parse_vcol_expr",
    "partition",
    "position",
    "precision",
    "primary",
    "procedure",
    "purge",
    "range",
    "read",
    "read_write",
    "reads",
    "real",
    "recursive",
    "ref_system_id",
    "references",
    "regexp",
    "release",
    "rename",
    "repeat",
    "replace",
    "require",
    "resignal",
    "restrict",
    "return",
    "returning",
    "revoke",
    "right",
    "rlike",
    "rows",
    "row_number",
    "schema",
    "schemas",
    "second_microsecond",
    "select",
    "sensitive",
    "separator",
    "set",
    "show",
    "signal",
    "slow",
    "smallint",
    "spatial",
    "specific",
    "sql",
    "sql_big_result",
    "sql_calc_found_rows",
    "sql_small_result",
    "sqlexception",
    "sqlstate",
    "sqlwarning",
    "ssl",
    "starting",
    "stats_auto_recalc",
    "stats_persistent",
    "stats_sample_pages",
    "straight_join",
    "table",
    "terminated",
    "then",
    "tinyblob",
    "tinyint",
    "tinytext",
    "to",
    "trailing",
    "trigger",
    "true",
    "undo",
    "union",
    "unique",
    "unlock",
    "unsigned",
    "update",
    "usage",
    "use",
    "using",
    "utc_date",
    "utc_time",
    "utc_timestamp",
    "values",
    "varbinary",
    "varchar",
    "varcharacter",
    "varying",
    "when",
    "where",
    "while",
    "window",
    "with",
    "write",
    "xor",
    "year_month",
    "zerofill",
}.union(
    {
        # additional words reserved only in MariaDB's Oracle Mode,
        # kept as a separate unioned set per the upstream source list
        "body",
        "elsif",
        "goto",
        "history",
        "others",
        "package",
        "period",
        "raise",
        "rowtype",
        "system",
        "system_time",
        "versioning",
        "without",
    }
)
+
+# https://dev.mysql.com/doc/refman/8.3/en/keywords.html
+# https://dev.mysql.com/doc/refman/8.0/en/keywords.html
+# https://dev.mysql.com/doc/refman/5.7/en/keywords.html
+# https://dev.mysql.com/doc/refman/5.6/en/keywords.html
+# includes: MySQL x.0 Keywords and Reserved Words
+# excludes: MySQL x.0 New Keywords and Reserved Words,
+#       MySQL x.0 Removed Keywords and Reserved Words
# Lowercased MySQL reserved words (union of 5.6 through 8.x lists);
# membership tests drive identifier quoting in the MySQL dialect.
RESERVED_WORDS_MYSQL = {
    "accessible",
    "add",
    "admin",
    "all",
    "alter",
    "analyze",
    "and",
    "array",
    "as",
    "asc",
    "asensitive",
    "before",
    "between",
    "bigint",
    "binary",
    "blob",
    "both",
    "by",
    "call",
    "cascade",
    "case",
    "change",
    "char",
    "character",
    "check",
    "collate",
    "column",
    "condition",
    "constraint",
    "continue",
    "convert",
    "create",
    "cross",
    "cube",
    "cume_dist",
    "current_date",
    "current_time",
    "current_timestamp",
    "current_user",
    "cursor",
    "database",
    "databases",
    "day_hour",
    "day_microsecond",
    "day_minute",
    "day_second",
    "dec",
    "decimal",
    "declare",
    "default",
    "delayed",
    "delete",
    "dense_rank",
    "desc",
    "describe",
    "deterministic",
    "distinct",
    "distinctrow",
    "div",
    "double",
    "drop",
    "dual",
    "each",
    "else",
    "elseif",
    "empty",
    "enclosed",
    "escaped",
    "except",
    "exists",
    "exit",
    "explain",
    "false",
    "fetch",
    "first_value",
    "float",
    "float4",
    "float8",
    "for",
    "force",
    "foreign",
    "from",
    "fulltext",
    "function",
    "general",
    "generated",
    "get",
    "get_master_public_key",
    "grant",
    "group",
    "grouping",
    "groups",
    "having",
    "high_priority",
    "hour_microsecond",
    "hour_minute",
    "hour_second",
    "if",
    "ignore",
    "ignore_server_ids",
    "in",
    "index",
    "infile",
    "inner",
    "inout",
    "insensitive",
    "insert",
    "int",
    "int1",
    "int2",
    "int3",
    "int4",
    "int8",
    "integer",
    "intersect",
    "interval",
    "into",
    "io_after_gtids",
    "io_before_gtids",
    "is",
    "iterate",
    "join",
    "json_table",
    "key",
    "keys",
    "kill",
    "lag",
    "last_value",
    "lateral",
    "lead",
    "leading",
    "leave",
    "left",
    "like",
    "limit",
    "linear",
    "lines",
    "load",
    "localtime",
    "localtimestamp",
    "lock",
    "long",
    "longblob",
    "longtext",
    "loop",
    "low_priority",
    "master_bind",
    "master_heartbeat_period",
    "master_ssl_verify_server_cert",
    "match",
    "maxvalue",
    "mediumblob",
    "mediumint",
    "mediumtext",
    "member",
    "middleint",
    "minute_microsecond",
    "minute_second",
    "mod",
    "modifies",
    "natural",
    "no_write_to_binlog",
    "not",
    "nth_value",
    "ntile",
    "null",
    "numeric",
    "of",
    "on",
    "optimize",
    "optimizer_costs",
    "option",
    "optionally",
    "or",
    "order",
    "out",
    "outer",
    "outfile",
    "over",
    "parse_gcol_expr",
    "parallel",
    "partition",
    "percent_rank",
    "persist",
    "persist_only",
    "precision",
    "primary",
    "procedure",
    "purge",
    "qualify",
    "range",
    "rank",
    "read",
    "read_write",
    "reads",
    "real",
    "recursive",
    "references",
    "regexp",
    "release",
    "rename",
    "repeat",
    "replace",
    "require",
    "resignal",
    "restrict",
    "return",
    "revoke",
    "right",
    "rlike",
    "role",
    "row",
    "row_number",
    "rows",
    "schema",
    "schemas",
    "second_microsecond",
    "select",
    "sensitive",
    "separator",
    "set",
    "show",
    "signal",
    "slow",
    "smallint",
    "spatial",
    "specific",
    "sql",
    "sql_after_gtids",
    "sql_before_gtids",
    "sql_big_result",
    "sql_calc_found_rows",
    "sql_small_result",
    "sqlexception",
    "sqlstate",
    "sqlwarning",
    "ssl",
    "starting",
    "stored",
    "straight_join",
    "system",
    "table",
    "terminated",
    "then",
    "tinyblob",
    "tinyint",
    "tinytext",
    "to",
    "trailing",
    "trigger",
    "true",
    "undo",
    "union",
    "unique",
    "unlock",
    "unsigned",
    "update",
    "usage",
    "use",
    "using",
    "utc_date",
    "utc_time",
    "utc_timestamp",
    "values",
    "varbinary",
    "varchar",
    "varcharacter",
    "varying",
    "virtual",
    "when",
    "where",
    "while",
    "window",
    "with",
    "write",
    "xor",
    "year_month",
    "zerofill",
}
diff --git a/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/types.py b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/types.py
new file mode 100644
index 00000000..0c05aacb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sqlalchemy/dialects/mysql/types.py
@@ -0,0 +1,774 @@
+# dialects/mysql/types.py
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+import datetime
+
+from ... import exc
+from ... import util
+from ...sql import sqltypes
+
+
+class _NumericType:
+    """Base for MySQL numeric types.
+
+    This is the base both for NUMERIC as well as INTEGER, hence
+    it's a mixin.
+
+    """
+
+    def __init__(self, unsigned=False, zerofill=False, **kw):
+        self.unsigned = unsigned
+        self.zerofill = zerofill
+        super().__init__(**kw)
+
+    def __repr__(self):
+        return util.generic_repr(
+            self, to_inspect=[_NumericType, sqltypes.Numeric]
+        )
+
+
class _FloatType(_NumericType, sqltypes.Float):
    """Base for MySQL floating-point types carrying precision/scale."""

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        # REAL and DOUBLE accept precision/scale only as a pair: exactly
        # one of the two being None is an error.
        if isinstance(self, (REAL, DOUBLE)) and (precision is None) != (
            scale is None
        ):
            raise exc.ArgumentError(
                "You must specify both precision and scale or omit "
                "both altogether."
            )
        super().__init__(precision=precision, asdecimal=asdecimal, **kw)
        self.scale = scale

    def __repr__(self):
        return util.generic_repr(
            self, to_inspect=[_FloatType, _NumericType, sqltypes.Float]
        )
+
+
class _IntegerType(_NumericType, sqltypes.Integer):
    """Base for MySQL integer types, adding ``display_width``."""

    def __init__(self, display_width=None, **kw):
        self.display_width = display_width
        super().__init__(**kw)

    def __repr__(self):
        return util.generic_repr(
            self, to_inspect=[_IntegerType, _NumericType, sqltypes.Integer]
        )
+
+
class _StringType(sqltypes.String):
    """Base for MySQL string types carrying charset/collation options."""

    def __init__(
        self,
        charset=None,
        collation=None,
        ascii=False,  # noqa
        binary=False,
        unicode=False,
        national=False,
        **kw,
    ):
        self.charset = charset
        # accept ``collate=`` as a synonym for ``collation=``
        kw.setdefault("collation", kw.pop("collate", collation))
        self.ascii = ascii
        self.binary = binary
        self.unicode = unicode
        self.national = national
        super().__init__(**kw)

    def __repr__(self):
        return util.generic_repr(
            self, to_inspect=[_StringType, sqltypes.String]
        )
+
+
class _MatchType(sqltypes.Float, sqltypes.MatchType):
    # Combines Float (numeric MATCH score) with MatchType (boolean context).
    # Any keyword arguments passed are accepted but ignored.
    def __init__(self, **kw):
        # TODO: float arguments?
        sqltypes.Float.__init__(self)
        sqltypes.MatchType.__init__(self)
+
+
class NUMERIC(_NumericType, sqltypes.NUMERIC):
    """MySQL NUMERIC type."""

    __visit_name__ = "NUMERIC"

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        """Construct a NUMERIC.

        :param precision: Total digits stored.  When both ``precision`` and
          ``scale`` are None, values are stored to the server's limits.

        :param scale: Digits after the decimal point.

        :param unsigned: optional boolean; renders UNSIGNED in DDL.

        :param zerofill: optional boolean; if true the server stores values
          left-padded with zeros.  Values returned by the database API
          remain numeric regardless.

        """
        super().__init__(
            precision=precision, scale=scale, asdecimal=asdecimal, **kw
        )
+
+
class DECIMAL(_NumericType, sqltypes.DECIMAL):
    """MySQL DECIMAL type."""

    __visit_name__ = "DECIMAL"

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        """Construct a DECIMAL.

        :param precision: Total digits stored.  When both ``precision`` and
          ``scale`` are None, values are stored to the server's limits.

        :param scale: Digits after the decimal point.

        :param unsigned: optional boolean; renders UNSIGNED in DDL.

        :param zerofill: optional boolean; if true the server stores values
          left-padded with zeros.  Values returned by the database API
          remain numeric regardless.

        """
        super().__init__(
            precision=precision, scale=scale, asdecimal=asdecimal, **kw
        )
+
+
class DOUBLE(_FloatType, sqltypes.DOUBLE):
    """MySQL DOUBLE type."""

    __visit_name__ = "DOUBLE"

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        """Construct a DOUBLE.

        .. note::

            :class:`.DOUBLE` converts floats to Decimal by default, using
            a truncation that defaults to 10 digits.  Pass ``scale=n`` or
            ``decimal_return_scale=n`` to change the scale, or
            ``asdecimal=False`` to receive plain Python floats.

        :param precision: Total digits stored.  When both ``precision`` and
          ``scale`` are None, values are stored to the server's limits.

        :param scale: Digits after the decimal point.

        :param unsigned: optional boolean; renders UNSIGNED in DDL.

        :param zerofill: optional boolean; if true the server stores values
          left-padded with zeros.  Values returned by the database API
          remain numeric regardless.

        """
        super().__init__(
            precision=precision, scale=scale, asdecimal=asdecimal, **kw
        )
+
+
class REAL(_FloatType, sqltypes.REAL):
    """MySQL REAL type."""

    __visit_name__ = "REAL"

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        """Construct a REAL.

        .. note::

            :class:`.REAL` converts floats to Decimal by default, using
            a truncation that defaults to 10 digits.  Pass ``scale=n`` or
            ``decimal_return_scale=n`` to change the scale, or
            ``asdecimal=False`` to receive plain Python floats.

        :param precision: Total digits stored.  When both ``precision`` and
          ``scale`` are None, values are stored to the server's limits.

        :param scale: Digits after the decimal point.

        :param unsigned: optional boolean; renders UNSIGNED in DDL.

        :param zerofill: optional boolean; if true the server stores values
          left-padded with zeros.  Values returned by the database API
          remain numeric regardless.

        """
        super().__init__(
            precision=precision, scale=scale, asdecimal=asdecimal, **kw
        )
+
+
class FLOAT(_FloatType, sqltypes.FLOAT):
    """MySQL FLOAT type."""

    __visit_name__ = "FLOAT"

    def __init__(self, precision=None, scale=None, asdecimal=False, **kw):
        """Construct a FLOAT.

        :param precision: Total digits stored.  When both ``precision`` and
          ``scale`` are None, values are stored to the server's limits.

        :param scale: Digits after the decimal point.

        :param unsigned: optional boolean; renders UNSIGNED in DDL.

        :param zerofill: optional boolean; if true the server stores values
          left-padded with zeros.  Values returned by the database API
          remain numeric regardless.

        """
        super().__init__(
            precision=precision, scale=scale, asdecimal=asdecimal, **kw
        )

    def bind_processor(self, dialect):
        # no bind-parameter conversion is required for FLOAT
        return None
+
+
class INTEGER(_IntegerType, sqltypes.INTEGER):
    """MySQL INTEGER type."""

    __visit_name__ = "INTEGER"

    def __init__(self, display_width=None, **kw):
        """Construct an INTEGER.

        :param display_width: Optional maximum display width.

        :param unsigned: optional boolean; renders UNSIGNED in DDL.

        :param zerofill: optional boolean; if true the server stores values
          left-padded with zeros.  Values returned by the database API
          remain numeric regardless.

        """
        super().__init__(display_width=display_width, **kw)
+
+
class BIGINT(_IntegerType, sqltypes.BIGINT):
    """MySQL BIGINTEGER type."""

    __visit_name__ = "BIGINT"

    def __init__(self, display_width=None, **kw):
        """Construct a BIGINTEGER.

        :param display_width: Optional maximum display width.

        :param unsigned: optional boolean; renders UNSIGNED in DDL.

        :param zerofill: optional boolean; if true the server stores values
          left-padded with zeros.  Values returned by the database API
          remain numeric regardless.

        """
        super().__init__(display_width=display_width, **kw)
+
+
class MEDIUMINT(_IntegerType):
    """MySQL MEDIUMINTEGER type."""

    __visit_name__ = "MEDIUMINT"

    def __init__(self, display_width=None, **kw):
        """Construct a MEDIUMINTEGER.

        :param display_width: Optional maximum display width.

        :param unsigned: optional boolean; renders UNSIGNED in DDL.

        :param zerofill: optional boolean; if true the server stores values
          left-padded with zeros.  Values returned by the database API
          remain numeric regardless.

        """
        super().__init__(display_width=display_width, **kw)
+
+
class TINYINT(_IntegerType):
    """MySQL TINYINT type."""

    __visit_name__ = "TINYINT"

    def __init__(self, display_width=None, **kw):
        """Construct a TINYINT.

        :param display_width: Optional maximum display width.

        :param unsigned: optional boolean; renders UNSIGNED in DDL.

        :param zerofill: optional boolean; if true the server stores values
          left-padded with zeros.  Values returned by the database API
          remain numeric regardless.

        """
        super().__init__(display_width=display_width, **kw)
+
+
class SMALLINT(_IntegerType, sqltypes.SMALLINT):
    """MySQL SMALLINTEGER type."""

    __visit_name__ = "SMALLINT"

    def __init__(self, display_width=None, **kw):
        """Construct a SMALLINTEGER.

        :param display_width: Optional maximum display width.

        :param unsigned: optional boolean; renders UNSIGNED in DDL.

        :param zerofill: optional boolean; if true the server stores values
          left-padded with zeros.  Values returned by the database API
          remain numeric regardless.

        """
        super().__init__(display_width=display_width, **kw)
+
+
class BIT(sqltypes.TypeEngine):
    """MySQL BIT type.

    Available on MySQL 5.0.3+ for MyISAM and 5.0.5+ for MyISAM, MEMORY,
    InnoDB and BDB.  Use MSTinyInteger() on older servers.
    """

    __visit_name__ = "BIT"

    def __init__(self, length=None):
        """Construct a BIT.

        :param length: Optional number of bits.

        """
        self.length = length

    def result_processor(self, dialect, coltype):
        """Convert MySQL's variable-length (up to 64 bit) binary string
        into an integer.

        TODO: this is MySQL-db, pyodbc specific.  OurSQL and mysqlconnector
        already do this, so this logic should be moved to those dialects.

        """

        def process(value):
            if value is None:
                return value
            acc = 0
            for byte in value:
                if not isinstance(byte, int):
                    # str input: each element is a one-character string
                    byte = ord(byte)
                acc = (acc << 8) | byte
            return acc

        return process
+
+
class TIME(sqltypes.TIME):
    """MySQL TIME type."""

    __visit_name__ = "TIME"

    def __init__(self, timezone=False, fsp=None):
        """Construct a MySQL TIME type.

        :param timezone: not used by the MySQL dialect.
        :param fsp: fractional seconds precision value.
         MySQL 5.6 supports storage of fractional seconds; this value is
         used when emitting DDL for the TIME type.

         .. note::

            DBAPI driver support for fractional seconds may be limited;
            current support includes MySQL Connector/Python.

        """
        super().__init__(timezone=timezone)
        self.fsp = fsp

    def result_processor(self, dialect, coltype):
        time = datetime.time

        def process(value):
            # drivers return TIME as a timedelta; convert to datetime.time
            if value is None:
                return None
            hours, remainder = divmod(value.seconds, 3600)
            mins, secs = divmod(remainder, 60)
            return time(hours, mins, secs, microsecond=value.microseconds)

        return process
+
+
class TIMESTAMP(sqltypes.TIMESTAMP):
    """MySQL TIMESTAMP type."""

    __visit_name__ = "TIMESTAMP"

    def __init__(self, timezone=False, fsp=None):
        """Construct a MySQL TIMESTAMP type.

        :param timezone: not used by the MySQL dialect.
        :param fsp: fractional seconds precision value.
         MySQL 5.6.4 supports storage of fractional seconds; this value is
         used when emitting DDL for the TIMESTAMP type.

         .. note::

            DBAPI driver support for fractional seconds may be limited;
            current support includes MySQL Connector/Python.

        """
        super().__init__(timezone=timezone)
        self.fsp = fsp
+
+
class DATETIME(sqltypes.DATETIME):
    """MySQL DATETIME type."""

    __visit_name__ = "DATETIME"

    def __init__(self, timezone=False, fsp=None):
        """Construct a MySQL DATETIME type.

        :param timezone: not used by the MySQL dialect.
        :param fsp: fractional seconds precision value.
         MySQL 5.6.4 supports storage of fractional seconds; this value is
         used when emitting DDL for the DATETIME type.

         .. note::

            DBAPI driver support for fractional seconds may be limited;
            current support includes MySQL Connector/Python.

        """
        super().__init__(timezone=timezone)
        self.fsp = fsp
+
+
class YEAR(sqltypes.TypeEngine):
    """MySQL YEAR type; single-byte storage of years 1901-2155."""

    __visit_name__ = "YEAR"

    def __init__(self, display_width=None):
        # retained for DDL rendering; has no effect on stored values
        self.display_width = display_width
+
+
class TEXT(_StringType, sqltypes.TEXT):
    """MySQL TEXT type, for character storage encoded up to 2^16 bytes."""

    __visit_name__ = "TEXT"

    def __init__(self, length=None, **kw):
        """Construct a TEXT.

        :param length: Optional; if given, the server may substitute the
          smallest TEXT variant able to hold ``length`` bytes.

        :param charset: Optional column-level character set; takes
          precedence over the ``ascii``/``unicode`` shorthands.

        :param collation: Optional column-level collation; takes precedence
          over the ``binary`` shorthand.

        :param ascii: Defaults to False; shorthand for the ``latin1``
          character set, renders ASCII in DDL.

        :param unicode: Defaults to False; shorthand for the ``ucs2``
          character set, renders UNICODE in DDL.

        :param national: Optional; if true, use the server's configured
          national character set.

        :param binary: Defaults to False; shorthand selecting the binary
          collation matching the column's character set.  Renders BINARY
          in DDL; affects only collation, not the stored data.

        """
        super().__init__(length=length, **kw)
+
+
class TINYTEXT(_StringType):
    """MySQL TINYTEXT type, for character storage encoded up to 2^8 bytes."""

    __visit_name__ = "TINYTEXT"

    def __init__(self, **kwargs):
        """Construct a TINYTEXT.

        :param charset: Optional column-level character set; takes
          precedence over the ``ascii``/``unicode`` shorthands.

        :param collation: Optional column-level collation; takes precedence
          over the ``binary`` shorthand.

        :param ascii: Defaults to False; shorthand for the ``latin1``
          character set, renders ASCII in DDL.

        :param unicode: Defaults to False; shorthand for the ``ucs2``
          character set, renders UNICODE in DDL.

        :param national: Optional; if true, use the server's configured
          national character set.

        :param binary: Defaults to False; shorthand selecting the binary
          collation matching the column's character set.  Renders BINARY
          in DDL; affects only collation, not the stored data.

        """
        super().__init__(**kwargs)
+
+
class MEDIUMTEXT(_StringType):
    """MySQL MEDIUMTEXT type, for character storage encoded up
    to 2^24 bytes."""

    __visit_name__ = "MEDIUMTEXT"

    def __init__(self, **kwargs):
        """Construct a MEDIUMTEXT.

        :param charset: Optional column-level character set; takes
          precedence over the ``ascii``/``unicode`` shorthands.

        :param collation: Optional column-level collation; takes precedence
          over the ``binary`` shorthand.

        :param ascii: Defaults to False; shorthand for the ``latin1``
          character set, renders ASCII in DDL.

        :param unicode: Defaults to False; shorthand for the ``ucs2``
          character set, renders UNICODE in DDL.

        :param national: Optional; if true, use the server's configured
          national character set.

        :param binary: Defaults to False; shorthand selecting the binary
          collation matching the column's character set.  Renders BINARY
          in DDL; affects only collation, not the stored data.

        """
        super().__init__(**kwargs)
+
+
class LONGTEXT(_StringType):
    """MySQL LONGTEXT type, for character storage encoded up to 2^32 bytes."""

    __visit_name__ = "LONGTEXT"

    def __init__(self, **kwargs):
        """Construct a LONGTEXT.

        :param charset: Optional column-level character set; takes
          precedence over the ``ascii``/``unicode`` shorthands.

        :param collation: Optional column-level collation; takes precedence
          over the ``binary`` shorthand.

        :param ascii: Defaults to False; shorthand for the ``latin1``
          character set, renders ASCII in DDL.

        :param unicode: Defaults to False; shorthand for the ``ucs2``
          character set, renders UNICODE in DDL.

        :param national: Optional; if true, use the server's configured
          national character set.

        :param binary: Defaults to False; shorthand selecting the binary
          collation matching the column's character set.  Renders BINARY
          in DDL; affects only collation, not the stored data.

        """
        super().__init__(**kwargs)
+
+
class VARCHAR(_StringType, sqltypes.VARCHAR):
    """MySQL VARCHAR type, for variable-length character data."""

    __visit_name__ = "VARCHAR"

    def __init__(self, length=None, **kwargs):
        """Construct a VARCHAR.

        :param charset: Optional column-level character set; takes
          precedence over the ``ascii``/``unicode`` shorthands.

        :param collation: Optional column-level collation; takes precedence
          over the ``binary`` shorthand.

        :param ascii: Defaults to False; shorthand for the ``latin1``
          character set, renders ASCII in DDL.

        :param unicode: Defaults to False; shorthand for the ``ucs2``
          character set, renders UNICODE in DDL.

        :param national: Optional; if true, use the server's configured
          national character set.

        :param binary: Defaults to False; shorthand selecting the binary
          collation matching the column's character set.  Renders BINARY
          in DDL; affects only collation, not the stored data.

        """
        super().__init__(length=length, **kwargs)
+
+
class CHAR(_StringType, sqltypes.CHAR):
    """MySQL CHAR type, for fixed-length character data."""

    __visit_name__ = "CHAR"

    def __init__(self, length=None, **kwargs):
        """Construct a CHAR.

        :param length: Maximum data length, in characters.

        :param binary: Optional; use the default binary collation for the
          national character set.  Affects collation only, not the stored
          data; use a BINARY type for binary data.

        :param collation: Optional; request a particular collation, which
          must be compatible with the national character set.

        """
        super().__init__(length=length, **kwargs)

    @classmethod
    def _adapt_string_for_cast(cls, type_):
        """Return a CHAR equivalent of *type_* suitable for rendering a
        CAST expression."""
        type_ = sqltypes.to_instance(type_)
        if isinstance(type_, sqltypes.CHAR):
            return type_
        if isinstance(type_, _StringType):
            return CHAR(
                length=type_.length,
                charset=type_.charset,
                collation=type_.collation,
                ascii=type_.ascii,
                binary=type_.binary,
                unicode=type_.unicode,
                national=False,  # CAST does not support NATIONAL
            )
        return CHAR(length=type_.length)
+
+
+class NVARCHAR(_StringType, sqltypes.NVARCHAR):
+    """MySQL NVARCHAR type.
+
+    For variable-length character data in the server's configured national
+    character set.
+    """
+
+    __visit_name__ = "NVARCHAR"
+
+    def __init__(self, length=None, **kwargs):
+        """Construct an NVARCHAR.
+
+        :param length: Maximum data length, in characters.
+
+        :param binary: Optional, use the default binary collation for the
+          national character set.  This does not affect the type of data
+          stored, use a BINARY type for binary data.
+
+        :param collation: Optional, request a particular collation.  Must be
+          compatible with the national character set.
+
+        """
+        # NVARCHAR is by definition the national-charset variant; force the
+        # flag regardless of what the caller passed.
+        kwargs["national"] = True
+        super().__init__(length=length, **kwargs)
+
+
+class NCHAR(_StringType, sqltypes.NCHAR):
+    """MySQL NCHAR type.
+
+    For fixed-length character data in the server's configured national
+    character set.
+    """
+
+    __visit_name__ = "NCHAR"
+
+    def __init__(self, length=None, **kwargs):
+        """Construct an NCHAR.
+
+        :param length: Maximum data length, in characters.
+
+        :param binary: Optional, use the default binary collation for the
+          national character set.  This does not affect the type of data
+          stored, use a BINARY type for binary data.
+
+        :param collation: Optional, request a particular collation.  Must be
+          compatible with the national character set.
+
+        """
+        # NCHAR is by definition the national-charset variant; force the
+        # flag regardless of what the caller passed.
+        kwargs["national"] = True
+        super().__init__(length=length, **kwargs)
+
+
+class TINYBLOB(sqltypes._Binary):
+    """MySQL TINYBLOB type, for binary data up to 2^8 - 1 (255) bytes."""
+
+    __visit_name__ = "TINYBLOB"
+
+
+class MEDIUMBLOB(sqltypes._Binary):
+    """MySQL MEDIUMBLOB type, for binary data up to 2^24 - 1 bytes."""
+
+    __visit_name__ = "MEDIUMBLOB"
+
+
+class LONGBLOB(sqltypes._Binary):
+    """MySQL LONGBLOB type, for binary data up to 2^32 - 1 bytes."""
+
+    __visit_name__ = "LONGBLOB"