diff -pruN 1.7.6-1/alembic/autogenerate/api.py 1.8.1-2/alembic/autogenerate/api.py
--- 1.7.6-1/alembic/autogenerate/api.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/autogenerate/api.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,5 +1,4 @@
-"""Provide the 'autogenerate' feature which can produce migration operations
-automatically."""
+from __future__ import annotations
 
 import contextlib
 from typing import Any
@@ -19,6 +18,9 @@ from . import render
 from .. import util
 from ..operations import ops
 
+"""Provide the 'autogenerate' feature which can produce migration operations
+automatically."""
+
 if TYPE_CHECKING:
     from sqlalchemy.engine import Connection
     from sqlalchemy.engine import Dialect
@@ -370,7 +372,6 @@ class AutogenContext:
         :paramref:`.EnvironmentContext.configure.include_name` parameter.
 
         """
-
         if "schema_name" in parent_names:
             if type_ == "table":
                 table_name = name
@@ -516,7 +517,7 @@ class RevisionContext:
             branch_labels=migration_script.branch_label,
             version_path=migration_script.version_path,
             depends_on=migration_script.depends_on,
-            **template_args
+            **template_args,
         )
 
     def run_autogenerate(
diff -pruN 1.7.6-1/alembic/autogenerate/compare.py 1.8.1-2/alembic/autogenerate/compare.py
--- 1.7.6-1/alembic/autogenerate/compare.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/autogenerate/compare.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import contextlib
 import logging
 import re
@@ -282,7 +284,7 @@ def _make_index(params: Dict[str, Any],
         params["name"],
         *[conn_table.c[cname] for cname in params["column_names"]],
         unique=params["unique"],
-        _table=conn_table
+        _table=conn_table,
     )
     if "duplicates_constraint" in params:
         ix.info["duplicates_constraint"] = params["duplicates_constraint"]
@@ -294,7 +296,7 @@ def _make_unique_constraint(
 ) -> "UniqueConstraint":
     uq = sa_schema.UniqueConstraint(
         *[conn_table.c[cname] for cname in params["column_names"]],
-        name=params["name"]
+        name=params["name"],
     )
     if "duplicates_index" in params:
         uq.info["duplicates_index"] = params["duplicates_index"]
@@ -1245,7 +1247,7 @@ def _compare_foreign_keys(
         if isinstance(fk, sa_schema.ForeignKeyConstraint)
     )
 
-    conn_fks = [
+    conn_fks_list = [
         fk
         for fk in inspector.get_foreign_keys(tname, schema=schema)
         if autogen_context.run_name_filters(
@@ -1255,9 +1257,13 @@ def _compare_foreign_keys(
         )
     ]
 
-    backend_reflects_fk_options = bool(conn_fks and "options" in conn_fks[0])
+    backend_reflects_fk_options = bool(
+        conn_fks_list and "options" in conn_fks_list[0]
+    )
 
-    conn_fks = set(_make_foreign_key(const, conn_table) for const in conn_fks)
+    conn_fks = set(
+        _make_foreign_key(const, conn_table) for const in conn_fks_list
+    )
 
     # give the dialect a chance to correct the FKs to match more
     # closely
@@ -1265,24 +1271,24 @@ def _compare_foreign_keys(
         conn_fks, metadata_fks
     )
 
-    metadata_fks = set(
+    metadata_fks_sig = set(
         _fk_constraint_sig(fk, include_options=backend_reflects_fk_options)
         for fk in metadata_fks
     )
 
-    conn_fks = set(
+    conn_fks_sig = set(
         _fk_constraint_sig(fk, include_options=backend_reflects_fk_options)
         for fk in conn_fks
     )
 
-    conn_fks_by_sig = dict((c.sig, c) for c in conn_fks)
-    metadata_fks_by_sig = dict((c.sig, c) for c in metadata_fks)
+    conn_fks_by_sig = dict((c.sig, c) for c in conn_fks_sig)
+    metadata_fks_by_sig = dict((c.sig, c) for c in metadata_fks_sig)
 
     metadata_fks_by_name = dict(
-        (c.name, c) for c in metadata_fks if c.name is not None
+        (c.name, c) for c in metadata_fks_sig if c.name is not None
     )
     conn_fks_by_name = dict(
-        (c.name, c) for c in conn_fks if c.name is not None
+        (c.name, c) for c in conn_fks_sig if c.name is not None
     )
 
     def _add_fk(obj, compare_to):
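
The renames above (`conn_fks` to `conn_fks_list`, then `conn_fks_sig`, and likewise for `metadata_fks`) keep each variable bound to a single type, which strict type checking requires once a name can no longer be rebound from a list of reflected dicts to a set of signature objects. A minimal illustration of the pattern, with made-up names:

```python
from typing import Any, Dict, List, Set

def collect_names(rows: List[Dict[str, Any]]) -> Set[str]:
    # Rebinding `rows` to the set would change its type mid-function and
    # fail under mypy; a fresh name keeps each binding consistently typed,
    # mirroring the conn_fks_list / conn_fks_sig split above.
    names: Set[str] = {row["name"] for row in rows}
    return names
```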
diff -pruN 1.7.6-1/alembic/autogenerate/render.py 1.8.1-2/alembic/autogenerate/render.py
--- 1.7.6-1/alembic/autogenerate/render.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/autogenerate/render.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from collections import OrderedDict
 from io import StringIO
 import re
diff -pruN 1.7.6-1/alembic/autogenerate/rewriter.py 1.8.1-2/alembic/autogenerate/rewriter.py
--- 1.7.6-1/alembic/autogenerate/rewriter.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/autogenerate/rewriter.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,7 +1,10 @@
+from __future__ import annotations
+
 from typing import Any
 from typing import Callable
 from typing import Iterator
 from typing import List
+from typing import Optional
 from typing import Type
 from typing import TYPE_CHECKING
 from typing import Union
@@ -49,12 +52,12 @@ class Rewriter:
 
     _traverse = util.Dispatcher()
 
-    _chained = None
+    _chained: Optional[Rewriter] = None
 
     def __init__(self) -> None:
         self.dispatch = util.Dispatcher()
 
-    def chain(self, other: "Rewriter") -> "Rewriter":
+    def chain(self, other: Rewriter) -> Rewriter:
         """Produce a "chain" of this :class:`.Rewriter` to another.
 
         This allows two rewriters to operate serially on a stream,
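
The unquoted `Rewriter` annotations work because `from __future__ import annotations` (PEP 563) stores annotations as strings and defers their evaluation, so forward and self references no longer need quoting; the same future import is added to nearly every file in this patch. An illustrative sketch of the typing pattern, not Alembic's actual implementation:

```python
from __future__ import annotations

from typing import Optional

class Rewriter:
    # Unquoted self-reference: under PEP 563 this annotation is stored as
    # a string and never evaluated at class-definition time.
    _chained: Optional[Rewriter] = None

    def chain(self, other: Rewriter) -> Rewriter:
        self._chained = other
        return other
```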
diff -pruN 1.7.6-1/alembic/command.py 1.8.1-2/alembic/command.py
--- 1.7.6-1/alembic/command.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/command.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,6 +1,7 @@
+from __future__ import annotations
+
 import os
 from typing import Callable
-from typing import cast
 from typing import List
 from typing import Optional
 from typing import TYPE_CHECKING
@@ -28,7 +29,7 @@ def list_templates(config):
         with open(
             os.path.join(config.get_template_directory(), tempname, "README")
         ) as readme:
-            synopsis = next(readme)
+            synopsis = next(readme).rstrip()
         config.print_stdout("%s - %s", tempname, synopsis)
 
     config.print_stdout("\nTemplates are used via the 'init' command, e.g.:")
@@ -86,8 +87,9 @@ def init(
     for file_ in os.listdir(template_dir):
         file_path = os.path.join(template_dir, file_)
         if file_ == "alembic.ini.mako":
-            config_file = os.path.abspath(cast(str, config.config_file_name))
-            if os.access(cast(str, config_file), os.F_OK):
+            assert config.config_file_name is not None
+            config_file = os.path.abspath(config.config_file_name)
+            if os.access(config_file, os.F_OK):
                 util.msg("File %s already exists, skipping" % config_file)
             else:
                 script._generate_template(
@@ -273,7 +275,7 @@ def merge(
         refresh=True,
         head=revisions,
         branch_labels=branch_label,
-        **template_args  # type:ignore[arg-type]
+        **template_args,  # type:ignore[arg-type]
     )
 
 
@@ -642,6 +644,7 @@ def edit(config: "Config", rev: str) ->
                 "No revision files indicated by symbol '%s'" % rev
             )
         for sc in revs:
+            assert sc
             util.open_in_editor(sc.path)
 
 
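Several hunks in command.py replace `typing.cast` with a plain `assert`, which narrows the `Optional` for the type checker and also fails fast at runtime if the invariant breaks, whereas `cast` only silences the checker. A generic sketch of the pattern (the function name is hypothetical):

```python
from typing import Optional

def require_config_file(config_file_name: Optional[str]) -> str:
    # cast(str, config_file_name) would hide a None from mypy but let it
    # flow onward; the assert narrows the type and raises immediately.
    assert config_file_name is not None
    return config_file_name
```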
diff -pruN 1.7.6-1/alembic/config.py 1.8.1-2/alembic/config.py
--- 1.7.6-1/alembic/config.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/config.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from argparse import ArgumentParser
 from argparse import Namespace
 from configparser import ConfigParser
@@ -559,7 +561,7 @@ class CommandLine:
             fn(
                 config,
                 *[getattr(options, k, None) for k in positional],
-                **dict((k, getattr(options, k, None)) for k in kwarg)
+                **dict((k, getattr(options, k, None)) for k in kwarg),
             )
         except util.CommandError as e:
             if options.raiseerr:
diff -pruN 1.7.6-1/alembic/context.pyi 1.8.1-2/alembic/context.pyi
--- 1.7.6-1/alembic/context.pyi	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/context.pyi	2022-07-13 14:17:20.000000000 +0000
@@ -1,5 +1,6 @@
 # ### this file stubs are generated by tools/write_pyi.py - do not edit ###
 # ### imports are manually managed
+from __future__ import annotations
 
 from typing import Callable
 from typing import ContextManager
@@ -67,7 +68,7 @@ def begin_transaction() -> Union["_Proxy
 config: Config
 
 def configure(
-    connection: Optional["Connection"] = None,
+    connection: Optional[Connection] = None,
     url: Optional[str] = None,
     dialect_name: Optional[str] = None,
     dialect_opts: Optional[dict] = None,
@@ -78,7 +79,7 @@ def configure(
     tag: Optional[str] = None,
     template_args: Optional[dict] = None,
     render_as_batch: bool = False,
-    target_metadata: Optional["MetaData"] = None,
+    target_metadata: Optional[MetaData] = None,
     include_name: Optional[Callable] = None,
     include_object: Optional[Callable] = None,
     include_schemas: bool = False,
@@ -93,7 +94,7 @@ def configure(
     sqlalchemy_module_prefix: str = "sa.",
     user_module_prefix: Optional[str] = None,
     on_version_apply: Optional[Callable] = None,
-    **kw
+    **kw,
 ) -> None:
     """Configure a :class:`.MigrationContext` within this
     :class:`.EnvironmentContext` which will provide database
@@ -553,7 +554,7 @@ def get_bind():
 
     """
 
-def get_context() -> "MigrationContext":
+def get_context() -> MigrationContext:
     """Return the current :class:`.MigrationContext` object.
 
     If :meth:`.EnvironmentContext.configure` has not been
diff -pruN 1.7.6-1/alembic/ddl/base.py 1.8.1-2/alembic/ddl/base.py
--- 1.7.6-1/alembic/ddl/base.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/ddl/base.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import functools
 from typing import Optional
 from typing import TYPE_CHECKING
@@ -114,7 +116,7 @@ class ColumnDefault(AlterColumn):
         name: str,
         column_name: str,
         default: Optional[_ServerDefault],
-        **kw
+        **kw,
     ) -> None:
         super(ColumnDefault, self).__init__(name, column_name, **kw)
         self.default = default
@@ -135,7 +137,7 @@ class IdentityColumnDefault(AlterColumn)
         column_name: str,
         default: Optional["Identity"],
         impl: "DefaultImpl",
-        **kw
+        **kw,
     ) -> None:
         super(IdentityColumnDefault, self).__init__(name, column_name, **kw)
         self.default = default
diff -pruN 1.7.6-1/alembic/ddl/impl.py 1.8.1-2/alembic/ddl/impl.py
--- 1.7.6-1/alembic/ddl/impl.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/ddl/impl.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from collections import namedtuple
 import re
 from typing import Any
@@ -215,7 +217,7 @@ class DefaultImpl(metaclass=ImplMeta):
         existing_server_default: Optional["_ServerDefault"] = None,
         existing_nullable: Optional[bool] = None,
         existing_autoincrement: Optional[bool] = None,
-        **kw: Any
+        **kw: Any,
     ) -> None:
         if autoincrement is not None or existing_autoincrement is not None:
             util.warn(
@@ -266,7 +268,7 @@ class DefaultImpl(metaclass=ImplMeta):
                     existing_server_default=existing_server_default,
                     existing_nullable=existing_nullable,
                     existing_comment=existing_comment,
-                    **kw
+                    **kw,
                 )
             )
         if type_ is not None:
@@ -324,7 +326,7 @@ class DefaultImpl(metaclass=ImplMeta):
         table_name: str,
         column: "Column",
         schema: Optional[str] = None,
-        **kw
+        **kw,
     ) -> None:
         self._exec(base.DropColumn(table_name, column, schema=schema))
 
@@ -369,7 +371,13 @@ class DefaultImpl(metaclass=ImplMeta):
                 self.create_column_comment(column)
 
     def drop_table(self, table: "Table") -> None:
+        table.dispatch.before_drop(
+            table, self.connection, checkfirst=False, _ddl_runner=self
+        )
         self._exec(schema.DropTable(table))
+        table.dispatch.after_drop(
+            table, self.connection, checkfirst=False, _ddl_runner=self
+        )
 
     def create_index(self, index: "Index") -> None:
         self._exec(schema.CreateIndex(index))
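
With `drop_table` now calling `table.dispatch.before_drop()` and `after_drop()`, table drops issued through Alembic fire the same SQLAlchemy DDL events that table creation paths already did. A minimal sketch of a listener this enables; the table and listener names are made up:

```python
from sqlalchemy import Column, Integer, MetaData, Table, event

metadata = MetaData()
accounts = Table("accounts", metadata, Column("id", Integer, primary_key=True))

@event.listens_for(accounts, "before_drop")
def receive_before_drop(target, connection, **kw):
    # reached when a migration drops this table, since impl.drop_table()
    # now invokes table.dispatch.before_drop(...) around the DROP
    print("about to drop", target.name)
```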
diff -pruN 1.7.6-1/alembic/ddl/mssql.py 1.8.1-2/alembic/ddl/mssql.py
--- 1.7.6-1/alembic/ddl/mssql.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/ddl/mssql.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from typing import Any
 from typing import List
 from typing import Optional
@@ -96,27 +98,36 @@ class MSSQLImpl(DefaultImpl):
         existing_type: Optional["TypeEngine"] = None,
         existing_server_default: Optional["_ServerDefault"] = None,
         existing_nullable: Optional[bool] = None,
-        **kw: Any
+        **kw: Any,
     ) -> None:
 
         if nullable is not None:
-            if existing_type is None:
-                if type_ is not None:
-                    existing_type = type_
-                    # the NULL/NOT NULL alter will handle
-                    # the type alteration
-                    type_ = None
-                else:
-                    raise util.CommandError(
-                        "MS-SQL ALTER COLUMN operations "
-                        "with NULL or NOT NULL require the "
-                        "existing_type or a new type_ be passed."
-                    )
-            elif type_ is not None:
+            if type_ is not None:
                 # the NULL/NOT NULL alter will handle
                 # the type alteration
                 existing_type = type_
                 type_ = None
+            elif existing_type is None:
+                raise util.CommandError(
+                    "MS-SQL ALTER COLUMN operations "
+                    "with NULL or NOT NULL require the "
+                    "existing_type or a new type_ be passed."
+                )
+        elif existing_nullable is not None and type_ is not None:
+            nullable = existing_nullable
+
+            # the NULL/NOT NULL alter will handle
+            # the type alteration
+            existing_type = type_
+            type_ = None
+
+        elif type_ is not None:
+            util.warn(
+                "MS-SQL ALTER COLUMN operations that specify type_= "
+                "should also specify a nullable= or "
+                "existing_nullable= argument to avoid implicit conversion "
+                "of NOT NULL columns to NULL."
+            )
 
         used_default = False
         if sqla_compat._server_default_is_identity(
@@ -136,7 +147,7 @@ class MSSQLImpl(DefaultImpl):
             schema=schema,
             existing_type=existing_type,
             existing_nullable=existing_nullable,
-            **kw
+            **kw,
         )
 
         if server_default is not False and used_default is False:
@@ -194,7 +205,7 @@ class MSSQLImpl(DefaultImpl):
         table_name: str,
         column: "Column",
         schema: Optional[str] = None,
-        **kw
+        **kw,
     ) -> None:
         drop_default = kw.pop("mssql_drop_default", False)
         if drop_default:
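
The reworked branch logic means an MSSQL `alter_column` that changes only the type now reuses `existing_nullable` for the combined NULL/NOT NULL alter, and warns when neither `nullable` nor `existing_nullable` is supplied, since SQL Server's ALTER COLUMN would otherwise implicitly convert a NOT NULL column to NULL. A hedged migration sketch with made-up table and column names:

```python
import sqlalchemy as sa
from alembic import op

def upgrade() -> None:
    op.alter_column(
        "accounts",
        "name",
        type_=sa.String(100),
        existing_nullable=False,  # folded into the ALTER so the column
                                  # stays NOT NULL instead of reverting
    )
```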
diff -pruN 1.7.6-1/alembic/ddl/mysql.py 1.8.1-2/alembic/ddl/mysql.py
--- 1.7.6-1/alembic/ddl/mysql.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/ddl/mysql.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import re
 from typing import Any
 from typing import Optional
@@ -60,7 +62,7 @@ class MySQLImpl(DefaultImpl):
         existing_autoincrement: Optional[bool] = None,
         comment: Optional[Union[str, "Literal[False]"]] = False,
         existing_comment: Optional[str] = None,
-        **kw: Any
+        **kw: Any,
     ) -> None:
         if sqla_compat._server_default_is_identity(
             server_default, existing_server_default
@@ -79,7 +81,7 @@ class MySQLImpl(DefaultImpl):
                 existing_nullable=existing_nullable,
                 server_default=server_default,
                 existing_server_default=existing_server_default,
-                **kw
+                **kw,
             )
         if name is not None or self._is_mysql_allowed_functional_default(
             type_ if type_ is not None else existing_type, server_default
diff -pruN 1.7.6-1/alembic/ddl/oracle.py 1.8.1-2/alembic/ddl/oracle.py
--- 1.7.6-1/alembic/ddl/oracle.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/ddl/oracle.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from typing import Any
 from typing import Optional
 from typing import TYPE_CHECKING
@@ -36,6 +38,7 @@ class OracleImpl(DefaultImpl):
     type_synonyms = DefaultImpl.type_synonyms + (
         {"VARCHAR", "VARCHAR2"},
         {"BIGINT", "INTEGER", "SMALLINT", "DECIMAL", "NUMERIC", "NUMBER"},
+        {"DOUBLE", "FLOAT", "DOUBLE_PRECISION"},
     )
     identity_attrs_ignore = ()
 
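Grouping DOUBLE, FLOAT and DOUBLE_PRECISION as synonyms stops autogenerate from proposing type changes when Oracle reflects a floating-point column under a different but equivalent name. A hedged model-side sketch; the table is made up:

```python
import sqlalchemy as sa

metadata = sa.MetaData()

# Oracle may reflect this back as DOUBLE PRECISION rather than FLOAT;
# with the synonym set above, the type comparison treats the two as
# equal and autogenerate emits no spurious alter_column.
measurements = sa.Table(
    "measurements",
    metadata,
    sa.Column("value", sa.Float()),
)
```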
diff -pruN 1.7.6-1/alembic/ddl/postgresql.py 1.8.1-2/alembic/ddl/postgresql.py
--- 1.7.6-1/alembic/ddl/postgresql.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/ddl/postgresql.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import logging
 import re
 from typing import Any
@@ -143,7 +145,7 @@ class PostgresqlImpl(DefaultImpl):
         existing_server_default: Optional["_ServerDefault"] = None,
         existing_nullable: Optional[bool] = None,
         existing_autoincrement: Optional[bool] = None,
-        **kw: Any
+        **kw: Any,
     ) -> None:
 
         using = kw.pop("postgresql_using", None)
@@ -179,7 +181,7 @@ class PostgresqlImpl(DefaultImpl):
             existing_server_default=existing_server_default,
             existing_nullable=existing_nullable,
             existing_autoincrement=existing_autoincrement,
-            **kw
+            **kw,
         )
 
     def autogen_column_reflect(self, inspector, table, column_info):
@@ -417,7 +419,7 @@ class CreateExcludeConstraintOp(ops.AddC
         where: Optional[Union["BinaryExpression", str]] = None,
         schema: Optional[str] = None,
         _orig_constraint: Optional["ExcludeConstraint"] = None,
-        **kw
+        **kw,
     ) -> None:
         self.constraint_name = constraint_name
         self.table_name = table_name
@@ -459,7 +461,7 @@ class CreateExcludeConstraintOp(ops.AddC
             *self.elements,
             name=self.constraint_name,
             where=self.where,
-            **self.kw
+            **self.kw,
         )
         for (
             expr,
@@ -477,7 +479,7 @@ class CreateExcludeConstraintOp(ops.AddC
         constraint_name: str,
         table_name: str,
         *elements: Any,
-        **kw: Any
+        **kw: Any,
     ) -> Optional["Table"]:
         """Issue an alter to create an EXCLUDE constraint using the
         current migration context.
diff -pruN 1.7.6-1/alembic/ddl/sqlite.py 1.8.1-2/alembic/ddl/sqlite.py
--- 1.7.6-1/alembic/ddl/sqlite.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/ddl/sqlite.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import re
 from typing import Any
 from typing import Dict
@@ -9,12 +11,17 @@ from sqlalchemy import cast
 from sqlalchemy import JSON
 from sqlalchemy import schema
 from sqlalchemy import sql
+from sqlalchemy.ext.compiler import compiles
 
+from .base import alter_table
+from .base import format_table_name
+from .base import RenameTable
 from .impl import DefaultImpl
 from .. import util
 
 if TYPE_CHECKING:
     from sqlalchemy.engine.reflection import Inspector
+    from sqlalchemy.sql.compiler import DDLCompiler
     from sqlalchemy.sql.elements import Cast
     from sqlalchemy.sql.elements import ClauseElement
     from sqlalchemy.sql.schema import Column
@@ -176,6 +183,16 @@ class SQLiteImpl(DefaultImpl):
             )
 
 
+@compiles(RenameTable, "sqlite")
+def visit_rename_table(
+    element: "RenameTable", compiler: "DDLCompiler", **kw
+) -> str:
+    return "%s RENAME TO %s" % (
+        alter_table(compiler, element.table_name, element.schema),
+        format_table_name(compiler, element.new_table_name, None),
+    )
+
+
 # @compiles(AddColumn, 'sqlite')
 # def visit_add_column(element, compiler, **kw):
 #    return "%s %s" % (
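
The new `visit_rename_table` override emits SQLite's `ALTER TABLE ... RENAME TO` with the old name schema-qualified but the new name bare, matching SQLite's requirement that a renamed table stay in its schema. A sketch of the operation it serves (names are made up):

```python
from alembic import op

def upgrade() -> None:
    # compiles through visit_rename_table to roughly:
    #   ALTER TABLE main.old_accounts RENAME TO new_accounts
    # note that the new name is deliberately not schema-qualified
    op.rename_table("old_accounts", "new_accounts", schema="main")
```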
diff -pruN 1.7.6-1/alembic/__init__.py 1.8.1-2/alembic/__init__.py
--- 1.7.6-1/alembic/__init__.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/__init__.py	2022-07-13 14:17:20.000000000 +0000
@@ -3,4 +3,4 @@ import sys
 from . import context
 from . import op
 
-__version__ = "1.7.6"
+__version__ = "1.8.1"
diff -pruN 1.7.6-1/alembic/operations/base.py 1.8.1-2/alembic/operations/base.py
--- 1.7.6-1/alembic/operations/base.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/operations/base.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,12 +1,17 @@
+from __future__ import annotations
+
 from contextlib import contextmanager
 import re
 import textwrap
 from typing import Any
 from typing import Callable
+from typing import Dict
 from typing import Iterator
 from typing import List  # noqa
+from typing import Mapping
 from typing import Optional
 from typing import Sequence  # noqa
+from typing import Tuple
 from typing import Type  # noqa
 from typing import TYPE_CHECKING
 from typing import Union
@@ -25,6 +30,8 @@ from ..util.compat import inspect_getful
 NoneType = type(None)
 
 if TYPE_CHECKING:
+    from typing import Literal
+
     from sqlalchemy import Table  # noqa
     from sqlalchemy.engine import Connection
 
@@ -209,17 +216,17 @@ class Operations(util.ModuleClsProxy):
     @contextmanager
     def batch_alter_table(
         self,
-        table_name,
-        schema=None,
-        recreate="auto",
-        partial_reordering=None,
-        copy_from=None,
-        table_args=(),
-        table_kwargs=util.immutabledict(),
-        reflect_args=(),
-        reflect_kwargs=util.immutabledict(),
-        naming_convention=None,
-    ):
+        table_name: str,
+        schema: Optional[str] = None,
+        recreate: Literal["auto", "always", "never"] = "auto",
+        partial_reordering: Optional[tuple] = None,
+        copy_from: Optional["Table"] = None,
+        table_args: Tuple[Any, ...] = (),
+        table_kwargs: Mapping[str, Any] = util.immutabledict(),
+        reflect_args: Tuple[Any, ...] = (),
+        reflect_kwargs: Mapping[str, Any] = util.immutabledict(),
+        naming_convention: Optional[Dict[str, str]] = None,
+    ) -> Iterator["BatchOperations"]:
         """Invoke a series of per-table migrations in batch.
 
         Batch mode allows a series of operations specific to a table
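
`batch_alter_table` gains full annotations here, including `recreate` as a `Literal["auto", "always", "never"]` and an `Iterator[BatchOperations]` return type for the context manager, so the standard usage now type-checks end to end. A generic sketch:

```python
import sqlalchemy as sa
from alembic import op

def upgrade() -> None:
    # batch_op is now typed as BatchOperations; recreate only accepts
    # the literals "auto", "always" and "never"
    with op.batch_alter_table("accounts", recreate="auto") as batch_op:
        batch_op.alter_column("name", type_=sa.String(100))
```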
diff -pruN 1.7.6-1/alembic/operations/batch.py 1.8.1-2/alembic/operations/batch.py
--- 1.7.6-1/alembic/operations/batch.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/operations/batch.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,5 +1,6 @@
+from __future__ import annotations
+
 from typing import Any
-from typing import cast
 from typing import Dict
 from typing import List
 from typing import Optional
@@ -23,8 +24,10 @@ from sqlalchemy.util import topological
 from ..util import exc
 from ..util.sqla_compat import _columns_for_constraint
 from ..util.sqla_compat import _copy
+from ..util.sqla_compat import _copy_expression
 from ..util.sqla_compat import _ensure_scope_for_ddl
 from ..util.sqla_compat import _fk_is_self_referential
+from ..util.sqla_compat import _idx_table_bound_expressions
 from ..util.sqla_compat import _insert_inline
 from ..util.sqla_compat import _is_type_bound
 from ..util.sqla_compat import _remove_column_from_collection
@@ -116,13 +119,28 @@ class BatchOperationsImpl:
                     existing_table = self.copy_from
                     reflected = False
                 else:
+                    if self.operations.migration_context.as_sql:
+                        raise exc.CommandError(
+                            f"This operation cannot proceed in --sql mode; "
+                            f"batch mode with dialect "
+                            f"{self.operations.migration_context.dialect.name} "  # noqa: E501
+                            f"requires a live database connection with which "
+                            f'to reflect the table "{self.table_name}". '
+                            f"To generate a batch SQL migration script using "
+                            "table "
+                            '"move and copy", a complete Table object '
+                            f'should be passed to the "copy_from" argument '
+                            "of the batch_alter_table() method so that table "
+                            "reflection can be skipped."
+                        )
+
                     existing_table = Table(
                         self.table_name,
                         m1,
                         schema=self.schema,
                         autoload_with=self.operations.get_bind(),
                         *self.reflect_args,
-                        **self.reflect_kwargs
+                        **self.reflect_kwargs,
                     )
                     reflected = True
 
@@ -311,7 +329,7 @@ class ApplyBatchImpl:
             m,
             *(list(self.columns.values()) + list(self.table_args)),
             schema=schema,
-            **self.table_kwargs
+            **self.table_kwargs,
         )
 
         for const in (
@@ -353,14 +371,32 @@ class ApplyBatchImpl:
     def _gather_indexes_from_both_tables(self) -> List["Index"]:
         assert self.new_table is not None
         idx: List[Index] = []
-        idx.extend(self.indexes.values())
+
+        for idx_existing in self.indexes.values():
+            # this is a lift-and-move from Table.to_metadata
+
+            if idx_existing._column_flag:  # type: ignore
+                continue
+
+            idx_copy = Index(
+                idx_existing.name,
+                unique=idx_existing.unique,
+                *[
+                    _copy_expression(expr, self.new_table)
+                    for expr in _idx_table_bound_expressions(idx_existing)
+                ],
+                _table=self.new_table,
+                **idx_existing.kwargs,
+            )
+            idx.append(idx_copy)
+
         for index in self.new_indexes.values():
             idx.append(
                 Index(
                     index.name,
                     unique=index.unique,
                     *[self.new_table.c[col] for col in index.columns.keys()],
-                    **index.kwargs
+                    **index.kwargs,
                 )
             )
         return idx
@@ -401,7 +437,7 @@ class ApplyBatchImpl:
                             for elem in constraint.elements
                         ]
                     ],
-                    schema=referent_schema
+                    schema=referent_schema,
                 )
 
     def _create(self, op_impl: "DefaultImpl") -> None:
@@ -453,7 +489,7 @@ class ApplyBatchImpl:
         type_: Optional["TypeEngine"] = None,
         autoincrement: None = None,
         comment: Union[str, "Literal[False]"] = False,
-        **kw
+        **kw,
     ) -> None:
         existing = self.columns[column_name]
         existing_transfer: Dict[str, Any] = self.column_transfers[column_name]
@@ -475,8 +511,8 @@ class ApplyBatchImpl:
                     and resolved_existing_type.name  # type:ignore[attr-defined]  # noqa E501
                 ):
                     self.named_constraints.pop(
-                        resolved_existing_type.name,
-                        None,  # type:ignore[attr-defined]
+                        resolved_existing_type.name,  # type:ignore[attr-defined]  # noqa E501
+                        None,
                     )
 
         if type_ is not None:
@@ -574,7 +610,7 @@ class ApplyBatchImpl:
         column: "Column",
         insert_before: Optional[str] = None,
         insert_after: Optional[str] = None,
-        **kw
+        **kw,
     ) -> None:
         self._setup_dependencies_for_add_column(
             column.name, insert_before, insert_after
@@ -647,7 +683,8 @@ class ApplyBatchImpl:
                     if col_const.name == const.name:
                         self.columns[col.name].constraints.remove(col_const)
             else:
-                const = self.named_constraints.pop(cast(str, const.name))
+                assert const.name
+                const = self.named_constraints.pop(const.name)
         except KeyError:
             if _is_type_bound(const):
                 # type-bound constraints are only included in the new
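
The new guard raises up front when batch mode would need to reflect the table while running offline; as the error text says, passing a complete `Table` via `copy_from` lets the move-and-copy recreate proceed without a connection. A sketch of that workaround; the table definition is illustrative:

```python
import sqlalchemy as sa
from alembic import op

accounts = sa.Table(
    "accounts",
    sa.MetaData(),
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("name", sa.String(50)),
)

def upgrade() -> None:
    # no reflection required, so this also works under `alembic upgrade --sql`
    with op.batch_alter_table("accounts", copy_from=accounts) as batch_op:
        batch_op.drop_column("name")
```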
diff -pruN 1.7.6-1/alembic/operations/ops.py 1.8.1-2/alembic/operations/ops.py
--- 1.7.6-1/alembic/operations/ops.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/operations/ops.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from abc import abstractmethod
 import re
 from typing import Any
@@ -258,7 +260,7 @@ class CreatePrimaryKeyOp(AddConstraintOp
         table_name: str,
         columns: Sequence[str],
         schema: Optional[str] = None,
-        **kw
+        **kw,
     ) -> None:
         self.constraint_name = constraint_name
         self.table_name = table_name
@@ -383,7 +385,7 @@ class CreateUniqueConstraintOp(AddConstr
         table_name: str,
         columns: Sequence[str],
         schema: Optional[str] = None,
-        **kw
+        **kw,
     ) -> None:
         self.constraint_name = constraint_name
         self.table_name = table_name
@@ -434,7 +436,7 @@ class CreateUniqueConstraintOp(AddConstr
         table_name: str,
         columns: Sequence[str],
         schema: Optional[str] = None,
-        **kw
+        **kw,
     ) -> Any:
         """Issue a "create unique constraint" instruction using the
         current migration context.
@@ -483,7 +485,7 @@ class CreateUniqueConstraintOp(AddConstr
         operations: "BatchOperations",
         constraint_name: str,
         columns: Sequence[str],
-        **kw
+        **kw,
     ) -> Any:
         """Issue a "create unique constraint" instruction using the
         current batch migration context.
@@ -518,7 +520,7 @@ class CreateForeignKeyOp(AddConstraintOp
         referent_table: str,
         local_cols: List[str],
         remote_cols: List[str],
-        **kw
+        **kw,
     ) -> None:
         self.constraint_name = constraint_name
         self.source_table = source_table
@@ -600,7 +602,7 @@ class CreateForeignKeyOp(AddConstraintOp
         match: Optional[str] = None,
         source_schema: Optional[str] = None,
         referent_schema: Optional[str] = None,
-        **dialect_kw
+        **dialect_kw,
     ) -> Optional["Table"]:
         """Issue a "create foreign key" instruction using the
         current migration context.
@@ -678,7 +680,7 @@ class CreateForeignKeyOp(AddConstraintOp
         deferrable: None = None,
         initially: None = None,
         match: None = None,
-        **dialect_kw
+        **dialect_kw,
     ) -> None:
         """Issue a "create foreign key" instruction using the
         current batch migration context.
@@ -734,7 +736,7 @@ class CreateCheckConstraintOp(AddConstra
         table_name: str,
         condition: Union[str, "TextClause", "ColumnElement[Any]"],
         schema: Optional[str] = None,
-        **kw
+        **kw,
     ) -> None:
         self.constraint_name = constraint_name
         self.table_name = table_name
@@ -753,9 +755,7 @@ class CreateCheckConstraintOp(AddConstra
         return cls(
             ck_constraint.name,
             constraint_table.name,
-            cast(
-                "Union[TextClause, ColumnElement[Any]]", ck_constraint.sqltext
-            ),
+            cast("ColumnElement[Any]", ck_constraint.sqltext),
             schema=constraint_table.schema,
             **ck_constraint.dialect_kwargs,
         )
@@ -780,7 +780,7 @@ class CreateCheckConstraintOp(AddConstra
         table_name: str,
         condition: Union[str, "BinaryExpression"],
         schema: Optional[str] = None,
-        **kw
+        **kw,
     ) -> Optional["Table"]:
         """Issue a "create check constraint" instruction using the
         current migration context.
@@ -831,7 +831,7 @@ class CreateCheckConstraintOp(AddConstra
         operations: "BatchOperations",
         constraint_name: str,
         condition: "TextClause",
-        **kw
+        **kw,
     ) -> Optional["Table"]:
         """Issue a "create check constraint" instruction using the
         current batch migration context.
@@ -866,7 +866,7 @@ class CreateIndexOp(MigrateOperation):
         columns: Sequence[Union[str, "TextClause", "ColumnElement[Any]"]],
         schema: Optional[str] = None,
         unique: bool = False,
-        **kw
+        **kw,
     ) -> None:
         self.index_name = index_name
         self.table_name = table_name
@@ -917,7 +917,7 @@ class CreateIndexOp(MigrateOperation):
         columns: Sequence[Union[str, "TextClause", "Function"]],
         schema: Optional[str] = None,
         unique: bool = False,
-        **kw
+        **kw,
     ) -> Optional["Table"]:
         r"""Issue a "create index" instruction using the current
         migration context.
@@ -971,7 +971,7 @@ class CreateIndexOp(MigrateOperation):
         operations: "BatchOperations",
         index_name: str,
         columns: List[str],
-        **kw
+        **kw,
     ) -> Optional["Table"]:
         """Issue a "create index" instruction using the
         current batch migration context.
@@ -1003,7 +1003,7 @@ class DropIndexOp(MigrateOperation):
         table_name: Optional[str] = None,
         schema: Optional[str] = None,
         _reverse: Optional["CreateIndexOp"] = None,
-        **kw
+        **kw,
     ) -> None:
         self.index_name = index_name
         self.table_name = table_name
@@ -1050,7 +1050,7 @@ class DropIndexOp(MigrateOperation):
         index_name: str,
         table_name: Optional[str] = None,
         schema: Optional[str] = None,
-        **kw
+        **kw,
     ) -> Optional["Table"]:
         r"""Issue a "drop index" instruction using the current
         migration context.
@@ -1109,7 +1109,7 @@ class CreateTableOp(MigrateOperation):
         schema: Optional[str] = None,
         _namespace_metadata: Optional["MetaData"] = None,
         _constraints_included: bool = False,
-        **kw
+        **kw,
     ) -> None:
         self.table_name = table_name
         self.columns = columns
@@ -1326,7 +1326,7 @@ class DropTableOp(MigrateOperation):
         operations: "Operations",
         table_name: str,
         schema: Optional[str] = None,
-        **kw: Any
+        **kw: Any,
     ) -> None:
         r"""Issue a "drop table" instruction using the current
         migration context.
@@ -1607,7 +1607,7 @@ class AlterColumnOp(AlterTableOp):
         modify_server_default: Any = False,
         modify_name: Optional[str] = None,
         modify_type: Optional[Any] = None,
-        **kw
+        **kw,
     ) -> None:
         super(AlterColumnOp, self).__init__(table_name, schema=schema)
         self.column_name = column_name
@@ -1770,7 +1770,7 @@ class AlterColumnOp(AlterTableOp):
         existing_nullable: Optional[bool] = None,
         existing_comment: Optional[str] = None,
         schema: Optional[str] = None,
-        **kw
+        **kw,
     ) -> Optional["Table"]:
         r"""Issue an "alter column" instruction using the
         current migration context.
@@ -1897,7 +1897,7 @@ class AlterColumnOp(AlterTableOp):
         existing_comment: None = None,
         insert_before: None = None,
         insert_after: None = None,
-        **kw
+        **kw,
     ) -> Optional["Table"]:
         """Issue an "alter column" instruction using the current
         batch migration context.
@@ -1954,7 +1954,7 @@ class AddColumnOp(AlterTableOp):
         table_name: str,
         column: "Column",
         schema: Optional[str] = None,
-        **kw
+        **kw,
     ) -> None:
         super(AddColumnOp, self).__init__(table_name, schema=schema)
         self.column = column
@@ -2089,7 +2089,7 @@ class DropColumnOp(AlterTableOp):
         column_name: str,
         schema: Optional[str] = None,
         _reverse: Optional["AddColumnOp"] = None,
-        **kw
+        **kw,
     ) -> None:
         super(DropColumnOp, self).__init__(table_name, schema=schema)
         self.column_name = column_name
@@ -2146,7 +2146,7 @@ class DropColumnOp(AlterTableOp):
         table_name: str,
         column_name: str,
         schema: Optional[str] = None,
-        **kw
+        **kw,
     ) -> Optional["Table"]:
         """Issue a "drop column" instruction using the current
         migration context.
diff -pruN 1.7.6-1/alembic/operations/schemaobj.py 1.8.1-2/alembic/operations/schemaobj.py
--- 1.7.6-1/alembic/operations/schemaobj.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/operations/schemaobj.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from typing import Any
 from typing import Dict
 from typing import List
@@ -44,7 +46,7 @@ class SchemaObjects:
         table_name: str,
         cols: Sequence[str],
         schema: Optional[str] = None,
-        **dialect_kw
+        **dialect_kw,
     ) -> "PrimaryKeyConstraint":
         m = self.metadata()
         columns = [sa_schema.Column(n, NULLTYPE) for n in cols]
@@ -68,7 +70,7 @@ class SchemaObjects:
         referent_schema: Optional[str] = None,
         initially: Optional[str] = None,
         match: Optional[str] = None,
-        **dialect_kw
+        **dialect_kw,
     ) -> "ForeignKeyConstraint":
         m = self.metadata()
         if source == referent and source_schema == referent_schema:
@@ -79,14 +81,14 @@ class SchemaObjects:
                 referent,
                 m,
                 *[sa_schema.Column(n, NULLTYPE) for n in remote_cols],
-                schema=referent_schema
+                schema=referent_schema,
             )
 
         t1 = sa_schema.Table(
             source,
             m,
             *[sa_schema.Column(n, NULLTYPE) for n in t1_cols],
-            schema=source_schema
+            schema=source_schema,
         )
 
         tname = (
@@ -105,7 +107,7 @@ class SchemaObjects:
             ondelete=ondelete,
             deferrable=deferrable,
             initially=initially,
-            **dialect_kw
+            **dialect_kw,
         )
         t1.append_constraint(f)
 
@@ -117,13 +119,13 @@ class SchemaObjects:
         source: str,
         local_cols: Sequence[str],
         schema: Optional[str] = None,
-        **kw
+        **kw,
     ) -> "UniqueConstraint":
         t = sa_schema.Table(
             source,
             self.metadata(),
             *[sa_schema.Column(n, NULLTYPE) for n in local_cols],
-            schema=schema
+            schema=schema,
         )
         kw["name"] = name
         uq = sa_schema.UniqueConstraint(*[t.c[n] for n in local_cols], **kw)
@@ -138,7 +140,7 @@ class SchemaObjects:
         source: str,
         condition: Union[str, "TextClause", "ColumnElement[Any]"],
         schema: Optional[str] = None,
-        **kw
+        **kw,
     ) -> Union["CheckConstraint"]:
         t = sa_schema.Table(
             source,
@@ -156,7 +158,7 @@ class SchemaObjects:
         table_name: str,
         type_: Optional[str],
         schema: Optional[str] = None,
-        **kw
+        **kw,
     ) -> Any:
         t = self.table(table_name, schema=schema)
         types: Dict[Optional[str], Any] = {
@@ -214,7 +216,8 @@ class SchemaObjects:
 
         constraints = [
             sqla_compat._copy(elem, target_table=t)
-            if getattr(elem, "parent", None) is not None
+            if getattr(elem, "parent", None) is not t
+            and getattr(elem, "parent", None) is not None
             else elem
             for elem in columns
             if isinstance(elem, (Constraint, Index))
@@ -236,7 +239,7 @@ class SchemaObjects:
         tablename: Optional[str],
         columns: Sequence[Union[str, "TextClause", "ColumnElement[Any]"]],
         schema: Optional[str] = None,
-        **kw
+        **kw,
     ) -> "Index":
         t = sa_schema.Table(
             tablename or "no_table",
@@ -247,7 +250,7 @@ class SchemaObjects:
         idx = sa_schema.Index(
             name,
             *[util.sqla_compat._textual_index_column(t, n) for n in columns],
-            **kw
+            **kw,
         )
         return idx
 
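The extra `is not t` comparison skips copying constraints that are already attached to the target table, so only elements owned by some other table get cloned. The guard in isolation, with hypothetical names:

```python
def copy_if_foreign(elem, target_table, copy):
    parent = getattr(elem, "parent", None)
    # clone constraints that belong to a different table; leave both
    # unattached elements and ones already on target_table untouched
    if parent is not None and parent is not target_table:
        return copy(elem)
    return elem
```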
diff -pruN 1.7.6-1/alembic/op.pyi 1.8.1-2/alembic/op.pyi
--- 1.7.6-1/alembic/op.pyi	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/op.pyi	2022-07-13 14:17:20.000000000 +0000
@@ -1,11 +1,16 @@
 # ### this file stubs are generated by tools/write_pyi.py - do not edit ###
 # ### imports are manually managed
-
+from contextlib import contextmanager
 from typing import Any
 from typing import Callable
+from typing import Dict
+from typing import Iterator
 from typing import List
+from typing import Literal
+from typing import Mapping
 from typing import Optional
 from typing import Sequence
+from typing import Tuple
 from typing import Type
 from typing import TYPE_CHECKING
 from typing import Union
@@ -27,14 +32,15 @@ if TYPE_CHECKING:
     from sqlalchemy.sql.type_api import TypeEngine
     from sqlalchemy.util import immutabledict
 
+    from .operations.ops import BatchOperations
     from .operations.ops import MigrateOperation
     from .util.sqla_compat import _literal_bindparam
 
 ### end imports ###
 
 def add_column(
-    table_name: str, column: "Column", schema: Optional[str] = None
-) -> Optional["Table"]:
+    table_name: str, column: Column, schema: Optional[str] = None
+) -> Optional[Table]:
     """Issue an "add column" instruction using the current
     migration context.
 
@@ -91,16 +97,16 @@ def alter_column(
     comment: Union[str, bool, None] = False,
     server_default: Any = False,
     new_column_name: Optional[str] = None,
-    type_: Union["TypeEngine", Type["TypeEngine"], None] = None,
-    existing_type: Union["TypeEngine", Type["TypeEngine"], None] = None,
+    type_: Union[TypeEngine, Type[TypeEngine], None] = None,
+    existing_type: Union[TypeEngine, Type[TypeEngine], None] = None,
     existing_server_default: Union[
-        str, bool, "Identity", "Computed", None
+        str, bool, Identity, Computed, None
     ] = False,
     existing_nullable: Optional[bool] = None,
     existing_comment: Optional[str] = None,
     schema: Optional[str] = None,
     **kw
-) -> Optional["Table"]:
+) -> Optional[Table]:
     """Issue an "alter column" instruction using the
     current migration context.
 
@@ -190,18 +196,19 @@ def alter_column(
 
     """
 
+@contextmanager
 def batch_alter_table(
-    table_name,
-    schema=None,
-    recreate="auto",
-    partial_reordering=None,
-    copy_from=None,
-    table_args=(),
-    table_kwargs=immutabledict({}),
-    reflect_args=(),
-    reflect_kwargs=immutabledict({}),
-    naming_convention=None,
-):
+    table_name: str,
+    schema: Optional[str] = None,
+    recreate: Literal["auto", "always", "never"] = "auto",
+    partial_reordering: Optional[tuple] = None,
+    copy_from: Optional["Table"] = None,
+    table_args: Tuple[Any, ...] = (),
+    table_kwargs: Mapping[str, Any] = immutabledict({}),
+    reflect_args: Tuple[Any, ...] = (),
+    reflect_kwargs: Mapping[str, Any] = immutabledict({}),
+    naming_convention: Optional[Dict[str, str]] = None,
+) -> Iterator["BatchOperations"]:
     """Invoke a series of per-table migrations in batch.
 
     Batch mode allows a series of operations specific to a table
@@ -340,7 +347,7 @@ def batch_alter_table(
     """
 
 def bulk_insert(
-    table: Union["Table", "TableClause"],
+    table: Union[Table, TableClause],
     rows: List[dict],
     multiinsert: bool = True,
 ) -> None:
@@ -422,10 +429,10 @@ def bulk_insert(
 def create_check_constraint(
     constraint_name: Optional[str],
     table_name: str,
-    condition: Union[str, "BinaryExpression"],
+    condition: Union[str, BinaryExpression],
     schema: Optional[str] = None,
     **kw
-) -> Optional["Table"]:
+) -> Optional[Table]:
     """Issue a "create check constraint" instruction using the
     current migration context.
 
@@ -469,7 +476,7 @@ def create_check_constraint(
 
 def create_exclude_constraint(
     constraint_name: str, table_name: str, *elements: Any, **kw: Any
-) -> Optional["Table"]:
+) -> Optional[Table]:
     """Issue an alter to create an EXCLUDE constraint using the
     current migration context.
 
@@ -521,7 +528,7 @@ def create_foreign_key(
     source_schema: Optional[str] = None,
     referent_schema: Optional[str] = None,
     **dialect_kw
-) -> Optional["Table"]:
+) -> Optional[Table]:
     """Issue a "create foreign key" instruction using the
     current migration context.
 
@@ -570,11 +577,11 @@ def create_foreign_key(
 def create_index(
     index_name: str,
     table_name: str,
-    columns: Sequence[Union[str, "TextClause", "Function"]],
+    columns: Sequence[Union[str, TextClause, Function]],
     schema: Optional[str] = None,
     unique: bool = False,
     **kw
-) -> Optional["Table"]:
+) -> Optional[Table]:
     """Issue a "create index" instruction using the current
     migration context.
 
@@ -622,7 +629,7 @@ def create_primary_key(
     table_name: str,
     columns: List[str],
     schema: Optional[str] = None,
-) -> Optional["Table"]:
+) -> Optional[Table]:
     """Issue a "create primary key" instruction using the current
     migration context.
 
@@ -660,7 +667,7 @@ def create_primary_key(
 
     """
 
-def create_table(table_name: str, *columns, **kw) -> Optional["Table"]:
+def create_table(table_name: str, *columns, **kw) -> Optional[Table]:
     """Issue a "create table" instruction using the current migration
     context.
 
@@ -743,7 +750,7 @@ def create_table_comment(
     comment: Optional[str],
     existing_comment: None = None,
     schema: Optional[str] = None,
-) -> Optional["Table"]:
+) -> Optional[Table]:
     """Emit a COMMENT ON operation to set the comment for a table.
 
     .. versionadded:: 1.0.6
@@ -811,7 +818,7 @@ def create_unique_constraint(
 
 def drop_column(
     table_name: str, column_name: str, schema: Optional[str] = None, **kw
-) -> Optional["Table"]:
+) -> Optional[Table]:
     """Issue a "drop column" instruction using the current
     migration context.
 
@@ -854,7 +861,7 @@ def drop_constraint(
     table_name: str,
     type_: Optional[str] = None,
     schema: Optional[str] = None,
-) -> Optional["Table"]:
+) -> Optional[Table]:
     """Drop a constraint of the given name, typically via DROP CONSTRAINT.
 
     :param constraint_name: name of the constraint.
@@ -873,7 +880,7 @@ def drop_index(
     table_name: Optional[str] = None,
     schema: Optional[str] = None,
     **kw
-) -> Optional["Table"]:
+) -> Optional[Table]:
     """Issue a "drop index" instruction using the current
     migration context.
 
@@ -921,7 +928,7 @@ def drop_table_comment(
     table_name: str,
     existing_comment: Optional[str] = None,
     schema: Optional[str] = None,
-) -> Optional["Table"]:
+) -> Optional[Table]:
     """Issue a "drop table comment" operation to
     remove an existing comment set on a table.
 
@@ -940,8 +947,8 @@ def drop_table_comment(
     """
 
 def execute(
-    sqltext: Union[str, "TextClause", "Update"], execution_options: None = None
-) -> Optional["Table"]:
+    sqltext: Union[str, TextClause, Update], execution_options: None = None
+) -> Optional[Table]:
     """Execute the given SQL using the current migration context.
 
     The given SQL can be a plain string, e.g.::
@@ -1024,7 +1031,7 @@ def execute(
      :meth:`sqlalchemy.engine.Connection.execution_options`.
     """
 
-def f(name: str) -> "conv":
+def f(name: str) -> conv:
     """Indicate a string name that has already had a naming convention
     applied to it.
 
@@ -1061,7 +1068,7 @@ def f(name: str) -> "conv":
 
     """
 
-def get_bind() -> "Connection":
+def get_bind() -> Connection:
     """Return the current 'bind'.
 
     Under normal circumstances, this is the
@@ -1134,7 +1141,7 @@ def inline_literal(
 
     """
 
-def invoke(operation: "MigrateOperation") -> Any:
+def invoke(operation: MigrateOperation) -> Any:
     """Given a :class:`.MigrateOperation`, invoke it in terms of
     this :class:`.Operations` instance.
 
@@ -1161,7 +1168,7 @@ def register_operation(
 
 def rename_table(
     old_table_name: str, new_table_name: str, schema: Optional[str] = None
-) -> Optional["Table"]:
+) -> Optional[Table]:
     """Emit an ALTER TABLE to rename a table.
 
     :param old_table_name: old name.
diff -pruN 1.7.6-1/alembic/runtime/environment.py 1.8.1-2/alembic/runtime/environment.py
--- 1.7.6-1/alembic/runtime/environment.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/runtime/environment.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from typing import Callable
 from typing import ContextManager
 from typing import Dict
@@ -345,7 +347,7 @@ class EnvironmentContext(util.ModuleClsP
         sqlalchemy_module_prefix: str = "sa.",
         user_module_prefix: Optional[str] = None,
         on_version_apply: Optional[Callable] = None,
-        **kw
+        **kw,
     ) -> None:
         """Configure a :class:`.MigrationContext` within this
         :class:`.EnvironmentContext` which will provide database
diff -pruN 1.7.6-1/alembic/runtime/migration.py 1.8.1-2/alembic/runtime/migration.py
--- 1.7.6-1/alembic/runtime/migration.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/runtime/migration.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from contextlib import contextmanager
 import logging
 import sys
@@ -39,6 +41,7 @@ if TYPE_CHECKING:
     from ..config import Config
     from ..script.base import Script
     from ..script.base import ScriptDirectory
+    from ..script.revision import _RevisionOrBase
     from ..script.revision import Revision
     from ..script.revision import RevisionMap
 
@@ -516,7 +519,7 @@ class MigrationContext:
             elif start_from_rev is not None and self.script:
 
                 start_from_rev = [
-                    self.script.get_revision(sfr).revision
+                    cast("Script", self.script.get_revision(sfr)).revision
                     for sfr in util.to_list(start_from_rev)
                     if sfr not in (None, "base")
                 ]
@@ -860,15 +863,15 @@ class MigrationInfo:
 
     """
 
-    is_upgrade: bool = None  # type:ignore[assignment]
+    is_upgrade: bool
     """True/False: indicates whether this operation ascends or descends the
     version tree."""
 
-    is_stamp: bool = None  # type:ignore[assignment]
+    is_stamp: bool
     """True/False: indicates whether this operation is a stamp (i.e. whether
     it results in any actual database operations)."""
 
-    up_revision_id: Optional[str] = None
+    up_revision_id: Optional[str]
     """Version string corresponding to :attr:`.Revision.revision`.
 
     In the case of a stamp operation, it is advised to use the
@@ -882,10 +885,10 @@ class MigrationInfo:
 
     """
 
-    up_revision_ids: Tuple[str, ...] = None  # type:ignore[assignment]
+    up_revision_ids: Tuple[str, ...]
     """Tuple of version strings corresponding to :attr:`.Revision.revision`.
 
-    In the majority of cases, this tuple will be a single value, synonomous
+    In the majority of cases, this tuple will be a single value, synonymous
     with the scalar value of :attr:`.MigrationInfo.up_revision_id`.
     It can be multiple revision identifiers only in the case of an
     ``alembic stamp`` operation which is moving downwards from multiple
@@ -893,7 +896,7 @@ class MigrationInfo:
 
     """
 
-    down_revision_ids: Tuple[str, ...] = None  # type:ignore[assignment]
+    down_revision_ids: Tuple[str, ...]
     """Tuple of strings representing the base revisions of this migration step.
 
     If empty, this represents a root revision; otherwise, the first item
@@ -901,7 +904,7 @@ class MigrationInfo:
     from dependencies.
     """
 
-    revision_map: "RevisionMap" = None  # type:ignore[assignment]
+    revision_map: "RevisionMap"
     """The revision map inside of which this operation occurs."""
 
     def __init__(
@@ -950,7 +953,7 @@ class MigrationInfo:
         )
 
     @property
-    def up_revision(self) -> "Revision":
+    def up_revision(self) -> Optional[Revision]:
         """Get :attr:`~.MigrationInfo.up_revision_id` as
         a :class:`.Revision`.
 
@@ -958,25 +961,25 @@ class MigrationInfo:
         return self.revision_map.get_revision(self.up_revision_id)
 
     @property
-    def up_revisions(self) -> Tuple["Revision", ...]:
+    def up_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]:
         """Get :attr:`~.MigrationInfo.up_revision_ids` as a
         :class:`.Revision`."""
         return self.revision_map.get_revisions(self.up_revision_ids)
 
     @property
-    def down_revisions(self) -> Tuple["Revision", ...]:
+    def down_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]:
         """Get :attr:`~.MigrationInfo.down_revision_ids` as a tuple of
         :class:`Revisions <.Revision>`."""
         return self.revision_map.get_revisions(self.down_revision_ids)
 
     @property
-    def source_revisions(self) -> Tuple["Revision", ...]:
+    def source_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]:
         """Get :attr:`~MigrationInfo.source_revision_ids` as a tuple of
         :class:`Revisions <.Revision>`."""
         return self.revision_map.get_revisions(self.source_revision_ids)
 
     @property
-    def destination_revisions(self) -> Tuple["Revision", ...]:
+    def destination_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]:
         """Get :attr:`~MigrationInfo.destination_revision_ids` as a tuple of
         :class:`Revisions <.Revision>`."""
         return self.revision_map.get_revisions(self.destination_revision_ids)
@@ -1059,7 +1062,7 @@ class RevisionStep(MigrationStep):
         )
 
     @property
-    def doc(self):
+    def doc(self) -> str:
         return self.revision.doc
 
     @property
@@ -1264,7 +1267,7 @@ class StampStep(MigrationStep):
         self.migration_fn = self.stamp_revision
         self.revision_map = revision_map
 
-    doc = None
+    doc: None = None
 
     def stamp_revision(self, **kw) -> None:
         return None
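
The `MigrationInfo` attributes drop their `= None` class-level placeholders because `__init__` always assigns them; consumers such as an `on_version_apply` hook can rely on the declared types. A hedged env.py sketch, using the keyword signature documented for `on_version_apply`:

```python
def log_version_apply(*, ctx, step, heads, run_args, **kw):
    # `step` is a MigrationInfo; up_revision_ids and is_upgrade are now
    # guaranteed to be set rather than defaulting to None
    if not step.is_stamp:
        print("applying", step.up_revision_ids, "upgrade:", step.is_upgrade)

# in env.py, passed alongside the usual configure() arguments:
# context.configure(connection=connection, on_version_apply=log_version_apply)
```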
diff -pruN 1.7.6-1/alembic/script/base.py 1.8.1-2/alembic/script/base.py
--- 1.7.6-1/alembic/script/base.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/script/base.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from contextlib import contextmanager
 import datetime
 import os
@@ -21,11 +23,13 @@ from . import revision
 from . import write_hooks
 from .. import util
 from ..runtime import migration
+from ..util import not_none
 
 if TYPE_CHECKING:
     from ..config import Config
     from ..runtime.migration import RevisionStep
     from ..runtime.migration import StampStep
+    from ..script.revision import Revision
 
 try:
     from dateutil import tz
@@ -112,7 +116,7 @@ class ScriptDirectory:
         else:
             return (os.path.abspath(os.path.join(self.dir, "versions")),)
 
-    def _load_revisions(self) -> Iterator["Script"]:
+    def _load_revisions(self) -> Iterator[Script]:
         if self.version_locations:
             paths = [
                 vers
@@ -139,7 +143,7 @@ class ScriptDirectory:
                 yield script
 
     @classmethod
-    def from_config(cls, config: "Config") -> "ScriptDirectory":
+    def from_config(cls, config: Config) -> ScriptDirectory:
         """Produce a new :class:`.ScriptDirectory` given a :class:`.Config`
         instance.
 
@@ -152,14 +156,16 @@ class ScriptDirectory:
             raise util.CommandError(
                 "No 'script_location' key " "found in configuration."
             )
-        truncate_slug_length = cast(
-            Optional[int], config.get_main_option("truncate_slug_length")
-        )
-        if truncate_slug_length is not None:
-            truncate_slug_length = int(truncate_slug_length)
-
-        version_locations = config.get_main_option("version_locations")
-        if version_locations:
+        truncate_slug_length: Optional[int]
+        tsl = config.get_main_option("truncate_slug_length")
+        if tsl is not None:
+            truncate_slug_length = int(tsl)
+        else:
+            truncate_slug_length = None
+
+        version_locations_str = config.get_main_option("version_locations")
+        version_locations: Optional[List[str]]
+        if version_locations_str:
             version_path_separator = config.get_main_option(
                 "version_path_separator"
             )
@@ -173,7 +179,9 @@ class ScriptDirectory:
             }
 
             try:
-                split_char = split_on_path[version_path_separator]
+                split_char: Optional[str] = split_on_path[
+                    version_path_separator
+                ]
             except KeyError as ke:
                 raise ValueError(
                     "'%s' is not a valid value for "
@@ -183,17 +191,15 @@ class ScriptDirectory:
             else:
                 if split_char is None:
                     # legacy behaviour for backwards compatibility
-                    vl = _split_on_space_comma.split(
-                        cast(str, version_locations)
+                    version_locations = _split_on_space_comma.split(
+                        version_locations_str
                     )
-                    version_locations: List[str] = vl  # type: ignore[no-redef]
                 else:
-                    vl = [
-                        x
-                        for x in cast(str, version_locations).split(split_char)
-                        if x
+                    version_locations = [
+                        x for x in version_locations_str.split(split_char) if x
                     ]
-                    version_locations: List[str] = vl  # type: ignore[no-redef]
+        else:
+            version_locations = None
 
         prepend_sys_path = config.get_main_option("prepend_sys_path")
         if prepend_sys_path:
@@ -209,7 +215,7 @@ class ScriptDirectory:
             truncate_slug_length=truncate_slug_length,
             sourceless=config.get_main_option("sourceless") == "true",
             output_encoding=config.get_main_option("output_encoding", "utf-8"),
-            version_locations=cast("Optional[List[str]]", version_locations),
+            version_locations=version_locations,
             timezone=config.get_main_option("timezone"),
             hook_config=config.get_section("post_write_hooks", {}),
         )
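
The rewritten block above swaps cast()-based narrowing for explicitly
typed locals; the parsing behavior is unchanged. Roughly, it reduces to
the sketch below (the _split_on_space_comma pattern is an assumption
inferred from its name, and the lookup of the "space"/":"/";" separator
values is collapsed for brevity):

    import os
    import re

    # assumed legacy splitter: spaces and/or commas
    _split_on_space_comma = re.compile(r", *|(?: +)")

    def parse_version_locations(raw, separator):
        # raw: ini value of version_locations; separator: value of
        # version_path_separator, or None when the key is absent
        if not raw:
            return None
        if separator is None:
            # legacy behaviour for backwards compatibility
            return _split_on_space_comma.split(raw)
        split_char = os.pathsep if separator == "os" else separator
        return [x for x in raw.split(split_char) if x]
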
@@ -262,7 +268,7 @@ class ScriptDirectory:
 
     def walk_revisions(
         self, base: str = "base", head: str = "heads"
-    ) -> Iterator["Script"]:
+    ) -> Iterator[Script]:
         """Iterate through all revisions.
 
         :param base: the base revision, or "base" to start from the
@@ -279,34 +285,24 @@ class ScriptDirectory:
             ):
                 yield cast(Script, rev)
 
-    def get_revisions(self, id_: _RevIdType) -> Tuple["Script", ...]:
+    def get_revisions(self, id_: _RevIdType) -> Tuple[Optional[Script], ...]:
         """Return the :class:`.Script` instance with the given rev identifier,
         symbolic name, or sequence of identifiers.
 
         """
         with self._catch_revision_errors():
             return cast(
-                "Tuple[Script, ...]", self.revision_map.get_revisions(id_)
+                Tuple[Optional[Script], ...],
+                self.revision_map.get_revisions(id_),
             )
 
-    def get_all_current(self, id_: Tuple[str, ...]) -> Set["Script"]:
+    def get_all_current(self, id_: Tuple[str, ...]) -> Set[Optional[Script]]:
         with self._catch_revision_errors():
-            top_revs = cast(
-                "Set[Script]",
-                set(self.revision_map.get_revisions(id_)),
-            )
-            top_revs.update(
-                cast(
-                    "Iterator[Script]",
-                    self.revision_map._get_ancestor_nodes(
-                        list(top_revs), include_dependencies=True
-                    ),
-                )
+            return cast(
+                Set[Optional[Script]], self.revision_map._get_all_current(id_)
             )
-            top_revs = self.revision_map._filter_into_branch_heads(top_revs)
-            return top_revs
 
-    def get_revision(self, id_: str) -> "Script":
+    def get_revision(self, id_: str) -> Optional[Script]:
         """Return the :class:`.Script` instance with the given rev id.
 
         .. seealso::
@@ -316,7 +312,7 @@ class ScriptDirectory:
         """
 
         with self._catch_revision_errors():
-            return cast(Script, self.revision_map.get_revision(id_))
+            return cast(Optional[Script], self.revision_map.get_revision(id_))
 
     def as_revision_number(
         self, id_: Optional[str]
@@ -335,7 +331,12 @@ class ScriptDirectory:
         else:
             return rev[0]
 
-    def iterate_revisions(self, upper, lower):
+    def iterate_revisions(
+        self,
+        upper: Union[str, Tuple[str, ...], None],
+        lower: Union[str, Tuple[str, ...], None],
+        **kw: Any,
+    ) -> Iterator[Script]:
         """Iterate through script revisions, starting at the given
         upper revision identifier and ending at the lower.
 
@@ -351,9 +352,12 @@ class ScriptDirectory:
             :meth:`.RevisionMap.iterate_revisions`
 
         """
-        return self.revision_map.iterate_revisions(upper, lower)
+        return cast(
+            Iterator[Script],
+            self.revision_map.iterate_revisions(upper, lower, **kw),
+        )
 
-    def get_current_head(self):
+    def get_current_head(self) -> Optional[str]:
         """Return the current head revision.
 
         If the script directory has multiple heads
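
ScriptDirectory.iterate_revisions() now delegates to the identically
named RevisionMap method, forwarding **kw and casting the result to
Iterator[Script]; the public call signature is unchanged. Typical usage,
assuming a project with a standard alembic.ini on disk:

    from alembic.config import Config
    from alembic.script import ScriptDirectory

    script = ScriptDirectory.from_config(Config("alembic.ini"))
    # walk from the current head(s) down to base; items are Script objects
    for sc in script.iterate_revisions("heads", "base"):
        print(sc.revision, sc.doc)
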
@@ -423,36 +427,36 @@ class ScriptDirectory:
 
     def _upgrade_revs(
         self, destination: str, current_rev: str
-    ) -> List["RevisionStep"]:
+    ) -> List[RevisionStep]:
         with self._catch_revision_errors(
             ancestor="Destination %(end)s is not a valid upgrade "
             "target from current head(s)",
             end=destination,
         ):
-            revs = self.revision_map.iterate_revisions(
+            revs = self.iterate_revisions(
                 destination, current_rev, implicit_base=True
             )
             return [
                 migration.MigrationStep.upgrade_from_script(
-                    self.revision_map, cast(Script, script)
+                    self.revision_map, script
                 )
                 for script in reversed(list(revs))
             ]
 
     def _downgrade_revs(
         self, destination: str, current_rev: Optional[str]
-    ) -> List["RevisionStep"]:
+    ) -> List[RevisionStep]:
         with self._catch_revision_errors(
             ancestor="Destination %(end)s is not a valid downgrade "
             "target from current head(s)",
             end=destination,
         ):
-            revs = self.revision_map.iterate_revisions(
+            revs = self.iterate_revisions(
                 current_rev, destination, select_for_downgrade=True
             )
             return [
                 migration.MigrationStep.downgrade_from_script(
-                    self.revision_map, cast(Script, script)
+                    self.revision_map, script
                 )
                 for script in revs
             ]
@@ -472,12 +476,14 @@ class ScriptDirectory:
             if not revision:
                 revision = "base"
 
-            filtered_heads: List["Script"] = []
+            filtered_heads: List[Script] = []
             for rev in util.to_tuple(revision):
                 if rev:
                     filtered_heads.extend(
                         self.revision_map.filter_for_lineage(
-                            heads_revs, rev, include_dependencies=True
+                            cast(Sequence[Script], heads_revs),
+                            rev,
+                            include_dependencies=True,
                         )
                     )
             filtered_heads = util.unique_list(filtered_heads)
@@ -573,7 +579,7 @@ class ScriptDirectory:
             src,
             dest,
             self.output_encoding,
-            **kw
+            **kw,
         )
 
     def _copy_file(self, src: str, dest: str) -> None:
@@ -621,8 +627,8 @@ class ScriptDirectory:
         branch_labels: Optional[str] = None,
         version_path: Optional[str] = None,
         depends_on: Optional[_RevIdType] = None,
-        **kw: Any
-    ) -> Optional["Script"]:
+        **kw: Any,
+    ) -> Optional[Script]:
         """Generate a new revision file.
 
         This runs the ``script.py.mako`` template, given
@@ -656,7 +662,12 @@ class ScriptDirectory:
                 "or perform a merge."
             )
         ):
-            heads = self.revision_map.get_revisions(head)
+            heads = cast(
+                Tuple[Optional["Revision"], ...],
+                self.revision_map.get_revisions(head),
+            )
+            for h in heads:
+                assert h != "base"
 
         if len(set(heads)) != len(heads):
             raise util.CommandError("Duplicate head revisions specified")
@@ -702,17 +713,20 @@ class ScriptDirectory:
                         % head_.revision
                     )
 
+        resolved_depends_on: Optional[List[str]]
         if depends_on:
             with self._catch_revision_errors():
-                depends_on = [
+                resolved_depends_on = [
                     dep
                     if dep in rev.branch_labels  # maintain branch labels
                     else rev.revision  # resolve partial revision identifiers
                     for rev, dep in [
-                        (self.revision_map.get_revision(dep), dep)
+                        (not_none(self.revision_map.get_revision(dep)), dep)
                         for dep in util.to_list(depends_on)
                     ]
                 ]
+        else:
+            resolved_depends_on = None
 
         self._generate_template(
             os.path.join(self.dir, "script.py.mako"),
@@ -722,13 +736,11 @@ class ScriptDirectory:
                 tuple(h.revision if h is not None else None for h in heads)
             ),
             branch_labels=util.to_tuple(branch_labels),
-            depends_on=revision.tuple_rev_as_scalar(
-                cast("Optional[List[str]]", depends_on)
-            ),
+            depends_on=revision.tuple_rev_as_scalar(resolved_depends_on),
             create_date=create_date,
             comma=util.format_as_comma,
             message=message if message is not None else ("empty message"),
-            **kw
+            **kw,
         )
 
         post_write_hooks = self.hook_config
@@ -759,6 +771,7 @@ class ScriptDirectory:
         message: Optional[str],
         create_date: "datetime.datetime",
     ) -> str:
+        epoch = int(create_date.timestamp())
         slug = "_".join(_slug_re.findall(message or "")).lower()
         if len(slug) > self.truncate_slug_length:
             slug = slug[: self.truncate_slug_length].rsplit("_", 1)[0] + "_"
@@ -767,6 +780,7 @@ class ScriptDirectory:
             % {
                 "rev": rev_id,
                 "slug": slug,
+                "epoch": epoch,
                 "year": create_date.year,
                 "month": create_date.month,
                 "day": create_date.day,
@@ -801,13 +815,13 @@ class Script(revision.Revision):
             ),
         )
 
-    module: ModuleType = None  # type: ignore[assignment]
+    module: ModuleType
     """The Python module representing the actual script itself."""
 
-    path: str = None  # type: ignore[assignment]
+    path: str
     """Filesystem path of the script."""
 
-    _db_current_indicator = None
+    _db_current_indicator: Optional[bool] = None
     """Utility variable which when set will cause string output to indicate
     this is a "current" version in some database"""
 
@@ -939,7 +953,7 @@ class Script(revision.Revision):
     @classmethod
     def _from_path(
         cls, scriptdir: ScriptDirectory, path: str
-    ) -> Optional["Script"]:
+    ) -> Optional[Script]:
         dir_, filename = os.path.split(path)
         return cls._from_filename(scriptdir, dir_, filename)
 
@@ -969,7 +983,7 @@ class Script(revision.Revision):
     @classmethod
     def _from_filename(
         cls, scriptdir: ScriptDirectory, dir_: str, filename: str
-    ) -> Optional["Script"]:
+    ) -> Optional[Script]:
         if scriptdir.sourceless:
             py_match = _sourceless_rev_file.match(filename)
         else:
diff -pruN 1.7.6-1/alembic/script/revision.py 1.8.1-2/alembic/script/revision.py
--- 1.7.6-1/alembic/script/revision.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/script/revision.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import collections
 import re
 from typing import Any
@@ -11,6 +13,7 @@ from typing import Iterable
 from typing import Iterator
 from typing import List
 from typing import Optional
+from typing import overload
 from typing import Sequence
 from typing import Set
 from typing import Tuple
@@ -21,12 +24,11 @@ from typing import Union
 from sqlalchemy import util as sqlautil
 
 from .. import util
+from ..util import not_none
 
 if TYPE_CHECKING:
     from typing import Literal
 
-    from .base import Script
-
 _RevIdType = Union[str, Sequence[str]]
 _RevisionIdentifierType = Union[str, Tuple[str, ...], None]
 _RevisionOrStr = Union["Revision", str]
@@ -439,7 +441,7 @@ class RevisionMap:
                     "Revision %s referenced from %s is not present"
                     % (downrev, revision)
                 )
-            cast("Revision", map_[downrev]).add_nextrev(revision)
+            not_none(map_[downrev]).add_nextrev(revision)
 
         self._normalize_depends_on(revisions, map_)
 
@@ -502,8 +504,8 @@ class RevisionMap:
         return self.filter_for_lineage(self.bases, identifier)
 
     def get_revisions(
-        self, id_: Union[str, Collection[str], None]
-    ) -> Tuple["Revision", ...]:
+        self, id_: Union[str, Collection[Optional[str]], None]
+    ) -> Tuple[Optional[_RevisionOrBase], ...]:
         """Return the :class:`.Revision` instances with the given rev id
         or identifiers.
 
@@ -537,7 +539,8 @@ class RevisionMap:
                             select_heads = tuple(
                                 head
                                 for head in select_heads
-                                if branch_label in head.branch_labels
+                                if branch_label
+                                in is_revision(head).branch_labels
                             )
                         return tuple(
                             self._walk(head, steps=rint)
@@ -551,7 +554,7 @@ class RevisionMap:
                 for rev_id in resolved_id
             )
 
-    def get_revision(self, id_: Optional[str]) -> "Revision":
+    def get_revision(self, id_: Optional[str]) -> Optional[Revision]:
         """Return the :class:`.Revision` instance with the given rev id.
 
         If a symbolic name such as "head" or "base" is given, resolves
@@ -568,12 +571,11 @@ class RevisionMap:
         resolved_id, branch_label = self._resolve_revision_number(id_)
         if len(resolved_id) > 1:
             raise MultipleHeads(resolved_id, id_)
-        elif resolved_id:
-            resolved_id = resolved_id[0]  # type:ignore[assignment]
 
-        return self._revision_for_ident(cast(str, resolved_id), branch_label)
+        resolved: Union[str, Tuple[()]] = resolved_id[0] if resolved_id else ()
+        return self._revision_for_ident(resolved, branch_label)
 
-    def _resolve_branch(self, branch_label: str) -> "Revision":
+    def _resolve_branch(self, branch_label: str) -> Optional[Revision]:
         try:
             branch_rev = self._revision_map[branch_label]
         except KeyError:
@@ -587,25 +589,28 @@ class RevisionMap:
             else:
                 return nonbranch_rev
         else:
-            return cast("Revision", branch_rev)
+            return branch_rev
 
     def _revision_for_ident(
-        self, resolved_id: str, check_branch: Optional[str] = None
-    ) -> "Revision":
-        branch_rev: Optional["Revision"]
+        self,
+        resolved_id: Union[str, Tuple[()]],
+        check_branch: Optional[str] = None,
+    ) -> Optional[Revision]:
+        branch_rev: Optional[Revision]
         if check_branch:
             branch_rev = self._resolve_branch(check_branch)
         else:
             branch_rev = None
 
-        revision: Union["Revision", "Literal[False]"]
+        revision: Union[Optional[Revision], "Literal[False]"]
         try:
-            revision = cast("Revision", self._revision_map[resolved_id])
+            revision = self._revision_map[resolved_id]
         except KeyError:
             # break out to avoid misleading py3k stack traces
             revision = False
         revs: Sequence[str]
         if revision is False:
+            assert resolved_id
             # do a partial lookup
             revs = [
                 x
@@ -637,11 +642,11 @@ class RevisionMap:
                     resolved_id,
                 )
             else:
-                revision = cast("Revision", self._revision_map[revs[0]])
+                revision = self._revision_map[revs[0]]
 
-        revision = cast("Revision", revision)
         if check_branch and revision is not None:
             assert branch_rev is not None
+            assert resolved_id
             if not self._shares_lineage(
                 revision.revision, branch_rev.revision
             ):
@@ -653,11 +658,12 @@ class RevisionMap:
         return revision
 
     def _filter_into_branch_heads(
-        self, targets: Set["Script"]
-    ) -> Set["Script"]:
+        self, targets: Iterable[Optional[_RevisionOrBase]]
+    ) -> Set[Optional[_RevisionOrBase]]:
         targets = set(targets)
 
         for rev in list(targets):
+            assert rev
             if targets.intersection(
                 self._get_descendant_nodes([rev], include_dependencies=False)
             ).difference([rev]):
@@ -695,9 +701,11 @@ class RevisionMap:
         if not test_against_revs:
             return True
         if not isinstance(target, Revision):
-            target = self._revision_for_ident(target)
+            resolved_target = not_none(self._revision_for_ident(target))
+        else:
+            resolved_target = target
 
-        test_against_revs = [
+        resolved_test_against_revs = [
             self._revision_for_ident(test_against_rev)
             if not isinstance(test_against_rev, Revision)
             else test_against_rev
@@ -709,15 +717,17 @@ class RevisionMap:
         return bool(
             set(
                 self._get_descendant_nodes(
-                    [target], include_dependencies=include_dependencies
+                    [resolved_target],
+                    include_dependencies=include_dependencies,
                 )
             )
             .union(
                 self._get_ancestor_nodes(
-                    [target], include_dependencies=include_dependencies
+                    [resolved_target],
+                    include_dependencies=include_dependencies,
                 )
             )
-            .intersection(test_against_revs)
+            .intersection(resolved_test_against_revs)
         )
 
     def _resolve_revision_number(
@@ -768,7 +778,7 @@ class RevisionMap:
         inclusive: bool = False,
         assert_relative_length: bool = True,
         select_for_downgrade: bool = False,
-    ) -> Iterator["Revision"]:
+    ) -> Iterator[Revision]:
         """Iterate through script revisions, starting at the given
         upper revision identifier and ending at the lower.
 
@@ -795,11 +805,11 @@ class RevisionMap:
         )
 
         for node in self._topological_sort(revisions, heads):
-            yield self.get_revision(node)
+            yield not_none(self.get_revision(node))
 
     def _get_descendant_nodes(
         self,
-        targets: Collection["Revision"],
+        targets: Collection[Optional[_RevisionOrBase]],
         map_: Optional[_RevisionMapType] = None,
         check: bool = False,
         omit_immediate_dependencies: bool = False,
@@ -830,11 +840,11 @@ class RevisionMap:
 
     def _get_ancestor_nodes(
         self,
-        targets: Collection["Revision"],
+        targets: Collection[Optional[_RevisionOrBase]],
         map_: Optional[_RevisionMapType] = None,
         check: bool = False,
         include_dependencies: bool = True,
-    ) -> Iterator["Revision"]:
+    ) -> Iterator[Revision]:
 
         if include_dependencies:
 
@@ -853,17 +863,17 @@ class RevisionMap:
     def _iterate_related_revisions(
         self,
         fn: Callable,
-        targets: Collection["Revision"],
+        targets: Collection[Optional[_RevisionOrBase]],
         map_: Optional[_RevisionMapType],
         check: bool = False,
-    ) -> Iterator["Revision"]:
+    ) -> Iterator[Revision]:
         if map_ is None:
             map_ = self._revision_map
 
         seen = set()
-        todo: Deque["Revision"] = collections.deque()
-        for target in targets:
-
+        todo: Deque[Revision] = collections.deque()
+        for target_for in targets:
+            target = is_revision(target_for)
             todo.append(target)
             if check:
                 per_target = set()
@@ -902,7 +912,7 @@ class RevisionMap:
 
     def _topological_sort(
         self,
-        revisions: Collection["Revision"],
+        revisions: Collection[Revision],
         heads: Any,
     ) -> List[str]:
         """Yield revision ids of a collection of Revision objects in
@@ -1007,11 +1017,11 @@ class RevisionMap:
 
     def _walk(
         self,
-        start: Optional[Union[str, "Revision"]],
+        start: Optional[Union[str, Revision]],
         steps: int,
         branch_label: Optional[str] = None,
         no_overwalk: bool = True,
-    ) -> "Revision":
+    ) -> Optional[_RevisionOrBase]:
         """
         Walk the requested number of :steps up (steps > 0) or down (steps < 0)
         the revision tree.
@@ -1030,20 +1040,21 @@ class RevisionMap:
         else:
             initial = start
 
-        children: Sequence[_RevisionOrBase]
+        children: Sequence[Optional[_RevisionOrBase]]
         for _ in range(abs(steps)):
             if steps > 0:
+                assert initial != "base"
                 # Walk up
-                children = [
-                    rev
+                walk_up = [
+                    is_revision(rev)
                     for rev in self.get_revisions(
-                        self.bases
-                        if initial is None
-                        else cast("Revision", initial).nextrev
+                        self.bases if initial is None else initial.nextrev
                     )
                 ]
                 if branch_label:
-                    children = self.filter_for_lineage(children, branch_label)
+                    children = self.filter_for_lineage(walk_up, branch_label)
+                else:
+                    children = walk_up
             else:
                 # Walk down
                 if initial == "base":
@@ -1055,17 +1066,17 @@ class RevisionMap:
                         else initial.down_revision
                     )
                     if not children:
-                        children = cast("Tuple[Literal['base']]", ("base",))
+                        children = ("base",)
             if not children:
                 # This will return an invalid result if no_overwalk, otherwise
                 # further steps will stay where we are.
                 ret = None if no_overwalk else initial
-                return ret  # type:ignore[return-value]
+                return ret
             elif len(children) > 1:
                 raise RevisionError("Ambiguous walk")
             initial = children[0]
 
-        return cast("Revision", initial)
+        return initial
 
     def _parse_downgrade_target(
         self,
@@ -1116,9 +1127,27 @@ class RevisionMap:
                 if relative_revision:
                     # Find target revision relative to current state.
                     if branch_label:
+                        cr_tuple = util.to_tuple(current_revisions)
+                        symbol_list: Sequence[str]
                         symbol_list = self.filter_for_lineage(
-                            util.to_tuple(current_revisions), branch_label
+                            cr_tuple, branch_label
                         )
+                        if not symbol_list:
+                            # check the case where there are multiple branches
+                            # but currently a single head, since all other
+                            # branch heads are dependent on the current
+                            # single head.
+                            all_current = cast(
+                                Set[Revision], self._get_all_current(cr_tuple)
+                            )
+                            sl_all_current = self.filter_for_lineage(
+                                all_current, branch_label
+                            )
+                            symbol_list = [
+                                r.revision if r else r  # type: ignore[misc]
+                                for r in sl_all_current
+                            ]
+
                         assert len(symbol_list) == 1
                         symbol = symbol_list[0]
                     else:
@@ -1170,7 +1199,7 @@ class RevisionMap:
         current_revisions: _RevisionIdentifierType,
         target: _RevisionIdentifierType,
         assert_relative_length: bool,
-    ) -> Tuple["Revision", ...]:
+    ) -> Tuple[Optional[_RevisionOrBase], ...]:
         """
         Parse upgrade command syntax :target to retrieve the target revision,
         given the :current_revisions stamp of the database.
@@ -1188,26 +1217,27 @@ class RevisionMap:
             # No relative destination, target is absolute.
             return self.get_revisions(target)
 
-        current_revisions = util.to_tuple(current_revisions)
+        current_revisions_tup: Union[str, Collection[Optional[str]], None]
+        current_revisions_tup = util.to_tuple(current_revisions)
 
         branch_label, symbol, relative_str = match.groups()
         relative = int(relative_str)
         if relative > 0:
             if symbol is None:
-                if not current_revisions:
-                    current_revisions = (None,)
+                if not current_revisions_tup:
+                    current_revisions_tup = (None,)
                 # Try to filter to a single target (avoid ambiguous branches).
-                start_revs = current_revisions
+                start_revs = current_revisions_tup
                 if branch_label:
                     start_revs = self.filter_for_lineage(
-                        self.get_revisions(current_revisions), branch_label
+                        self.get_revisions(current_revisions_tup), branch_label
                     )
                     if not start_revs:
                         # The requested branch is not a head, so we need to
                         # backtrack to find a branchpoint.
                         active_on_branch = self.filter_for_lineage(
                             self._get_ancestor_nodes(
-                                self.get_revisions(current_revisions)
+                                self.get_revisions(current_revisions_tup)
                             ),
                             branch_label,
                         )
@@ -1294,6 +1324,7 @@ class RevisionMap:
             target_revision = None
         assert target_revision is None or isinstance(target_revision, Revision)
 
+        roots: List[Revision]
         # Find candidates to drop.
         if target_revision is None:
             # Downgrading back to base: find all tree roots.
@@ -1307,7 +1338,10 @@ class RevisionMap:
             roots = [target_revision]
         else:
             # Downgrading to fixed target: find all direct children.
-            roots = list(self.get_revisions(target_revision.nextrev))
+            roots = [
+                is_revision(rev)
+                for rev in self.get_revisions(target_revision.nextrev)
+            ]
 
         if branch_label and len(roots) > 1:
             # Need to filter roots.
@@ -1320,11 +1354,12 @@ class RevisionMap:
             }
             # Intersection gives the root revisions we are trying to
             # rollback with the downgrade.
-            roots = list(
-                self.get_revisions(
+            roots = [
+                is_revision(rev)
+                for rev in self.get_revisions(
                     {rev.revision for rev in roots}.intersection(ancestors)
                 )
-            )
+            ]
 
             # Ensure we didn't throw everything away when filtering branches.
             if len(roots) == 0:
@@ -1374,7 +1409,7 @@ class RevisionMap:
         inclusive: bool,
         implicit_base: bool,
         assert_relative_length: bool,
-    ) -> Tuple[Set["Revision"], Tuple[Optional[_RevisionOrBase]]]:
+    ) -> Tuple[Set[Revision], Tuple[Optional[_RevisionOrBase]]]:
         """
         Compute the set of required revisions specified by :upper, and the
         current set of active revisions specified by :lower. Find the
@@ -1386,11 +1421,14 @@ class RevisionMap:
         of the current/lower revisions. Dependencies from branches with
         different bases will not be included.
         """
-        targets: Collection["Revision"] = self._parse_upgrade_target(
-            current_revisions=lower,
-            target=upper,
-            assert_relative_length=assert_relative_length,
-        )
+        targets: Collection[Revision] = [
+            is_revision(rev)
+            for rev in self._parse_upgrade_target(
+                current_revisions=lower,
+                target=upper,
+                assert_relative_length=assert_relative_length,
+            )
+        ]
 
         # assert type(targets) is tuple, "targets should be a tuple"
 
@@ -1432,6 +1470,7 @@ class RevisionMap:
                 target=lower,
                 assert_relative_length=assert_relative_length,
             )
+            assert rev
             if rev == "base":
                 current_revisions = tuple()
                 lower = None
@@ -1449,19 +1488,31 @@ class RevisionMap:
 
         # Include the lower revision (=current_revisions?) in the iteration
         if inclusive:
-            needs.update(self.get_revisions(lower))
+            needs.update(is_revision(rev) for rev in self.get_revisions(lower))
         # By default, base is implicit as we want all dependencies returned.
         # Base is also implicit if lower = base
         # implicit_base=False -> only return direct downstreams of
         # current_revisions
         if current_revisions and not implicit_base:
             lower_descendents = self._get_descendant_nodes(
-                current_revisions, check=True, include_dependencies=False
+                [is_revision(rev) for rev in current_revisions],
+                check=True,
+                include_dependencies=False,
             )
             needs.intersection_update(lower_descendents)
 
         return needs, tuple(targets)  # type:ignore[return-value]
 
+    def _get_all_current(
+        self, id_: Tuple[str, ...]
+    ) -> Set[Optional[_RevisionOrBase]]:
+        top_revs: Set[Optional[_RevisionOrBase]]
+        top_revs = set(self.get_revisions(id_))
+        top_revs.update(
+            self._get_ancestor_nodes(list(top_revs), include_dependencies=True)
+        )
+        return self._filter_into_branch_heads(top_revs)
+
 
 class Revision:
     """Base class for revisioned objects.
@@ -1520,8 +1571,8 @@ class Revision:
         self,
         revision: str,
         down_revision: Optional[Union[str, Tuple[str, ...]]],
-        dependencies: Optional[Tuple[str, ...]] = None,
-        branch_labels: Optional[Tuple[str, ...]] = None,
+        dependencies: Optional[Union[str, Tuple[str, ...]]] = None,
+        branch_labels: Optional[Union[str, Tuple[str, ...]]] = None,
     ) -> None:
         if down_revision and revision in util.to_tuple(down_revision):
             raise LoopDetected(revision)
@@ -1545,7 +1596,7 @@ class Revision:
             args.append("branch_labels=%r" % (self.branch_labels,))
         return "%s(%s)" % (self.__class__.__name__, ", ".join(args))
 
-    def add_nextrev(self, revision: "Revision") -> None:
+    def add_nextrev(self, revision: Revision) -> None:
         self._all_nextrev = self._all_nextrev.union([revision.revision])
         if self.revision in revision._versioned_down_revisions:
             self.nextrev = self.nextrev.union([revision.revision])
@@ -1630,12 +1681,29 @@ class Revision:
         return len(self._versioned_down_revisions) > 1
 
 
+@overload
 def tuple_rev_as_scalar(
     rev: Optional[Sequence[str]],
 ) -> Optional[Union[str, Sequence[str]]]:
+    ...
+
+
+@overload
+def tuple_rev_as_scalar(
+    rev: Optional[Sequence[Optional[str]]],
+) -> Optional[Union[Optional[str], Sequence[Optional[str]]]]:
+    ...
+
+
+def tuple_rev_as_scalar(rev):
     if not rev:
         return None
     elif len(rev) == 1:
         return rev[0]
     else:
         return rev
+
+
+def is_revision(rev: Any) -> Revision:
+    assert isinstance(rev, Revision)
+    return rev
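
is_revision() is the assert-based counterpart to not_none() from
alembic.util.langhelpers (added later in this diff): it narrows an
Optional[_RevisionOrBase] value, which may be None or the literal
"base", down to Revision for the type checker, raising AssertionError
otherwise at runtime. The idiom as it is used throughout this module:

    revs = [is_revision(rev) for rev in revision_map.get_revisions("heads")]
    # statically List[Revision]; bad inputs fail the assert at runtime
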
diff -pruN 1.7.6-1/alembic/script/write_hooks.py 1.8.1-2/alembic/script/write_hooks.py
--- 1.7.6-1/alembic/script/write_hooks.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/script/write_hooks.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import shlex
 import subprocess
 import sys
@@ -14,7 +16,7 @@ from ..util import compat
 
 REVISION_SCRIPT_TOKEN = "REVISION_SCRIPT_FILENAME"
 
-_registry = {}
+_registry: dict = {}
 
 
 def register(name: str) -> Callable:
diff -pruN 1.7.6-1/alembic/templates/async/alembic.ini.mako 1.8.1-2/alembic/templates/async/alembic.ini.mako
--- 1.7.6-1/alembic/templates/async/alembic.ini.mako	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/templates/async/alembic.ini.mako	2022-07-13 14:17:20.000000000 +0000
@@ -4,8 +4,9 @@
 # path to migration scripts
 script_location = ${script_location}
 
-# template used to generate migration files
-# file_template = %%(rev)s_%%(slug)s
+# template used to generate migration file names; the default value is %%(rev)s_%%(slug)s
+# Uncomment the line below if you want the files to be prepended with date and time
+# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
 
 # sys.path path, will be prepended to sys.path if present.
 # defaults to the current working directory.
diff -pruN 1.7.6-1/alembic/templates/async/env.py 1.8.1-2/alembic/templates/async/env.py
--- 1.7.6-1/alembic/templates/async/env.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/templates/async/env.py	2022-07-13 14:17:20.000000000 +0000
@@ -3,6 +3,7 @@ from logging.config import fileConfig
 
 from sqlalchemy import engine_from_config
 from sqlalchemy import pool
+from sqlalchemy.engine import Connection
 from sqlalchemy.ext.asyncio import AsyncEngine
 
 from alembic import context
@@ -13,7 +14,8 @@ config = context.config
 
 # Interpret the config file for Python logging.
 # This line sets up loggers basically.
-fileConfig(config.config_file_name)
+if config.config_file_name is not None:
+    fileConfig(config.config_file_name)
 
 # add your model's MetaData object here
 # for 'autogenerate' support
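
The config_file_name guard recurs in every env.py template in this
release: when Alembic is driven programmatically with an in-memory
Config, config_file_name is None and fileConfig(None) would raise. A
minimal sketch of that programmatic path, assuming default logging is
acceptable (paths and URL are illustrative):

    from alembic import command
    from alembic.config import Config

    cfg = Config()  # no ini file on disk; config_file_name is None
    cfg.set_main_option("script_location", "migrations")
    cfg.set_main_option("sqlalchemy.url", "sqlite:///app.db")
    command.upgrade(cfg, "head")  # env.py now skips fileConfig()
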
@@ -27,7 +29,7 @@ target_metadata = None
 # ... etc.
 
 
-def run_migrations_offline():
+def run_migrations_offline() -> None:
     """Run migrations in 'offline' mode.
 
     This configures the context with just a URL
@@ -51,14 +53,14 @@ def run_migrations_offline():
         context.run_migrations()
 
 
-def do_run_migrations(connection):
+def do_run_migrations(connection: Connection) -> None:
     context.configure(connection=connection, target_metadata=target_metadata)
 
     with context.begin_transaction():
         context.run_migrations()
 
 
-async def run_migrations_online():
+async def run_migrations_online() -> None:
     """Run migrations in 'online' mode.
 
     In this scenario we need to create an Engine
diff -pruN 1.7.6-1/alembic/templates/async/script.py.mako 1.8.1-2/alembic/templates/async/script.py.mako
--- 1.7.6-1/alembic/templates/async/script.py.mako	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/templates/async/script.py.mako	2022-07-13 14:17:20.000000000 +0000
@@ -16,9 +16,9 @@ branch_labels = ${repr(branch_labels)}
 depends_on = ${repr(depends_on)}
 
 
-def upgrade():
+def upgrade() -> None:
     ${upgrades if upgrades else "pass"}
 
 
-def downgrade():
+def downgrade() -> None:
     ${downgrades if downgrades else "pass"}
diff -pruN 1.7.6-1/alembic/templates/generic/alembic.ini.mako 1.8.1-2/alembic/templates/generic/alembic.ini.mako
--- 1.7.6-1/alembic/templates/generic/alembic.ini.mako	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/templates/generic/alembic.ini.mako	2022-07-13 14:17:20.000000000 +0000
@@ -4,8 +4,11 @@
 # path to migration scripts
 script_location = ${script_location}
 
-# template used to generate migration files
-# file_template = %%(rev)s_%%(slug)s
+# template used to generate migration file names; the default value is %%(rev)s_%%(slug)s
+# Uncomment the line below if you want the files to be prepended with date and time
+# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
+# for all available tokens
+# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
 
 # sys.path path, will be prepended to sys.path if present.
 # defaults to the current working directory.
diff -pruN 1.7.6-1/alembic/templates/generic/env.py 1.8.1-2/alembic/templates/generic/env.py
--- 1.7.6-1/alembic/templates/generic/env.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/templates/generic/env.py	2022-07-13 14:17:20.000000000 +0000
@@ -11,7 +11,8 @@ config = context.config
 
 # Interpret the config file for Python logging.
 # This line sets up loggers basically.
-fileConfig(config.config_file_name)
+if config.config_file_name is not None:
+    fileConfig(config.config_file_name)
 
 # add your model's MetaData object here
 # for 'autogenerate' support
@@ -25,7 +26,7 @@ target_metadata = None
 # ... etc.
 
 
-def run_migrations_offline():
+def run_migrations_offline() -> None:
     """Run migrations in 'offline' mode.
 
     This configures the context with just a URL
@@ -49,7 +50,7 @@ def run_migrations_offline():
         context.run_migrations()
 
 
-def run_migrations_online():
+def run_migrations_online() -> None:
     """Run migrations in 'online' mode.
 
     In this scenario we need to create an Engine
diff -pruN 1.7.6-1/alembic/templates/generic/script.py.mako 1.8.1-2/alembic/templates/generic/script.py.mako
--- 1.7.6-1/alembic/templates/generic/script.py.mako	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/templates/generic/script.py.mako	2022-07-13 14:17:20.000000000 +0000
@@ -16,9 +16,9 @@ branch_labels = ${repr(branch_labels)}
 depends_on = ${repr(depends_on)}
 
 
-def upgrade():
+def upgrade() -> None:
     ${upgrades if upgrades else "pass"}
 
 
-def downgrade():
+def downgrade() -> None:
     ${downgrades if downgrades else "pass"}
diff -pruN 1.7.6-1/alembic/templates/multidb/alembic.ini.mako 1.8.1-2/alembic/templates/multidb/alembic.ini.mako
--- 1.7.6-1/alembic/templates/multidb/alembic.ini.mako	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/templates/multidb/alembic.ini.mako	2022-07-13 14:17:20.000000000 +0000
@@ -4,8 +4,11 @@
 # path to migration scripts
 script_location = ${script_location}
 
-# template used to generate migration files
-# file_template = %%(rev)s_%%(slug)s
+# template used to generate migration file names; the default value is %%(rev)s_%%(slug)s
+# Uncomment the line below if you want the files to be prepended with date and time
+# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
+# for all available tokens
+# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
 
 # sys.path path, will be prepended to sys.path if present.
 # defaults to the current working directory.
diff -pruN 1.7.6-1/alembic/templates/multidb/env.py 1.8.1-2/alembic/templates/multidb/env.py
--- 1.7.6-1/alembic/templates/multidb/env.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/templates/multidb/env.py	2022-07-13 14:17:20.000000000 +0000
@@ -15,7 +15,8 @@ config = context.config
 
 # Interpret the config file for Python logging.
 # This line sets up loggers basically.
-fileConfig(config.config_file_name)
+if config.config_file_name is not None:
+    fileConfig(config.config_file_name)
 logger = logging.getLogger("alembic.env")
 
 # gather section names referring to different
@@ -42,7 +43,7 @@ target_metadata = {}
 # ... etc.
 
 
-def run_migrations_offline():
+def run_migrations_offline() -> None:
     """Run migrations in 'offline' mode.
 
     This configures the context with just a URL
@@ -78,7 +79,7 @@ def run_migrations_offline():
                 context.run_migrations(engine_name=name)
 
 
-def run_migrations_online():
+def run_migrations_online() -> None:
     """Run migrations in 'online' mode.
 
     In this scenario we need to create an Engine
diff -pruN 1.7.6-1/alembic/templates/multidb/script.py.mako 1.8.1-2/alembic/templates/multidb/script.py.mako
--- 1.7.6-1/alembic/templates/multidb/script.py.mako	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/templates/multidb/script.py.mako	2022-07-13 14:17:20.000000000 +0000
@@ -19,11 +19,11 @@ branch_labels = ${repr(branch_labels)}
 depends_on = ${repr(depends_on)}
 
 
-def upgrade(engine_name):
+def upgrade(engine_name: str) -> None:
     globals()["upgrade_%s" % engine_name]()
 
 
-def downgrade(engine_name):
+def downgrade(engine_name: str) -> None:
     globals()["downgrade_%s" % engine_name]()
 
 <%
@@ -35,11 +35,11 @@ def downgrade(engine_name):
 
 % for db_name in re.split(r',\s*', db_names):
 
-def upgrade_${db_name}():
+def upgrade_${db_name}() -> None:
     ${context.get("%s_upgrades" % db_name, "pass")}
 
 
-def downgrade_${db_name}():
+def downgrade_${db_name}() -> None:
     ${context.get("%s_downgrades" % db_name, "pass")}
 
 % endfor
diff -pruN 1.7.6-1/alembic/templates/pylons/alembic.ini.mako 1.8.1-2/alembic/templates/pylons/alembic.ini.mako
--- 1.7.6-1/alembic/templates/pylons/alembic.ini.mako	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/templates/pylons/alembic.ini.mako	1970-01-01 00:00:00.000000000 +0000
@@ -1,68 +0,0 @@
-# a Pylons configuration.
-
-[alembic]
-# path to migration scripts
-script_location = ${script_location}
-
-# template used to generate migration files
-# file_template = %%(rev)s_%%(slug)s
-
-# sys.path path, will be prepended to sys.path if present.
-# defaults to the current working directory.
-prepend_sys_path = .
-
-# timezone to use when rendering the date within the migration file
-# as well as the filename.
-# If specified, requires the python-dateutil library that can be
-# installed by adding `alembic[tz]` to the pip requirements
-# string value is passed to dateutil.tz.gettz()
-# leave blank for localtime
-# timezone =
-
-# max length of characters to apply to the
-# "slug" field
-# truncate_slug_length = 40
-
-# set to 'true' to run the environment during
-# the 'revision' command, regardless of autogenerate
-# revision_environment = false
-
-# set to 'true' to allow .pyc and .pyo files without
-# a source .py file to be detected as revisions in the
-# versions/ directory
-# sourceless = false
-
-# version location specification; This defaults
-# to ${script_location}/versions.  When using multiple version
-# directories, initial revisions must be specified with --version-path.
-# The path separator used here should be the separator specified by "version_path_separator" below.
-# version_locations = %(here)s/bar:%(here)s/bat:${script_location}/versions
-
-# version path separator; As mentioned above, this is the character used to split
-# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
-# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
-# Valid values for version_path_separator are:
-#
-# version_path_separator = :
-# version_path_separator = ;
-# version_path_separator = space
-version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.
-
-# the output encoding used when revision files
-# are written from script.py.mako
-# output_encoding = utf-8
-
-[post_write_hooks]
-# post_write_hooks defines scripts or Python functions that are run
-# on newly generated revision scripts.  See the documentation for further
-# detail and examples
-
-# format using "black" - use the console_scripts runner, against the "black" entrypoint
-# hooks = black
-# black.type = console_scripts
-# black.entrypoint = black
-# black.options = -l 79 REVISION_SCRIPT_FILENAME
-
-pylons_config_file = ./development.ini
-
-# that's it !
diff -pruN 1.7.6-1/alembic/templates/pylons/env.py 1.8.1-2/alembic/templates/pylons/env.py
--- 1.7.6-1/alembic/templates/pylons/env.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/templates/pylons/env.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,85 +0,0 @@
-"""Pylons bootstrap environment.
-
-Place 'pylons_config_file' into alembic.ini, and the application will
-be loaded from there.
-
-"""
-from logging.config import fileConfig
-
-from paste.deploy import loadapp
-
-from alembic import context
-
-
-try:
-    # if pylons app already in, don't create a new app
-    from pylons import config as pylons_config
-
-    pylons_config["__file__"]
-except:
-    config = context.config
-    # can use config['__file__'] here, i.e. the Pylons
-    # ini file, instead of alembic.ini
-    config_file = config.get_main_option("pylons_config_file")
-    fileConfig(config_file)
-    wsgi_app = loadapp("config:%s" % config_file, relative_to=".")
-
-
-# customize this section for non-standard engine configurations.
-meta = __import__(
-    "%s.model.meta" % wsgi_app.config["pylons.package"]
-).model.meta
-
-# add your model's MetaData object here
-# for 'autogenerate' support
-# from myapp import mymodel
-# target_metadata = mymodel.Base.metadata
-target_metadata = None
-
-
-def run_migrations_offline():
-    """Run migrations in 'offline' mode.
-
-    This configures the context with just a URL
-    and not an Engine, though an Engine is acceptable
-    here as well.  By skipping the Engine creation
-    we don't even need a DBAPI to be available.
-
-    Calls to context.execute() here emit the given string to the
-    script output.
-
-    """
-    context.configure(
-        url=meta.engine.url,
-        target_metadata=target_metadata,
-        literal_binds=True,
-        dialect_opts={"paramstyle": "named"},
-    )
-    with context.begin_transaction():
-        context.run_migrations()
-
-
-def run_migrations_online():
-    """Run migrations in 'online' mode.
-
-    In this scenario we need to create an Engine
-    and associate a connection with the context.
-
-    """
-    # specify here how the engine is acquired
-    # engine = meta.engine
-    raise NotImplementedError("Please specify engine connectivity here")
-
-    with engine.connect() as connection:  # noqa
-        context.configure(
-            connection=connection, target_metadata=target_metadata
-        )
-
-        with context.begin_transaction():
-            context.run_migrations()
-
-
-if context.is_offline_mode():
-    run_migrations_offline()
-else:
-    run_migrations_online()
diff -pruN 1.7.6-1/alembic/templates/pylons/README 1.8.1-2/alembic/templates/pylons/README
--- 1.7.6-1/alembic/templates/pylons/README	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/templates/pylons/README	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-Configuration that reads from a Pylons project environment.
\ No newline at end of file
diff -pruN 1.7.6-1/alembic/templates/pylons/script.py.mako 1.8.1-2/alembic/templates/pylons/script.py.mako
--- 1.7.6-1/alembic/templates/pylons/script.py.mako	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/templates/pylons/script.py.mako	1970-01-01 00:00:00.000000000 +0000
@@ -1,24 +0,0 @@
-"""${message}
-
-Revision ID: ${up_revision}
-Revises: ${down_revision | comma,n}
-Create Date: ${create_date}
-
-"""
-from alembic import op
-import sqlalchemy as sa
-${imports if imports else ""}
-
-# revision identifiers, used by Alembic.
-revision = ${repr(up_revision)}
-down_revision = ${repr(down_revision)}
-branch_labels = ${repr(branch_labels)}
-depends_on = ${repr(depends_on)}
-
-
-def upgrade():
-    ${upgrades if upgrades else "pass"}
-
-
-def downgrade():
-    ${downgrades if downgrades else "pass"}
diff -pruN 1.7.6-1/alembic/testing/assertions.py 1.8.1-2/alembic/testing/assertions.py
--- 1.7.6-1/alembic/testing/assertions.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/testing/assertions.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import contextlib
 import re
 import sys
@@ -69,7 +71,7 @@ def _assert_raises(
 
 
 class _ErrorContainer:
-    error = None
+    error: Any = None
 
 
 @contextlib.contextmanager
diff -pruN 1.7.6-1/alembic/testing/env.py 1.8.1-2/alembic/testing/env.py
--- 1.7.6-1/alembic/testing/env.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/testing/env.py	2022-07-13 14:17:20.000000000 +0000
@@ -91,10 +91,12 @@ config = context.config
         f.write(txt)
 
 
-def _sqlite_file_db(tempname="foo.db", future=False):
+def _sqlite_file_db(tempname="foo.db", future=False, scope=None, **options):
     dir_ = os.path.join(_get_staging_directory(), "scripts")
     url = "sqlite:///%s/%s" % (dir_, tempname)
-    return testing_util.testing_engine(url=url, future=future)
+    if scope and util.sqla_14:
+        options["scope"] = scope
+    return testing_util.testing_engine(url=url, future=future, options=options)
 
 
 def _sqlite_testing_config(sourceless=False, future=False):
diff -pruN 1.7.6-1/alembic/testing/fixtures.py 1.8.1-2/alembic/testing/fixtures.py
--- 1.7.6-1/alembic/testing/fixtures.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/testing/fixtures.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,4 +1,6 @@
 # coding: utf-8
+from __future__ import annotations
+
 import configparser
 from contextlib import contextmanager
 import io
diff -pruN 1.7.6-1/alembic/testing/suite/_autogen_fixtures.py 1.8.1-2/alembic/testing/suite/_autogen_fixtures.py
--- 1.7.6-1/alembic/testing/suite/_autogen_fixtures.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/testing/suite/_autogen_fixtures.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,5 +1,8 @@
+from __future__ import annotations
+
 from typing import Any
 from typing import Dict
+from typing import Set
 
 from sqlalchemy import CHAR
 from sqlalchemy import CheckConstraint
@@ -28,7 +31,7 @@ from ...testing import eq_
 from ...testing.env import clear_staging_env
 from ...testing.env import staging_env
 
-names_in_this_test = set()
+names_in_this_test: Set[Any] = set()
 
 
 @event.listens_for(Table, "after_parent_attach")
@@ -43,15 +46,15 @@ def _default_include_object(obj, name, t
         return True
 
 
-_default_object_filters = _default_include_object
+_default_object_filters: Any = _default_include_object
 
-_default_name_filters = None
+_default_name_filters: Any = None
 
 
 class ModelOne:
     __requires__ = ("unique_constraint_reflection",)
 
-    schema = None
+    schema: Any = None
 
     @classmethod
     def _get_db_schema(cls):
@@ -323,8 +326,6 @@ class AutogenFixtureTest(_ComparesFKs):
                     dialect._user_defined_max_identifier_length
                 ) = existing_length
 
-    reports_unnamed_constraints = False
-
     def setUp(self):
         staging_env()
         self.bind = config.db
diff -pruN 1.7.6-1/alembic/testing/util.py 1.8.1-2/alembic/testing/util.py
--- 1.7.6-1/alembic/testing/util.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/testing/util.py	2022-07-13 14:17:20.000000000 +0000
@@ -4,6 +4,7 @@
 #
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
+from __future__ import annotations
 
 import re
 import types
@@ -55,7 +56,7 @@ def flag_combinations(*combinations):
             for d in combinations
         ],
         id_="i" + ("a" * len(keys)),
-        argnames=",".join(keys)
+        argnames=",".join(keys),
     )
 
 
diff -pruN 1.7.6-1/alembic/util/compat.py 1.8.1-2/alembic/util/compat.py
--- 1.7.6-1/alembic/util/compat.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/util/compat.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,7 +1,9 @@
+from __future__ import annotations
+
 import io
 import os
 import sys
-from typing import Tuple
+from typing import Sequence
 
 from sqlalchemy.util import inspect_getfullargspec  # noqa
 from sqlalchemy.util.compat import inspect_formatargspec  # noqa
@@ -10,7 +12,6 @@ is_posix = os.name == "posix"
 
 py39 = sys.version_info >= (3, 9)
 py38 = sys.version_info >= (3, 8)
-py37 = sys.version_info >= (3, 7)
 
 
 # produce a wrapper that allows encoded text to stream
@@ -26,17 +27,17 @@ if py39:
     from importlib import metadata as importlib_metadata
     from importlib.metadata import EntryPoint
 else:
-    import importlib_resources  # type:ignore[no-redef] # noqa
-    import importlib_metadata  # type:ignore[no-redef] # noqa
+    import importlib_resources  # type:ignore # noqa
+    import importlib_metadata  # type:ignore # noqa
     from importlib_metadata import EntryPoint  # type:ignore # noqa
 
 
-def importlib_metadata_get(group: str) -> Tuple[EntryPoint, ...]:
+def importlib_metadata_get(group: str) -> Sequence[EntryPoint]:
     ep = importlib_metadata.entry_points()
     if hasattr(ep, "select"):
-        return ep.select(group=group)  # type:ignore[attr-defined]
+        return ep.select(group=group)  # type: ignore
     else:
-        return ep.get(group, ())
+        return ep.get(group, ())  # type: ignore
 
 
 def formatannotation_fwdref(annotation, base_module=None):
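
The return type loosens from Tuple to Sequence because the modern
EntryPoints.select() API (probed via hasattr) returns an EntryPoints
sequence rather than a tuple, while the .get() branch serves the legacy
dict-style API. Usage in the style of the console_scripts write hook,
for example:

    # locate the "black" console script entry point
    eps = importlib_metadata_get("console_scripts")
    black_ep = next(ep for ep in eps if ep.name == "black")
    runner = black_ep.load()
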
diff -pruN 1.7.6-1/alembic/util/editor.py 1.8.1-2/alembic/util/editor.py
--- 1.7.6-1/alembic/util/editor.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/util/editor.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import os
 from os.path import exists
 from os.path import join
diff -pruN 1.7.6-1/alembic/util/__init__.py 1.8.1-2/alembic/util/__init__.py
--- 1.7.6-1/alembic/util/__init__.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/util/__init__.py	2022-07-13 14:17:20.000000000 +0000
@@ -7,6 +7,7 @@ from .langhelpers import Dispatcher
 from .langhelpers import immutabledict
 from .langhelpers import memoized_property
 from .langhelpers import ModuleClsProxy
+from .langhelpers import not_none
 from .langhelpers import rev_id
 from .langhelpers import to_list
 from .langhelpers import to_tuple
diff -pruN 1.7.6-1/alembic/util/langhelpers.py 1.8.1-2/alembic/util/langhelpers.py
--- 1.7.6-1/alembic/util/langhelpers.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/util/langhelpers.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import collections
 from collections.abc import Iterable
 import textwrap
@@ -280,3 +282,8 @@ class Dispatcher:
         else:
             d._registry.update(self._registry)
         return d
+
+
+def not_none(value: Optional[_T]) -> _T:
+    assert value is not None
+    return value
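
not_none() is the generic narrowing helper leaned on throughout this
diff (for instance in script/revision.py): it converts Optional[_T] to
_T for the type checker and asserts at runtime. For example, with a
hypothetical revision id:

    from alembic.util import not_none

    maybe = revision_map.get_revision("1975ea83b712")  # Optional[Revision]
    rev = not_none(maybe)  # typed as Revision; AssertionError if None
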
diff -pruN 1.7.6-1/alembic/util/messaging.py 1.8.1-2/alembic/util/messaging.py
--- 1.7.6-1/alembic/util/messaging.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/util/messaging.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from collections.abc import Iterable
 import logging
 import sys
diff -pruN 1.7.6-1/alembic/util/pyfiles.py 1.8.1-2/alembic/util/pyfiles.py
--- 1.7.6-1/alembic/util/pyfiles.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/util/pyfiles.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import atexit
 from contextlib import ExitStack
 import importlib
diff -pruN 1.7.6-1/alembic/util/sqla_compat.py 1.8.1-2/alembic/util/sqla_compat.py
--- 1.7.6-1/alembic/util/sqla_compat.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/alembic/util/sqla_compat.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,5 +1,9 @@
+from __future__ import annotations
+
 import contextlib
 import re
+from typing import Any
+from typing import Iterable
 from typing import Iterator
 from typing import Mapping
 from typing import Optional
@@ -56,6 +60,7 @@ _vers = tuple(
 sqla_13 = _vers >= (1, 3)
 sqla_14 = _vers >= (1, 4)
 sqla_14_26 = _vers >= (1, 4, 26)
+sqla_2 = _vers >= (1, 5)
 
 
 if sqla_14:
@@ -109,6 +114,11 @@ def _ensure_scope_for_ddl(
         in_transaction = connection.in_transaction  # type: ignore[union-attr]
     except AttributeError:
         # catch for MockConnection, None
+        in_transaction = None
+        pass
+
+    # yield outside the catch
+    if in_transaction is None:
         yield
     else:
         if not in_transaction():
@@ -155,6 +165,10 @@ def _get_connection_in_transaction(conne
         return in_transaction()
 
 
+def _idx_table_bound_expressions(idx: Index) -> Iterable[ColumnElement[Any]]:
+    return idx.expressions  # type: ignore
+
+
 def _copy(schema_item: _CE, **kw) -> _CE:
     if hasattr(schema_item, "_copy"):
         return schema_item._copy(**kw)  # type: ignore[union-attr]
@@ -266,7 +280,6 @@ if hasattr(sqltypes.TypeEngine, "_varian
     def _get_variant_mapping(type_):
         return type_, type_._variant_mapping
 
-
 else:
 
     def _type_has_variants(type_):
@@ -337,7 +350,13 @@ def _remove_column_from_collection(
     # same object that's present
     assert column.key is not None
     to_remove = collection[column.key]
-    collection.remove(to_remove)
+
+    # SQLAlchemy 2.0 makes broader use of ReadOnlyColumnCollection
+    # (renamed from ImmutableColumnCollection)
+    if hasattr(collection, "_immutable") or hasattr(collection, "_readonly"):
+        collection._parent.remove(to_remove)
+    else:
+        collection.remove(to_remove)
 
 
 def _textual_index_column(
diff -pruN 1.7.6-1/debian/changelog 1.8.1-2/debian/changelog
--- 1.7.6-1/debian/changelog	2022-02-21 09:59:51.000000000 +0000
+++ 1.8.1-2/debian/changelog	2022-09-23 11:47:38.000000000 +0000
@@ -1,3 +1,21 @@
+alembic (1.8.1-2) unstable; urgency=medium
+
+  * Uploading to unstable.
+
+ -- Thomas Goirand <zigo@debian.org>  Fri, 23 Sep 2022 13:47:38 +0200
+
+alembic (1.8.1-1) experimental; urgency=medium
+
+  [ Debian Janitor ]
+  * Update standards version to 4.6.0, no changes needed.
+
+  [ Thomas Goirand ]
+  * New upstream release.
+  * Refreshed 0001-Do-not-use-intersphinx.patch.
+  * Add python3-sphinx-copybutton as build-depends.
+
+ -- Thomas Goirand <zigo@debian.org>  Tue, 20 Sep 2022 16:59:04 +0200
+
 alembic (1.7.6-1) unstable; urgency=medium
 
   [ Debian Janitor ]
diff -pruN 1.7.6-1/debian/control 1.8.1-2/debian/control
--- 1.7.6-1/debian/control	2022-02-21 09:59:51.000000000 +0000
+++ 1.8.1-2/debian/control	2022-09-23 11:47:38.000000000 +0000
@@ -12,6 +12,7 @@ Build-Depends:
  python3-changelog,
  python3-setuptools,
  python3-sphinx,
+ python3-sphinx-copybutton,
  python3-sphinx-paramlinks,
 Build-Depends-Indep:
  python3-dateutil,
@@ -20,7 +21,7 @@ Build-Depends-Indep:
  python3-mock,
  python3-pytest,
  python3-sqlalchemy,
-Standards-Version: 4.5.1
+Standards-Version: 4.6.0
 Vcs-Browser: https://salsa.debian.org/python-team/packages/alembic
 Vcs-Git: https://salsa.debian.org/python-team/packages/alembic.git
 Homepage: https://github.com/sqlalchemy/alembic
diff -pruN 1.7.6-1/debian/patches/0001-Do-not-use-intersphinx.patch 1.8.1-2/debian/patches/0001-Do-not-use-intersphinx.patch
--- 1.7.6-1/debian/patches/0001-Do-not-use-intersphinx.patch	2022-02-21 09:59:51.000000000 +0000
+++ 1.8.1-2/debian/patches/0001-Do-not-use-intersphinx.patch	2022-09-23 11:47:38.000000000 +0000
@@ -10,13 +10,15 @@ Last-Update: 2015-02-08
  docs/build/conf.py | 2 +-
  1 file changed, 1 insertion(+), 1 deletion(-)
 
---- a/docs/build/conf.py
-+++ b/docs/build/conf.py
-@@ -35,7 +35,6 @@
+Index: alembic/docs/build/conf.py
+===================================================================
+--- alembic.orig/docs/build/conf.py
++++ alembic/docs/build/conf.py
+@@ -35,7 +35,6 @@ if True:
  # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
  extensions = [
      "sphinx.ext.autodoc",
 -    "sphinx.ext.intersphinx",
      "changelog",
      "sphinx_paramlinks",
- ]
+     "sphinx_copybutton",
diff -pruN 1.7.6-1/docs/build/changelog.rst 1.8.1-2/docs/build/changelog.rst
--- 1.7.6-1/docs/build/changelog.rst	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/docs/build/changelog.rst	2022-07-13 14:17:20.000000000 +0000
@@ -4,6 +4,135 @@ Changelog
 ==========
 
 .. changelog::
+    :version: 1.8.1
+    :released: July 13, 2022
+
+    .. change::
+        :tags: bug, sqlite
+        :tickets: 1065
+
+        Fixed bug where the SQLite implementation of
+        :meth:`.Operations.rename_table` would render an explicit schema name for
+        both the old and new table name; while this is standard ALTER syntax, it
+        is not accepted by SQLite, which does not support renaming a table
+        across schemas. In particular, the syntax issue would prevent batch mode
+        from working for SQLite databases that made use of attached databases
+        (which are treated as "schemas" in SQLAlchemy).
+
+    .. change::
+        :tags: bug, batch
+        :tickets: 1021
+
+        An error is now raised for the condition where
+        :meth:`.Operations.batch_alter_table` is used in ``--sql`` mode and the
+        operation requires table reflection, as is the case when running against
+        SQLite without giving it a fixed ``Table`` object. Previously the
+        operation would fail with an internal error.  To get a "move and copy"
+        batch operation as a SQL script without connecting to a database, a
+        ``Table`` object should be passed to the
+        :paramref:`.Operations.batch_alter_table.copy_from` parameter so that
+        reflection may be skipped.
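+
+        For example, a minimal sketch within a migration's ``upgrade()``
+        function; the table and column names here are illustrative::
+
+            from sqlalchemy import Column, Integer, MetaData, String, Table
+
+            reference = Table(
+                "account",
+                MetaData(),
+                Column("id", Integer, primary_key=True),
+                Column("name", String(50)),
+            )
+
+            with op.batch_alter_table(
+                "account", copy_from=reference
+            ) as batch_op:
+                batch_op.drop_column("name")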
+
+.. changelog::
+    :version: 1.8.0
+    :released: May 31, 2022
+
+    .. change::
+        :tags: feature, typing
+        :tickets: 764
+
+        :pep:`484` typing annotations have been added to the ``env.py`` and
+        revision template files within migration templates. Pull request by Nikita
+        Sobolev.
+
+    .. change::
+        :tags: usecase, operations
+        :tickets: 1037
+
+        The ``op.drop_table()`` operation directive will now trigger the
+        ``before_drop()`` and ``after_drop()`` DDL event hooks at the table level,
+        which is similar to how the ``before_create()`` and ``after_create()``
+        hooks are triggered by the ``op.create_table()`` directive. Note that as
+        ``op.drop_table()`` accepts only a table name and optional schema name,
+        the ``Table`` object received by the event carries no information other
+        than its name and schema name.
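+
+        For example, a minimal sketch of a hook that would now fire for
+        ``op.drop_table()``; here the listener is attached at the class
+        level so that it applies to all ``Table`` objects::
+
+            from sqlalchemy import event
+            from sqlalchemy import Table
+
+            @event.listens_for(Table, "before_drop")
+            def receive_before_drop(target, connection, **kw):
+                # invoked before the DROP TABLE statement is emitted
+                print(f"about to drop table {target.name!r}")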
+
+    .. change::
+        :tags: installation, changed
+        :tickets: 1025
+
+        Alembic 1.8 now supports Python 3.7 and above.
+
+    .. change::
+        :tags: changed, environment
+        :tickets: 987
+
+        The "Pylons" environment template has been removed as of Alembic 1.8. This
+        template was based on the very old pre-Pyramid Pylons web framework which
+        has long been superseded by Pyramid.
+
+    .. change::
+        :tags: bug, revisioning
+        :tickets: 1026
+
+        Fixed issue where a downgrade using a relative revision would
+        fail in the case of multiple branches with a single effective
+        head, due to interdependencies between revisions.
+
+    .. change::
+        :tags: usecase, commands
+        :tickets: 1027
+
+        Added new token ``epoch`` to the ``file_template`` option, which will
+        populate the integer epoch as determined by
+        ``int(create_date.timestamp())``.  Pull request courtesy Caio Carvalho.
+
+    .. change::
+        :tags: bug, batch
+        :tickets: 1034
+
+        Fixed issue in batch mode where CREATE INDEX would not use a new column
+        name in the case of a column rename.
+
+.. changelog::
+    :version: 1.7.7
+    :released: March 14, 2022
+
+    .. change::
+        :tags: bug, operations
+        :tickets: 1004
+
+        Fixed issue where a :class:`.CheckConstraint` passed to
+        :meth:`.Operations.create_table` that referred to table-bound
+        :class:`.Column` objects rather than string expressions would be added to
+        the parent table potentially multiple times, resulting in an incorrect
+        DDL sequence. Pull request courtesy Nicolas CANIART.
+
+    .. change::
+        :tags: bug, environment
+        :tickets: 986
+
+        The ``logging.fileConfig()`` line in ``env.py`` templates, which is used
+        to setup Python logging for the migration run, is now conditional on
+        :attr:`.Config.config_file_name` not being ``None``.  Otherwise, the line
+        is skipped as there is no default logging configuration present.
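+
+        A sketch of the resulting conditional in the generated ``env.py``::
+
+            if config.config_file_name is not None:
+                fileConfig(config.config_file_name)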
+
+
+    .. change::
+        :tags: bug, mssql
+        :tickets: 977
+
+        Fixed bug where an :meth:`.Operations.alter_column` operation would change
+        a "NOT NULL" column to "NULL" by emitting an ALTER COLUMN statement that
+        did not specify "NOT NULL". (In the absence of "NOT NULL", T-SQL
+        implicitly assumes "NULL".) An :meth:`.Operations.alter_column` operation
+        that specifies :paramref:`.Operations.alter_column.type_` should also
+        include either :paramref:`.Operations.alter_column.nullable` or
+        :paramref:`.Operations.alter_column.existing_nullable` to inform Alembic
+        as to whether the emitted DDL should include "NULL" or "NOT NULL"; a
+        warning is now emitted if this is missing under this scenario.
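+
+        A minimal sketch of an operation that supplies the existing
+        nullability, where ``sa`` is the ``sqlalchemy`` module as imported in
+        generated migration files and the names are illustrative::
+
+            op.alter_column(
+                "account",
+                "name",
+                type_=sa.String(100),
+                existing_nullable=False,
+            )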
+
+.. changelog::
     :version: 1.7.6
     :released: February 1, 2022
 
@@ -486,7 +615,7 @@ Changelog
         also changing the column type, would cause an ALTER COLUMN operation to
         incorrectly render a second ALTER statement without the nullability if a
         new type were also present, as the MSSQL-specific contract did not
-        anticipate all three of "nullability", "type_" and "existing_type" being
+        anticipate all three of "nullability", ``"type_"`` and "existing_type" being
         sent at the same time.
 
 
@@ -767,7 +896,7 @@ Changelog
         yet been updated, these can be modified directly in order to maintain
         compatibility:
 
-        * :meth:`.Operations.drop_constraint` - "type" (use "type_") and "name"
+        * :meth:`.Operations.drop_constraint` - "type" (use ``"type_"``) and "name"
           (use "constraint_name")
 
         * :meth:`.Operations.create_primary_key` - "cols" (use "columns") and
@@ -2193,10 +2322,10 @@ Changelog
       :tags: bug, batch
       :tickets: 391
 
-      Batch mode will not use CAST() to copy data if type_ is given, however
+      Batch mode will not use CAST() to copy data if ``type_`` is given, however
       the basic type affinity matches that of the existing type.  This to
       avoid SQLite's CAST of TIMESTAMP which results in truncation of the
-      data, in those cases where the user needs to add redundant type_ for
+      data, in those cases where the user needs to add redundant ``type_`` for
       other reasons.
 
     .. change::
@@ -3583,8 +3712,8 @@ Changelog
 
       Added quoting to the table name when the special EXEC is run to
       drop any existing server defaults or constraints when the
-      :paramref:`.drop_column.mssql_drop_check` or
-      :paramref:`.drop_column.mssql_drop_default`
+      :paramref:`.Operations.drop_column.mssql_drop_check` or
+      :paramref:`.Operations.drop_column.mssql_drop_default`
       arguments are used.
 
     .. change::
diff -pruN 1.7.6-1/docs/build/conf.py 1.8.1-2/docs/build/conf.py
--- 1.7.6-1/docs/build/conf.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/docs/build/conf.py	2022-07-13 14:17:20.000000000 +0000
@@ -38,8 +38,15 @@ extensions = [
     "sphinx.ext.intersphinx",
     "changelog",
     "sphinx_paramlinks",
+    "sphinx_copybutton",
 ]
 
+copybutton_prompt_text = (
+    r">>> |\.\.\. |\$ |In \[\d*\]: | {2,5}\.\.\.: | {5,8}: "
+)
+copybutton_prompt_is_regexp = True
+
+
 # tags to sort on inside of sections
 changelog_sections = [
     "changed",
@@ -69,14 +76,17 @@ source_suffix = ".rst"
 # The encoding of source files.
 # source_encoding = 'utf-8'
 
-nitpicky = True
+nitpicky = False
+
+exclude_patterns = ["build", "unreleased*/*", "**/*_include.rst"]
+
 
 # The master toctree document.
 master_doc = "index"
 
 # General information about the project.
-project = u"Alembic"
-copyright = u"2010-2022, Mike Bayer"  # noqa
+project = "Alembic"
+copyright = "2010-2022, Mike Bayer"  # noqa
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -85,8 +95,8 @@ copyright = u"2010-2022, Mike Bayer"  #
 # The short X.Y version.
 version = alembic.__version__
 # The full version, including alpha/beta/rc tags.
-release = "1.7.6"
-release_date = "February 1, 2022"
+release = "1.8.1"
+release_date = "July 13, 2022"
 
 
 # The language for content autogenerated by Sphinx. Refer to documentation
@@ -223,7 +233,7 @@ htmlhelp_basename = "Alembicdoc"
 # Grouping the document tree into LaTeX files. List of tuples (source start
 # file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
-    ("index", "Alembic.tex", u"Alembic Documentation", u"Mike Bayer", "manual")
+    ("index", "Alembic.tex", "Alembic Documentation", "Mike Bayer", "manual")
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
diff -pruN 1.7.6-1/docs/build/cookbook.rst 1.8.1-2/docs/build/cookbook.rst
--- 1.7.6-1/docs/build/cookbook.rst	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/docs/build/cookbook.rst	2022-07-13 14:17:20.000000000 +0000
@@ -188,7 +188,7 @@ new commandline options within environme
 
 .. _connection_sharing:
 
-Sharing a Connection with a Series of Migration Commands and Environments
+Sharing a Connection across one or more programmatic migration commands
 =========================================================================
 
 It is often the case that an application will need to call upon a series
@@ -211,7 +211,9 @@ The steps to take here are:
    :class:`~sqlalchemy.engine.Connection` and makes use of it, in lieu
    of building up its own :class:`~sqlalchemy.engine.Engine` instance.
 
-We illustrate using :attr:`.Config.attributes`::
+We illustrate this using :attr:`.Config.attributes`, with a script that runs
+the :func:`.command.upgrade` command programmatically within a
+transaction declared in a Python file::
 
     from alembic import command, config
 
@@ -233,21 +235,22 @@ Then in ``env.py``::
                 prefix='sqlalchemy.',
                 poolclass=pool.NullPool)
 
-        context.configure(
-            connection=connectable,
-            target_metadata=target_metadata
-        )
+            with connectable.connect() as connection:
+                context.configure(
+                    connection=connection, target_metadata=target_metadata
+                )
 
-        with context.begin_transaction():
-            context.run_migrations()
+                with context.begin_transaction():
+                    context.run_migrations()
+        else:
+            context.configure(
+                connection=connectable,
+                target_metadata=target_metadata
+            )
 
-.. versionchanged:: 1.4
+            with context.begin_transaction():
+                context.run_migrations()
 
-    Prior to this version, we used a "branched connection", by calling
-    :meth:`~sqlalchemy.engine.Connection.connect`.
-    This is now deprecated and unnecessary,
-    since we no longer have to guess if the given "connection"
-    is an ``Engine`` or ``Connection``, it is always a ``Connection``.
 
 .. _replaceable_objects:
 
@@ -1147,6 +1150,12 @@ causes Alembic to treat them as tables i
 
     my_view = Table('my_view', metadata, autoload=True, info=dict(is_view=True))    # Flag this as a view
 
+Or, if you use declarative tables::
+
+    class MyView(Base):
+        __tablename__ = 'my_view'
+        __table_args__ = {'info': {'is_view': True}}  # Flag this as a view
+
+        # a mapped primary key is required for a declarative class
+        id = Column(Integer, primary_key=True)
+
 Then define ``include_object`` as::
 
     def include_object(object, name, type_, reflected, compare_to):
@@ -1437,6 +1446,8 @@ branched revision tree::
     :meth:`.ScriptDirectory.get_heads`
 
 
+.. _asyncio_recipe:
+
 Using Asyncio with Alembic
 ==========================
 
@@ -1456,7 +1467,7 @@ file that's used by Alembic to start its
 
     import asyncio
 
-    from sqlalchemy.ext.asyncio import AsyncEngine
+    from sqlalchemy.ext.asyncio import async_engine_from_config
 
     # ... no change required to the rest of the code
 
@@ -1475,13 +1486,10 @@ file that's used by Alembic to start its
         and associate a connection with the context.
 
         """
-        connectable = AsyncEngine(
-            engine_from_config(
-                config.get_section(config.config_ini_section),
-                prefix="sqlalchemy.",
-                poolclass=pool.NullPool,
-                future=True,
-            )
+        connectable = async_engine_from_config(
+            config.get_section(config.config_ini_section),
+            prefix="sqlalchemy.",
+            poolclass=pool.NullPool,
         )
 
         async with connectable.connect() as connection:
@@ -1495,6 +1503,80 @@ file that's used by Alembic to start its
     else:
         asyncio.run(run_migrations_online())
 
-An asnyc application can also interact with the Alembic api directly by using
+An async application can also interact with the Alembic api directly by using
 the SQLAlchemy ``run_sync`` method to adapt the non-async api of Alembic to
 an async consumer.
+
+
+.. _connection_sharing_plus_asyncio:
+
+Programmatic API use (connection sharing) with Asyncio
+------------------------------------------------------
+
+Combining the examples of :ref:`connection_sharing` and :ref:`asyncio_recipe`
+together, an ``env.py`` such as the following works::
+
+    import asyncio
+
+    from sqlalchemy.ext.asyncio import async_engine_from_config
+
+    # ... no change required to the rest of the code
+
+
+    def do_run_migrations(connection):
+        context.configure(connection=connection, target_metadata=target_metadata)
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+    async def run_async_migrations():
+        connectable = async_engine_from_config(
+            config.get_section(config.config_ini_section),
+            prefix="sqlalchemy.",
+            poolclass=pool.NullPool,
+        )
+
+        async with connectable.connect() as connection:
+            await connection.run_sync(do_run_migrations)
+
+        await connectable.dispose()
+
+
+    def run_migrations_online():
+        """Run migrations in 'online' mode.
+
+        In this scenario we need to create an Engine
+        and associate a connection with the context.
+
+        """
+
+        connectable = config.attributes.get("connection", None)
+
+        if connectable is None:
+            asyncio.run(run_async_migrations())
+        else:
+            do_run_migrations(connectable)
+
+Above, using an asyncio database URL in ``alembic.ini``, one can run
+commands such as ``alembic upgrade`` from the command line.  Programmatically,
+the same ``env.py`` file can be invoked using asyncio as::
+
+    import asyncio
+    from sqlalchemy.ext.asyncio import create_async_engine
+
+    from alembic import command, config
+
+
+    def run_upgrade(connection, cfg):
+        cfg.attributes["connection"] = connection
+        command.upgrade(cfg, "head")
+
+
+    async def run_async_upgrade():
+        async_engine = create_async_engine("sqlite+aiosqlite://", echo=True)
+        async with async_engine.begin() as conn:
+            await conn.run_sync(run_upgrade, config.Config("alembic.ini"))
+
+
+    asyncio.run(run_async_upgrade())
\ No newline at end of file
diff -pruN 1.7.6-1/docs/build/front.rst 1.8.1-2/docs/build/front.rst
--- 1.7.6-1/docs/build/front.rst	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/docs/build/front.rst	2022-07-13 14:17:20.000000000 +0000
@@ -83,8 +83,9 @@ SQLAlchemy as of version **1.3.0**.
 
 .. versionchanged:: 1.5.0 Support for SQLAlchemy older than 1.3.0 was dropped.
 
-Alembic supports Python versions **3.6 and above**
+Alembic supports Python versions **3.7 and above**.
 
+.. versionchanged::  1.8  Alembic now supports Python 3.7 and newer.
 .. versionchanged::  1.7  Alembic now supports Python 3.6 and newer; support
    for Python 2.7 has been dropped.
 
@@ -92,11 +93,10 @@ Community
 =========
 
 Alembic is developed by `Mike Bayer <http://techspot.zzzeek.org>`_, and is
-loosely associated with the SQLAlchemy_, `Pylons <http://www.pylonsproject.org>`_,
-and `Openstack <http://www.openstack.org>`_ projects.
+part of the SQLAlchemy_ project.
 
-User issues, discussion of potential bugs and features should be posted
-to the Alembic Google Group at `sqlalchemy-alembic <https://groups.google.com/group/sqlalchemy-alembic>`_.
+User issues and discussion of potential bugs and features are most easily
+handled using `GitHub Discussions <https://github.com/sqlalchemy/alembic/discussions/>`_.
 
 .. _bugs:
 
diff -pruN 1.7.6-1/docs/build/requirements.txt 1.8.1-2/docs/build/requirements.txt
--- 1.7.6-1/docs/build/requirements.txt	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/docs/build/requirements.txt	2022-07-13 14:17:20.000000000 +0000
@@ -5,4 +5,5 @@ python-dateutil
 # because there's a dependency in pyfiles.py
 Mako
 importlib-metadata;python_version<"3.8"
-importlib-resources;python_version<"3.9"
\ No newline at end of file
+importlib-resources;python_version<"3.9"
+sphinx_copybutton
diff -pruN 1.7.6-1/docs/build/tutorial.rst 1.8.1-2/docs/build/tutorial.rst
--- 1.7.6-1/docs/build/tutorial.rst	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/docs/build/tutorial.rst	2022-07-13 14:17:20.000000000 +0000
@@ -108,11 +108,12 @@ command::
     generic - Generic single-database configuration.
     async - Generic single-database configuration with an async dbapi.
     multidb - Rudimentary multi-database configuration.
-    pylons - Configuration that reads from a Pylons project environment.
 
     Templates are used via the 'init' command, e.g.:
 
-      alembic init --template pylons ./scripts
+      alembic init --template generic ./scripts
+
+.. versionchanged:: 1.8  The "pylons" environment template has been removed.
 
 Editing the .ini File
 =====================
@@ -130,8 +131,9 @@ The file generated with the "generic" co
     # path to migration scripts
     script_location = alembic
 
-    # template used to generate migration files
-    # file_template = %%(rev)s_%%(slug)s
+    # template used to generate migration file names; the default value is %%(rev)s_%%(slug)s
+    # Uncomment the line below if you want the files to be prepended with date and time
+    # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
 
     # sys.path path, will be prepended to sys.path if present.
     # defaults to the current working directory.
@@ -235,10 +237,19 @@ with the path to the Alembic script loca
 
 This file contains the following features:
 
-* ``[alembic]`` - this is the section read by Alembic to determine configuration.  Alembic
-  itself does not directly read any other areas of the file.    The name "alembic" can
-  be customized using the ``--name`` commandline flag; see :ref:`multiple_environments`
-  for a basic example of this.
+* ``[alembic]`` - this is the section read by Alembic to determine configuration.  Alembic's
+  core implementation does not directly read any other areas of the file, apart
+  from additional directives that may be consumed from the
+  end-user-customizable ``env.py`` file (see note below). The name "alembic"
+  can be customized using the ``--name`` commandline flag; see
+  :ref:`multiple_environments` for a basic example of this.
+
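+  For example, to have Alembic read its configuration from a section named
+  ``other`` (a hypothetical section name)::
+
+      $ alembic --name other current
+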
+  .. note:: The default ``env.py`` file included with Alembic's environment
+     templates will also read from the logging sections ``[loggers]``,
+     ``[handlers]`` etc. If the configuration file in use does not contain
+     logging directives, please remove the ``fileConfig()`` directive within
+     the generated ``env.py`` file to prevent it from attempting to configure
+     logging.
 
 * ``script_location`` - this is the location of the Alembic environment.   It is normally
   specified as a filesystem location, either relative or absolute.  If the location is
@@ -256,16 +267,24 @@ This file contains the following feature
   URI which contains colons is interpreted here as a resource name, rather than
   a straight filename.
 
-* ``file_template`` - this is the naming scheme used to generate new migration files.
-  The value present is the default, so is commented out.   Tokens available include:
+* ``file_template`` - this is the naming scheme used to generate new migration
+  files. Uncomment the presented value if you would like the migration files to
+  be prepended with date and time, so that they are listed in chronological
+  order.  The default value is ``%%(rev)s_%%(slug)s``.  Tokens available
+  include:
 
     * ``%%(rev)s`` - revision id
     * ``%%(slug)s`` - a truncated string derived from the revision message
+    * ``%%(epoch)s`` - epoch timestamp based on the create date; this makes
+      use of the Python ``datetime.timestamp()`` method to produce an epoch
+      value.
     * ``%%(year)d``, ``%%(month).2d``, ``%%(day).2d``, ``%%(hour).2d``,
       ``%%(minute).2d``, ``%%(second).2d`` - components of the create date,
       by default ``datetime.datetime.now()`` unless the ``timezone``
       configuration option is also used.
 
+  .. versionadded:: 1.8  added 'epoch'
+
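+  For example, to prefix migration file names with the epoch (an
+  illustrative value)::
+
+      file_template = %%(epoch)s_%%(rev)s_%%(slug)s
+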
 * ``timezone`` - an optional timezone name (e.g. ``UTC``, ``EST5EDT``, etc.)
   that will be applied to the timestamp which renders inside the migration
   file's comment as well as within the filename. This option requires installing
@@ -567,10 +586,29 @@ to get the current migration::
 
   $ alembic history -r-3:current
 
+.. note::
+
+   As illustrated above, to use ranges that start with a negative number (i.e.
+   a dash), due to a
+   `bug in argparse <https://github.com/python/cpython/issues/53580>`_, either
+   the syntax ``-r-<base>:<head>`` must be used, without any space, as above::
+
+     $ alembic history -r-3:current
+
+   or if using ``--rev-range``, an equals sign must be used::
+
+     $ alembic history --rev-range=-3:current
+
+   Using quotes or escape symbols will not work if there's a space after
+   the argument name.
+
 View all revisions from 1975 to the head::
 
   $ alembic history -r1975ea:
 
 Downgrading
 ===========
 
diff -pruN 1.7.6-1/.github/FUNDING.yml 1.8.1-2/.github/FUNDING.yml
--- 1.7.6-1/.github/FUNDING.yml	1970-01-01 00:00:00.000000000 +0000
+++ 1.8.1-2/.github/FUNDING.yml	2022-07-13 14:17:20.000000000 +0000
@@ -0,0 +1,6 @@
+# These are supported funding model platforms
+
+github: alembic
+patreon: zzzeek
+tidelift: "pypi/SQLAlchemy"
+
diff -pruN 1.7.6-1/.github/workflows/run-on-pr.yaml 1.8.1-2/.github/workflows/run-on-pr.yaml
--- 1.7.6-1/.github/workflows/run-on-pr.yaml	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/.github/workflows/run-on-pr.yaml	2022-07-13 14:17:20.000000000 +0000
@@ -12,6 +12,9 @@ env:
   # global env to all steps
   TOX_WORKERS: -n2
 
+permissions:
+  contents: read
+
 jobs:
   run-test-amd64:
     name: ${{ matrix.python-version }}-${{ matrix.sqlalchemy }}-${{ matrix.os }}
diff -pruN 1.7.6-1/.github/workflows/run-test.yaml 1.8.1-2/.github/workflows/run-test.yaml
--- 1.7.6-1/.github/workflows/run-test.yaml	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/.github/workflows/run-test.yaml	2022-07-13 14:17:20.000000000 +0000
@@ -15,6 +15,9 @@ env:
   # global env to all steps
   TOX_WORKERS: -n2
 
+permissions:
+  contents: read
+
 jobs:
   run-test:
     name: ${{ matrix.python-version }}-${{ matrix.sqlalchemy }}-${{ matrix.os }}
@@ -27,7 +30,6 @@ jobs:
           - "windows-latest"
           - "macos-latest"
         python-version:
-          - "3.6"
           - "3.7"
           - "3.8"
           - "3.9"
@@ -37,11 +39,6 @@ jobs:
           - sqla14
           - sqlamain
 
-        exclude:
-          # main no longer support 3.6
-          - sqlalchemy: sqlamain
-            python-version: "3.6"
-
       fail-fast: false
 
     # steps to run in each job. Some are github actions, others run shell commands
diff -pruN 1.7.6-1/MANIFEST.in 1.8.1-2/MANIFEST.in
--- 1.7.6-1/MANIFEST.in	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/MANIFEST.in	2022-07-13 14:17:20.000000000 +0000
@@ -7,6 +7,4 @@ recursive-include tools *.py
 include README* LICENSE CHANGES* tox.ini
 
 prune docs/build/output
-exclude pyproject.toml
-
 
diff -pruN 1.7.6-1/.pre-commit-config.yaml 1.8.1-2/.pre-commit-config.yaml
--- 1.7.6-1/.pre-commit-config.yaml	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/.pre-commit-config.yaml	2022-07-13 14:17:20.000000000 +0000
@@ -2,7 +2,7 @@
 # See https://pre-commit.com/hooks.html for more hooks
 repos:
 -   repo: https://github.com/python/black
-    rev: 21.5b1
+    rev: 22.3.0
     hooks:
     -   id: black
 
diff -pruN 1.7.6-1/pyproject.toml 1.8.1-2/pyproject.toml
--- 1.7.6-1/pyproject.toml	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/pyproject.toml	2022-07-13 14:17:20.000000000 +0000
@@ -1,2 +1,23 @@
+[build-system]
+build-backend = "setuptools.build_meta"
+requires = [
+    "setuptools>=47",
+]
+
 [tool.black]
 line-length = 79
+
+[tool.mypy]
+
+exclude = [
+    'alembic/template',
+    'alembic.testing.*',
+]
+show_error_codes = true
+
+[[tool.mypy.overrides]]
+module = [
+    'mako.*',
+    'sqlalchemy.testing.*'
+]
+ignore_missing_imports = true
diff -pruN 1.7.6-1/setup.cfg 1.8.1-2/setup.cfg
--- 1.7.6-1/setup.cfg	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/setup.cfg	2022-07-13 14:17:20.000000000 +0000
@@ -1,12 +1,7 @@
 [metadata]
 
 name = alembic
-
-# version comes from setup.py; setuptools
-# can't read the "attr:" here without importing
-# until version 47.0.0 which is too recent
-
-
+version = attr: alembic.__version__
 description = A database migration tool for SQLAlchemy.
 long_description = file: README.rst
 long_description_content_type = text/x-rst
@@ -14,7 +9,7 @@ url=https://alembic.sqlalchemy.org
 author = Mike Bayer
 author_email = mike_mp@zzzcomputing.com
 license = MIT
-license_file = LICENSE
+license_files = LICENSE
 
 
 classifiers =
@@ -25,19 +20,23 @@ classifiers =
     Operating System :: OS Independent
     Programming Language :: Python
     Programming Language :: Python :: 3
-    Programming Language :: Python :: 3.6
     Programming Language :: Python :: 3.7
     Programming Language :: Python :: 3.8
     Programming Language :: Python :: 3.9
+    Programming Language :: Python :: 3.10
     Programming Language :: Python :: Implementation :: CPython
     Programming Language :: Python :: Implementation :: PyPy
     Topic :: Database :: Front-Ends
+project_urls =
+    Source = https://github.com/sqlalchemy/alembic/
+    Documentation = https://alembic.sqlalchemy.org/en/latest/
+    Issue Tracker = https://github.com/sqlalchemy/alembic/issues/
 
 [options]
 packages = find:
 include_package_data = true
 zip_safe = false
-python_requires = >=3.6
+python_requires = >=3.7
 
 install_requires =
     SQLAlchemy>=1.3.0
@@ -117,6 +116,8 @@ oracle8=oracle://scott:tiger@127.0.0.1:1
 [tool:pytest]
 addopts= --tb native -v -r sfxX -p no:warnings -p no:logging --maxfail=25
 python_files=tests/test_*.py
+markers =
+    backend: tests that should run on all backends; typically dialect-sensitive
 
 [mypy]
 show_error_codes = True
diff -pruN 1.7.6-1/setup.py 1.8.1-2/setup.py
--- 1.7.6-1/setup.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/setup.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,36 +1,7 @@
-import os
-import re
-import sys
-
+from setuptools import __version__
 from setuptools import setup
-from setuptools.command.test import test as TestCommand
-
-
-v = open(os.path.join(os.path.dirname(__file__), "alembic", "__init__.py"))
-VERSION = (
-    re.compile(r""".*__version__ = ["'](.*?)["']""", re.S)
-    .match(v.read())
-    .group(1)
-)
-v.close()
-
-
-class UseTox(TestCommand):
-    RED = 31
-    RESET_SEQ = "\033[0m"
-    BOLD_SEQ = "\033[1m"
-    COLOR_SEQ = "\033[1;%dm"
-
-    def run_tests(self):
-        sys.stderr.write(
-            "%s%spython setup.py test is deprecated by pypa.  Please invoke "
-            "'tox' with no arguments for a basic test run.\n%s"
-            % (self.COLOR_SEQ % self.RED, self.BOLD_SEQ, self.RESET_SEQ)
-        )
-        sys.exit(1)
 
+if int(__version__.partition(".")[0]) < 47:
+    raise RuntimeError(f"Setuptools >= 47 required. Found {__version__}")
 
-setup(
-    version=VERSION,
-    cmdclass={"test": UseTox},
-)
+setup()
diff -pruN 1.7.6-1/tests/requirements.py 1.8.1-2/tests/requirements.py
--- 1.7.6-1/tests/requirements.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/tests/requirements.py	2022-07-13 14:17:20.000000000 +0000
@@ -124,7 +124,15 @@ class DefaultRequirements(SuiteRequireme
 
     @property
     def reflects_unique_constraints_unambiguously(self):
-        return exclusions.fails_on(["mysql", "mariadb", "oracle"])
+        return exclusions.fails_on(["mysql", "mariadb", "oracle", "mssql"])
+
+    @property
+    def reports_unique_constraints_as_indexes(self):
+        return exclusions.only_on(["mysql", "mariadb", "oracle"])
+
+    @property
+    def reports_unnamed_constraints(self):
+        return exclusions.skip_if(["sqlite"])
 
     @property
     def reflects_indexes_w_sorting(self):
@@ -170,6 +178,10 @@ class DefaultRequirements(SuiteRequireme
         return exclusions.only_on(["mssql"])
 
     @property
+    def covering_indexes(self):
+        return exclusions.only_on(["postgresql >= 11", "mssql"])
+
+    @property
     def postgresql_uuid_ossp(self):
         def check_uuid_ossp(config):
             if not exclusions.against(config, "postgresql"):
@@ -392,4 +404,9 @@ class DefaultRequirements(SuiteRequireme
         version = exclusions.only_if(
             lambda _: compat.py39, "python 3.9 is required"
         )
-        return imports + version
+
+        sqlalchemy = exclusions.only_if(
+            lambda _: sqla_compat.sqla_2, "sqlalchemy 2 is required"
+        )
+
+        return imports + version + sqlalchemy
diff -pruN 1.7.6-1/tests/test_autogen_diffs.py 1.8.1-2/tests/test_autogen_diffs.py
--- 1.7.6-1/tests/test_autogen_diffs.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/tests/test_autogen_diffs.py	2022-07-13 14:17:20.000000000 +0000
@@ -825,6 +825,7 @@ class CompareTypeSpecificityTest(TestBas
         (VARCHAR(30), String(30), False),
         (VARCHAR(30), String(40), True),
         (VARCHAR(30), Integer(), True),
+        (VARCHAR(30), String(), False),
         (Text(), String(255), True),
         # insp + metadata types same number of
         # args but are different; they're different
diff -pruN 1.7.6-1/tests/test_autogen_indexes.py 1.8.1-2/tests/test_autogen_indexes.py
--- 1.7.6-1/tests/test_autogen_indexes.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/tests/test_autogen_indexes.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,7 +1,6 @@
 from sqlalchemy import Column
 from sqlalchemy import ForeignKey
 from sqlalchemy import ForeignKeyConstraint
-from sqlalchemy import func
 from sqlalchemy import Index
 from sqlalchemy import Integer
 from sqlalchemy import MetaData
@@ -13,10 +12,10 @@ from sqlalchemy import UniqueConstraint
 from sqlalchemy.sql.expression import column
 from sqlalchemy.sql.expression import desc
 
-from alembic.testing import assertions
 from alembic.testing import combinations
 from alembic.testing import config
 from alembic.testing import eq_
+from alembic.testing import exclusions
 from alembic.testing import schemacompare
 from alembic.testing import TestBase
 from alembic.testing import util
@@ -24,15 +23,15 @@ from alembic.testing.env import staging_
 from alembic.testing.suite._autogen_fixtures import AutogenFixtureTest
 from alembic.util import sqla_compat
 
-# TODO: create new suites that are taking tests from this suite, with a
-#       separate class for AutogenIndexes, AutogenUniqueConstraint, and a
-#       subset of the tests here. @zzzeek can work on this at a later point.
-#       (2021-06-10)
-
 
 class NoUqReflection:
+    """mixin used to simulate dialects where unique constraints are
+    not reflected."""
+
     __requires__ = ()
 
+    reports_unique_constraints = False
+
     def setUp(self):
         staging_env()
         self.bind = eng = util.testing_engine()
@@ -42,19 +41,32 @@ class NoUqReflection:
 
         eng.dialect.get_unique_constraints = unimpl
 
-    def test_add_ix_on_table_create(self):
-        return super(NoUqReflection, self).test_add_ix_on_table_create()
 
-    def test_add_idx_non_col(self):
-        return super(NoUqReflection, self).test_add_idx_non_col()
+class AutogenerateUniqueIndexTest(AutogenFixtureTest, TestBase):
+    """tests that involve unique constraint reflection, or the lack of
+    this feature and the expected behaviors, and its interaction with index
+    reflection.
 
+    Tests that do not involve unique constraint reflection, but involve
+    indexes, should go into AutogenerateIndexTest.
 
-class AutogenerateUniqueIndexTest(AutogenFixtureTest, TestBase):
-    reports_unique_constraints = True
-    reports_unique_constraints_as_indexes = False
+    """
 
-    __requires__ = ("unique_constraint_reflection",)
-    __only_on__ = "sqlite"
+    __backend__ = True
+
+    @property
+    def reports_unique_constraints(self):
+        return config.requirements.unique_constraint_reflection.enabled
+
+    @property
+    def reports_unique_constraints_as_indexes(self):
+        return (
+            config.requirements.reports_unique_constraints_as_indexes.enabled
+        )
+
+    @property
+    def reports_unnamed_constraints(self):
+        return config.requirements.reports_unnamed_constraints.enabled
 
     def test_index_flag_becomes_named_unique_constraint(self):
         m1 = MetaData()
@@ -196,32 +208,6 @@ class AutogenerateUniqueIndexTest(Autoge
 
         eq_(diffs, [])
 
-    def test_new_table_added(self):
-        m1 = MetaData()
-        m2 = MetaData()
-        Table(
-            "extra",
-            m2,
-            Column("foo", Integer, index=True),
-            Column("bar", Integer),
-            Index("newtable_idx", "bar"),
-        )
-
-        diffs = self._fixture(m1, m2)
-
-        eq_(diffs[0][0], "add_table")
-
-        eq_(diffs[1][0], "add_index")
-        eq_(
-            sqla_compat._get_constraint_final_name(
-                diffs[1][1], config.db.dialect
-            ),
-            "ix_extra_foo",
-        )
-
-        eq_(diffs[2][0], "add_index")
-        eq_(diffs[2][1].name, "newtable_idx")
-
     def test_named_cols_changed(self):
         m1 = MetaData()
         m2 = MetaData()
@@ -339,41 +325,6 @@ class AutogenerateUniqueIndexTest(Autoge
         eq_(diffs, [])
 
     @config.requirements.long_names
-    def test_nothing_ix_changed_labels_were_truncated(self):
-        m1 = MetaData(
-            naming_convention={
-                "ix": "index_%(table_name)s_%(column_0_label)s",
-                "uq": "unique_%(table_name)s_%(column_0_label)s",
-            }
-        )
-        m2 = MetaData(
-            naming_convention={
-                "ix": "index_%(table_name)s_%(column_0_label)s",
-                "uq": "unique_%(table_name)s_%(column_0_label)s",
-            }
-        )
-
-        Table(
-            "nothing_changed",
-            m1,
-            Column("id1", Integer, primary_key=True),
-            Column("id2", Integer, primary_key=True),
-            Column("a_particularly_long_column_name", String(20), index=True),
-            mysql_engine="InnoDB",
-        )
-
-        Table(
-            "nothing_changed",
-            m2,
-            Column("id1", Integer, primary_key=True),
-            Column("id2", Integer, primary_key=True),
-            Column("a_particularly_long_column_name", String(20), index=True),
-            mysql_engine="InnoDB",
-        )
-        diffs = self._fixture(m1, m2, max_identifier_length=30)
-        eq_(diffs, [])
-
-    @config.requirements.long_names
     def test_nothing_changed_uq_w_mixed_case_nconv_name(self):
         m1 = MetaData(
             naming_convention={
@@ -433,64 +384,6 @@ class AutogenerateUniqueIndexTest(Autoge
         diffs = self._fixture(m1, m2)
         eq_(diffs, [])
 
-    def test_nothing_changed_ix_w_mixed_case_plain_name(self):
-        m1 = MetaData()
-        m2 = MetaData()
-
-        Table(
-            "nothing_changed",
-            m1,
-            Column("id", Integer, primary_key=True),
-            Column("x", Integer),
-            Index("SomeIndex", "x"),
-            mysql_engine="InnoDB",
-        )
-
-        Table(
-            "nothing_changed",
-            m2,
-            Column("id", Integer, primary_key=True),
-            Column("x", Integer),
-            Index("SomeIndex", "x"),
-            mysql_engine="InnoDB",
-        )
-        diffs = self._fixture(m1, m2)
-        eq_(diffs, [])
-
-    @config.requirements.long_names
-    def test_nothing_changed_ix_w_mixed_case_nconv_name(self):
-        m1 = MetaData(
-            naming_convention={
-                "ix": "index_%(table_name)s_%(column_0_label)s",
-                "uq": "unique_%(table_name)s_%(column_0_label)s",
-            }
-        )
-        m2 = MetaData(
-            naming_convention={
-                "ix": "index_%(table_name)s_%(column_0_label)s",
-                "uq": "unique_%(table_name)s_%(column_0_label)s",
-            }
-        )
-
-        Table(
-            "NothingChanged",
-            m1,
-            Column("id", Integer, primary_key=True),
-            Column("XCol", Integer, index=True),
-            mysql_engine="InnoDB",
-        )
-
-        Table(
-            "NothingChanged",
-            m2,
-            Column("id", Integer, primary_key=True),
-            Column("XCol", Integer, index=True),
-            mysql_engine="InnoDB",
-        )
-
-        diffs = self._fixture(m1, m2)
-        eq_(diffs, [])
-
     def test_nothing_changed_two(self):
         m1 = MetaData()
         m2 = MetaData()
@@ -557,478 +450,630 @@ class AutogenerateUniqueIndexTest(Autoge
         diffs = self._fixture(m1, m2)
         eq_(diffs, [])
 
-    def test_nothing_changed_index_w_colkeys(self):
+    @config.requirements.unique_constraint_reflection
+    def test_uq_casing_convention_changed_so_put_drops_first(self):
         m1 = MetaData()
         m2 = MetaData()
 
+        uq1 = UniqueConstraint("x", name="SomeCasingConvention")
         Table(
-            "nothing_changed",
+            "new_idx",
             m1,
-            Column("x", String(20), key="nx"),
-            Index("foobar", "nx"),
+            Column("id1", Integer, primary_key=True),
+            Column("x", String(20)),
+            uq1,
         )
 
+        uq2 = UniqueConstraint("x", name="somecasingconvention")
         Table(
-            "nothing_changed",
+            "new_idx",
             m2,
-            Column("x", String(20), key="nx"),
-            Index("foobar", "nx"),
+            Column("id1", Integer, primary_key=True),
+            Column("x", String(20)),
+            uq2,
         )
 
         diffs = self._fixture(m1, m2)
-        eq_(diffs, [])
 
-    def test_nothing_changed_index_named_as_column(self):
+        if self.reports_unique_constraints_as_indexes:
+            eq_(
+                [(d[0], d[1].name) for d in diffs],
+                [
+                    ("remove_index", "SomeCasingConvention"),
+                    ("add_constraint", "somecasingconvention"),
+                ],
+            )
+        else:
+            eq_(
+                [(d[0], d[1].name) for d in diffs],
+                [
+                    ("remove_constraint", "SomeCasingConvention"),
+                    ("add_constraint", "somecasingconvention"),
+                ],
+            )
+
+    def test_drop_table_w_uq_constraint(self):
         m1 = MetaData()
         m2 = MetaData()
 
         Table(
-            "nothing_changed",
+            "some_table",
             m1,
-            Column("id1", Integer, primary_key=True),
-            Column("id2", Integer, primary_key=True),
-            Column("x", String(20)),
-            Index("x", "x"),
-        )
-
-        Table(
-            "nothing_changed",
-            m2,
-            Column("id1", Integer, primary_key=True),
-            Column("id2", Integer, primary_key=True),
+            Column("id", Integer, primary_key=True),
             Column("x", String(20)),
-            Index("x", "x"),
+            Column("y", String(20)),
+            UniqueConstraint("y", name="uq_y"),
         )
 
         diffs = self._fixture(m1, m2)
-        eq_(diffs, [])
 
-    def test_nothing_changed_implicit_fk_index_named(self):
+        if self.reports_unique_constraints_as_indexes:
+            # for MySQL this UQ will look like an index, so
+            # make sure it at least sets it up correctly
+            eq_(diffs[0][0], "remove_index")
+            eq_(diffs[1][0], "remove_table")
+            eq_(len(diffs), 2)
+
+            constraints = [
+                c
+                for c in diffs[1][1].constraints
+                if isinstance(c, UniqueConstraint)
+            ]
+            eq_(len(constraints), 0)
+        else:
+            eq_(diffs[0][0], "remove_table")
+            eq_(len(diffs), 1)
+            constraints = [
+                c
+                for c in diffs[0][1].constraints
+                if isinstance(c, UniqueConstraint)
+            ]
+            if self.reports_unique_constraints:
+                eq_(len(constraints), 1)
+
+    @config.requirements.unique_constraint_reflection
+    def test_unnamed_cols_changed(self):
         m1 = MetaData()
         m2 = MetaData()
-
-        Table(
-            "nothing_changed",
-            m1,
-            Column("id", Integer, primary_key=True),
-            Column(
-                "other_id",
-                ForeignKey("nc2.id", name="fk_my_table_other_table"),
-                nullable=False,
-            ),
-            Column("foo", Integer),
-            mysql_engine="InnoDB",
-        )
         Table(
-            "nc2",
+            "col_change",
             m1,
-            Column("id", Integer, primary_key=True),
-            mysql_engine="InnoDB",
-        )
-
-        Table(
-            "nothing_changed",
-            m2,
-            Column("id", Integer, primary_key=True),
-            Column(
-                "other_id",
-                ForeignKey("nc2.id", name="fk_my_table_other_table"),
-                nullable=False,
-            ),
-            Column("foo", Integer),
-            mysql_engine="InnoDB",
+            Column("x", Integer),
+            Column("y", Integer),
+            UniqueConstraint("x"),
         )
         Table(
-            "nc2",
+            "col_change",
             m2,
-            Column("id", Integer, primary_key=True),
-            mysql_engine="InnoDB",
+            Column("x", Integer),
+            Column("y", Integer),
+            UniqueConstraint("x", "y"),
         )
+
         diffs = self._fixture(m1, m2)
-        eq_(diffs, [])
 
-    def test_nothing_changed_implicit_composite_fk_index_named(self):
+        diffs = set(
+            (
+                cmd,
+                isinstance(obj, (UniqueConstraint, Index))
+                if obj.name is not None
+                else False,
+            )
+            for cmd, obj in diffs
+        )
+
+        if self.reports_unnamed_constraints:
+            if self.reports_unique_constraints_as_indexes:
+                eq_(
+                    diffs,
+                    set([("remove_index", True), ("add_constraint", False)]),
+                )
+            else:
+                eq_(
+                    diffs,
+                    set(
+                        [
+                            ("remove_constraint", True),
+                            ("add_constraint", False),
+                        ]
+                    ),
+                )
+
+    def test_remove_named_unique_index(self):
         m1 = MetaData()
         m2 = MetaData()
 
         Table(
-            "nothing_changed",
+            "remove_idx",
             m1,
-            Column("id", Integer, primary_key=True),
-            Column("other_id_1", Integer),
-            Column("other_id_2", Integer),
-            Column("foo", Integer),
-            ForeignKeyConstraint(
-                ["other_id_1", "other_id_2"],
-                ["nc2.id1", "nc2.id2"],
-                name="fk_my_table_other_table",
+            Column("x", Integer),
+            Index("xidx", "x", unique=True),
+        )
+        Table("remove_idx", m2, Column("x", Integer))
+
+        diffs = self._fixture(m1, m2)
+
+        if self.reports_unique_constraints:
+            diffs = set((cmd, obj.name) for cmd, obj in diffs)
+            eq_(diffs, set([("remove_index", "xidx")]))
+        else:
+            eq_(diffs, [])
+
+    def test_remove_named_unique_constraint(self):
+        m1 = MetaData()
+        m2 = MetaData()
+
+        Table(
+            "remove_idx",
+            m1,
+            Column("x", Integer),
+            UniqueConstraint("x", name="xidx"),
+        )
+        Table("remove_idx", m2, Column("x", Integer))
+
+        diffs = self._fixture(m1, m2)
+
+        if self.reports_unique_constraints:
+            diffs = set((cmd, obj.name) for cmd, obj in diffs)
+            if self.reports_unique_constraints_as_indexes:
+                eq_(diffs, set([("remove_index", "xidx")]))
+            else:
+                eq_(diffs, set([("remove_constraint", "xidx")]))
+        else:
+            eq_(diffs, [])
+
+    def test_dont_add_uq_on_table_create(self):
+        m1 = MetaData()
+        m2 = MetaData()
+        Table("no_uq", m2, Column("x", String(50), unique=True))
+        diffs = self._fixture(m1, m2)
+
+        eq_(diffs[0][0], "add_table")
+        eq_(len(diffs), 1)
+
+        # checking for dupes also
+        eq_(
+            sorted(
+                [type(cons) for cons in diffs[0][1].constraints],
+                key=lambda c: c.__name__,
             ),
-            mysql_engine="InnoDB",
+            [PrimaryKeyConstraint, UniqueConstraint],
         )
+
+    @config.requirements.reflects_unique_constraints_unambiguously
+    def test_dont_add_uq_on_reverse_table_drop(self):
+        m1 = MetaData()
+        m2 = MetaData()
+        Table("no_uq", m1, Column("x", String(50), unique=True))
+        diffs = self._fixture(m1, m2)
+
+        eq_(diffs[0][0], "remove_table")
+        eq_(len(diffs), 1)
+
+        # because the drop comes from reflection, the "unique=True" flag
+        # is lost in any case.
+        eq_(
+            sorted(
+                [type(cons) for cons in diffs[0][1].constraints],
+                key=lambda c: c.__name__,
+            ),
+            [PrimaryKeyConstraint, UniqueConstraint],
+        )
+
+    def test_add_uq_ix_on_table_create(self):
+        m1 = MetaData()
+        m2 = MetaData()
+        Table("add_ix", m2, Column("x", String(50), unique=True, index=True))
+        diffs = self._fixture(m1, m2)
+
+        eq_(diffs[0][0], "add_table")
+        eq_(len(diffs), 2)
+        assert UniqueConstraint not in set(
+            type(c) for c in diffs[0][1].constraints
+        )
+
+        eq_(diffs[1][0], "add_index")
+        d_table = diffs[0][1]
+        d_idx = diffs[1][1]
+        eq_(d_idx.unique, True)
+
+        # check for dupes
+        eq_(len(diffs), 2)
+        assert not d_table.indexes
+
+
+class AutogenerateIndexTest(AutogenFixtureTest, TestBase):
+    """tests involving indexes but not unique constraints, as mssql
+    doesn't have these (?)...at least the dialect seems to not
+    reflect unique constraints which seems odd
+
+    """
+
+    __backend__ = True
+
+    def test_nothing_changed_one(self):
+        m1 = MetaData()
+        m2 = MetaData()
+
         Table(
-            "nc2",
+            "nothing_changed",
             m1,
-            Column("id1", Integer, primary_key=True),
-            Column("id2", Integer, primary_key=True),
-            mysql_engine="InnoDB",
+            Column("x", String(20), index=True),
         )
 
         Table(
             "nothing_changed",
             m2,
-            Column("id", Integer, primary_key=True),
-            Column("other_id_1", Integer),
-            Column("other_id_2", Integer),
-            Column("foo", Integer),
-            ForeignKeyConstraint(
-                ["other_id_1", "other_id_2"],
-                ["nc2.id1", "nc2.id2"],
-                name="fk_my_table_other_table",
-            ),
+            Column("x", String(20), index=True),
+        )
+
+        diffs = self._fixture(m1, m2)
+        eq_(diffs, [])
+
+    @config.requirements.long_names
+    def test_nothing_ix_changed_labels_were_truncated(self):
+        m1 = MetaData(
+            naming_convention={
+                "ix": "index_%(table_name)s_%(column_0_label)s",
+                "uq": "unique_%(table_name)s_%(column_0_label)s",
+            }
+        )
+        m2 = MetaData(
+            naming_convention={
+                "ix": "index_%(table_name)s_%(column_0_label)s",
+                "uq": "unique_%(table_name)s_%(column_0_label)s",
+            }
+        )
+
+        Table(
+            "nothing_changed",
+            m1,
+            Column("id1", Integer, primary_key=True),
+            Column("id2", Integer, primary_key=True),
+            Column("a_particularly_long_column_name", String(20), index=True),
             mysql_engine="InnoDB",
         )
+
         Table(
-            "nc2",
+            "nothing_changed",
             m2,
             Column("id1", Integer, primary_key=True),
             Column("id2", Integer, primary_key=True),
+            Column("a_particularly_long_column_name", String(20), index=True),
             mysql_engine="InnoDB",
         )
-        diffs = self._fixture(m1, m2)
+        diffs = self._fixture(m1, m2, max_identifier_length=30)
         eq_(diffs, [])
 
-    def test_ix_casing_convention_changed_so_put_drops_first(self):
+    def test_nothing_changed_ix_w_mixed_case_plain_name(self):
         m1 = MetaData()
         m2 = MetaData()
 
-        ix1 = Index("SomeCasingConvention", "x")
         Table(
-            "new_idx",
+            "nothing_changed",
             m1,
-            Column("id1", Integer, primary_key=True),
-            Column("x", String(20)),
-            ix1,
+            Column("id", Integer, primary_key=True),
+            Column("x", Integer),
+            Index("SomeIndex", "x"),
+            mysql_engine="InnoDB",
         )
 
-        ix2 = Index("somecasingconvention", "x")
         Table(
-            "new_idx",
+            "nothing_changed",
             m2,
-            Column("id1", Integer, primary_key=True),
-            Column("x", String(20)),
-            ix2,
+            Column("id", Integer, primary_key=True),
+            Column("x", Integer),
+            Index("SomeIndex", "x"),
+            mysql_engine="InnoDB",
         )
-
         diffs = self._fixture(m1, m2)
+        eq_(diffs, [])
 
-        eq_(
-            [(d[0], d[1].name) for d in diffs],
-            [
-                ("remove_index", "SomeCasingConvention"),
-                ("add_index", "somecasingconvention"),
-            ],
+    @config.requirements.long_names
+    def test_nothing_changed_ix_w_mixed_case_nconv_name(self):
+        m1 = MetaData(
+            naming_convention={
+                "ix": "index_%(table_name)s_%(column_0_label)s",
+                "uq": "unique_%(table_name)s_%(column_0_label)s",
+            }
+        )
+        m2 = MetaData(
+            naming_convention={
+                "ix": "index_%(table_name)s_%(column_0_label)s",
+                "uq": "unique_%(table_name)s_%(column_0_label)s",
+            }
         )
 
-    def test_uq_casing_convention_changed_so_put_drops_first(self):
-        m1 = MetaData()
-        m2 = MetaData()
-
-        uq1 = UniqueConstraint("x", name="SomeCasingConvention")
         Table(
-            "new_idx",
+            "NothingChanged",
             m1,
-            Column("id1", Integer, primary_key=True),
-            Column("x", String(20)),
-            uq1,
+            Column("id", Integer, primary_key=True),
+            Column("XCol", Integer, index=True),
+            mysql_engine="InnoDB",
         )
 
-        uq2 = UniqueConstraint("x", name="somecasingconvention")
         Table(
-            "new_idx",
+            "NothingChanged",
             m2,
-            Column("id1", Integer, primary_key=True),
-            Column("x", String(20)),
-            uq2,
+            Column("id", Integer, primary_key=True),
+            Column("XCol", Integer, index=True),
+            mysql_engine="InnoDB",
         )
 
         diffs = self._fixture(m1, m2)
+        eq_(diffs, [])
 
-        if self.reports_unique_constraints_as_indexes:
-            eq_(
-                [(d[0], d[1].name) for d in diffs],
-                [
-                    ("remove_index", "SomeCasingConvention"),
-                    ("add_constraint", "somecasingconvention"),
-                ],
-            )
-        else:
-            eq_(
-                [(d[0], d[1].name) for d in diffs],
-                [
-                    ("remove_constraint", "SomeCasingConvention"),
-                    ("add_constraint", "somecasingconvention"),
-                ],
-            )
+    def test_new_table_added(self):
+        m1 = MetaData()
+        m2 = MetaData()
+        Table(
+            "extra",
+            m2,
+            Column("foo", Integer, index=True),
+            Column("bar", Integer),
+            Index("newtable_idx", "bar"),
+        )
 
-    def test_new_idx_index_named_as_column(self):
+        diffs = self._fixture(m1, m2)
+
+        eq_(diffs[0][0], "add_table")
+
+        eq_(diffs[1][0], "add_index")
+        eq_(
+            sqla_compat._get_constraint_final_name(
+                diffs[1][1], config.db.dialect
+            ),
+            "ix_extra_foo",
+        )
+
+        eq_(diffs[2][0], "add_index")
+        eq_(diffs[2][1].name, "newtable_idx")
+
+    def test_nothing_changed_index_w_colkeys(self):
         m1 = MetaData()
         m2 = MetaData()
 
         Table(
-            "new_idx",
+            "nothing_changed",
             m1,
-            Column("id1", Integer, primary_key=True),
-            Column("id2", Integer, primary_key=True),
-            Column("x", String(20)),
+            Column("x", String(20), key="nx"),
+            Index("foobar", "nx"),
         )
 
-        idx = Index("x", "x")
         Table(
-            "new_idx",
+            "nothing_changed",
             m2,
-            Column("id1", Integer, primary_key=True),
-            Column("id2", Integer, primary_key=True),
-            Column("x", String(20)),
-            idx,
+            Column("x", String(20), key="nx"),
+            Index("foobar", "nx"),
         )
 
         diffs = self._fixture(m1, m2)
-        eq_(diffs, [("add_index", schemacompare.CompareIndex(idx))])
+        eq_(diffs, [])
 
-    def test_removed_idx_index_named_as_column(self):
+    def test_nothing_changed_index_named_as_column(self):
         m1 = MetaData()
         m2 = MetaData()
 
-        idx = Index("x", "x")
         Table(
-            "new_idx",
+            "nothing_changed",
             m1,
             Column("id1", Integer, primary_key=True),
             Column("id2", Integer, primary_key=True),
             Column("x", String(20)),
-            idx,
+            Index("x", "x"),
         )
 
         Table(
-            "new_idx",
+            "nothing_changed",
             m2,
             Column("id1", Integer, primary_key=True),
             Column("id2", Integer, primary_key=True),
             Column("x", String(20)),
+            Index("x", "x"),
         )
 
         diffs = self._fixture(m1, m2)
-        eq_(diffs[0][0], "remove_index")
+        eq_(diffs, [])
 
-    def test_drop_table_w_indexes(self):
+    def test_nothing_changed_implicit_fk_index_named(self):
         m1 = MetaData()
         m2 = MetaData()
 
-        t = Table(
-            "some_table",
+        Table(
+            "nothing_changed",
             m1,
             Column("id", Integer, primary_key=True),
-            Column("x", String(20)),
-            Column("y", String(20)),
-        )
-        Index("xy_idx", t.c.x, t.c.y)
-        Index("y_idx", t.c.y)
-
-        diffs = self._fixture(m1, m2)
-        eq_(diffs[0][0], "remove_index")
-        eq_(diffs[1][0], "remove_index")
-        eq_(diffs[2][0], "remove_table")
-
-        eq_(
-            set([diffs[0][1].name, diffs[1][1].name]), set(["xy_idx", "y_idx"])
+            Column(
+                "other_id",
+                ForeignKey("nc2.id", name="fk_my_table_other_table"),
+                nullable=False,
+            ),
+            Column("foo", Integer),
+            mysql_engine="InnoDB",
         )
-
-    def test_drop_table_w_uq_constraint(self):
-        m1 = MetaData()
-        m2 = MetaData()
-
         Table(
-            "some_table",
+            "nc2",
             m1,
             Column("id", Integer, primary_key=True),
-            Column("x", String(20)),
-            Column("y", String(20)),
-            UniqueConstraint("y", name="uq_y"),
+            mysql_engine="InnoDB",
         )
 
+        Table(
+            "nothing_changed",
+            m2,
+            Column("id", Integer, primary_key=True),
+            Column(
+                "other_id",
+                ForeignKey("nc2.id", name="fk_my_table_other_table"),
+                nullable=False,
+            ),
+            Column("foo", Integer),
+            mysql_engine="InnoDB",
+        )
+        Table(
+            "nc2",
+            m2,
+            Column("id", Integer, primary_key=True),
+            mysql_engine="InnoDB",
+        )
         diffs = self._fixture(m1, m2)
+        eq_(diffs, [])
 
-        if self.reports_unique_constraints_as_indexes:
-            # for MySQL this UQ will look like an index, so
-            # make sure it at least sets it up correctly
-            eq_(diffs[0][0], "remove_index")
-            eq_(diffs[1][0], "remove_table")
-            eq_(len(diffs), 2)
-
-            constraints = [
-                c
-                for c in diffs[1][1].constraints
-                if isinstance(c, UniqueConstraint)
-            ]
-            eq_(len(constraints), 0)
-        else:
-            eq_(diffs[0][0], "remove_table")
-            eq_(len(diffs), 1)
-            constraints = [
-                c
-                for c in diffs[0][1].constraints
-                if isinstance(c, UniqueConstraint)
-            ]
-            if self.reports_unique_constraints:
-                eq_(len(constraints), 1)
-
-    def test_unnamed_cols_changed(self):
+    def test_nothing_changed_implicit_composite_fk_index_named(self):
         m1 = MetaData()
         m2 = MetaData()
+
         Table(
-            "col_change",
+            "nothing_changed",
             m1,
-            Column("x", Integer),
-            Column("y", Integer),
-            UniqueConstraint("x"),
+            Column("id", Integer, primary_key=True),
+            Column("other_id_1", Integer),
+            Column("other_id_2", Integer),
+            Column("foo", Integer),
+            ForeignKeyConstraint(
+                ["other_id_1", "other_id_2"],
+                ["nc2.id1", "nc2.id2"],
+                name="fk_my_table_other_table",
+            ),
+            mysql_engine="InnoDB",
         )
         Table(
-            "col_change",
-            m2,
-            Column("x", Integer),
-            Column("y", Integer),
-            UniqueConstraint("x", "y"),
+            "nc2",
+            m1,
+            Column("id1", Integer, primary_key=True),
+            Column("id2", Integer, primary_key=True),
+            mysql_engine="InnoDB",
         )
 
-        diffs = self._fixture(m1, m2)
-
-        diffs = set(
-            (
-                cmd,
-                isinstance(obj, (UniqueConstraint, Index))
-                if obj.name is not None
-                else False,
-            )
-            for cmd, obj in diffs
-        )
-        if self.reports_unnamed_constraints:
-            if self.reports_unique_constraints_as_indexes:
-                eq_(
-                    diffs,
-                    set([("remove_index", True), ("add_constraint", False)]),
-                )
-            else:
-                eq_(
-                    diffs,
-                    set(
-                        [
-                            ("remove_constraint", True),
-                            ("add_constraint", False),
-                        ]
-                    ),
-                )
+        Table(
+            "nothing_changed",
+            m2,
+            Column("id", Integer, primary_key=True),
+            Column("other_id_1", Integer),
+            Column("other_id_2", Integer),
+            Column("foo", Integer),
+            ForeignKeyConstraint(
+                ["other_id_1", "other_id_2"],
+                ["nc2.id1", "nc2.id2"],
+                name="fk_my_table_other_table",
+            ),
+            mysql_engine="InnoDB",
+        )
+        Table(
+            "nc2",
+            m2,
+            Column("id1", Integer, primary_key=True),
+            Column("id2", Integer, primary_key=True),
+            mysql_engine="InnoDB",
+        )
+        diffs = self._fixture(m1, m2)
+        eq_(diffs, [])
 
-    def test_remove_named_unique_index(self):
+    def test_ix_casing_convention_changed_so_put_drops_first(self):
         m1 = MetaData()
         m2 = MetaData()
 
+        ix1 = Index("SomeCasingConvention", "x")
         Table(
-            "remove_idx",
+            "new_idx",
             m1,
-            Column("x", Integer),
-            Index("xidx", "x", unique=True),
+            Column("id1", Integer, primary_key=True),
+            Column("x", String(20)),
+            ix1,
+        )
+
+        ix2 = Index("somecasingconvention", "x")
+        Table(
+            "new_idx",
+            m2,
+            Column("id1", Integer, primary_key=True),
+            Column("x", String(20)),
+            ix2,
         )
-        Table("remove_idx", m2, Column("x", Integer))
 
         diffs = self._fixture(m1, m2)
 
-        if self.reports_unique_constraints:
-            diffs = set((cmd, obj.name) for cmd, obj in diffs)
-            eq_(diffs, set([("remove_index", "xidx")]))
-        else:
-            eq_(diffs, [])
+        eq_(
+            [(d[0], d[1].name) for d in diffs],
+            [
+                ("remove_index", "SomeCasingConvention"),
+                ("add_index", "somecasingconvention"),
+            ],
+        )
 
-    def test_remove_named_unique_constraint(self):
+    def test_new_idx_index_named_as_column(self):
         m1 = MetaData()
         m2 = MetaData()
 
         Table(
-            "remove_idx",
+            "new_idx",
             m1,
-            Column("x", Integer),
-            UniqueConstraint("x", name="xidx"),
+            Column("id1", Integer, primary_key=True),
+            Column("id2", Integer, primary_key=True),
+            Column("x", String(20)),
         )
-        Table("remove_idx", m2, Column("x", Integer))
 
-        diffs = self._fixture(m1, m2)
+        idx = Index("x", "x")
+        Table(
+            "new_idx",
+            m2,
+            Column("id1", Integer, primary_key=True),
+            Column("id2", Integer, primary_key=True),
+            Column("x", String(20)),
+            idx,
+        )
 
-        if self.reports_unique_constraints:
-            diffs = set((cmd, obj.name) for cmd, obj in diffs)
-            if self.reports_unique_constraints_as_indexes:
-                eq_(diffs, set([("remove_index", "xidx")]))
-            else:
-                eq_(diffs, set([("remove_constraint", "xidx")]))
-        else:
-            eq_(diffs, [])
+        diffs = self._fixture(m1, m2)
+        eq_(diffs, [("add_index", schemacompare.CompareIndex(idx))])
 
-    def test_dont_add_uq_on_table_create(self):
+    @exclusions.fails_on(["mysql", "mariadb"])
+    def test_removed_idx_index_named_as_column(self):
         m1 = MetaData()
         m2 = MetaData()
-        Table("no_uq", m2, Column("x", String(50), unique=True))
-        diffs = self._fixture(m1, m2)
 
-        eq_(diffs[0][0], "add_table")
-        eq_(len(diffs), 1)
+        idx = Index("x", "x")
+        Table(
+            "new_idx",
+            m1,
+            Column("id1", Integer, primary_key=True),
+            Column("id2", Integer, primary_key=True),
+            Column("x", String(20)),
+            idx,
+        )
 
-        # checking for dupes also
-        eq_(
-            sorted(
-                [type(cons) for cons in diffs[0][1].constraints],
-                key=lambda c: c.__name__,
-            ),
-            [PrimaryKeyConstraint, UniqueConstraint],
+        Table(
+            "new_idx",
+            m2,
+            Column("id1", Integer, primary_key=True),
+            Column("id2", Integer, primary_key=True),
+            Column("x", String(20)),
         )
 
-    @config.requirements.reflects_unique_constraints_unambiguously
-    def test_dont_add_uq_on_reverse_table_drop(self):
-        m1 = MetaData()
-        m2 = MetaData()
-        Table("no_uq", m1, Column("x", String(50), unique=True))
         diffs = self._fixture(m1, m2)
+        eq_(diffs[0][0], "remove_index")
 
-        eq_(diffs[0][0], "remove_table")
-        eq_(len(diffs), 1)
-
-        # because the drop comes from reflection, the "unique=True" flag
-        # is lost in any case.
-        eq_(
-            sorted(
-                [type(cons) for cons in diffs[0][1].constraints],
-                key=lambda c: c.__name__,
-            ),
-            [PrimaryKeyConstraint, UniqueConstraint],
-        )
-
-    def test_add_uq_ix_on_table_create(self):
+    def test_drop_table_w_indexes(self):
         m1 = MetaData()
         m2 = MetaData()
-        Table("add_ix", m2, Column("x", String(50), unique=True, index=True))
-        diffs = self._fixture(m1, m2)
 
-        eq_(diffs[0][0], "add_table")
-        eq_(len(diffs), 2)
-        assert UniqueConstraint not in set(
-            type(c) for c in diffs[0][1].constraints
+        t = Table(
+            "some_table",
+            m1,
+            Column("id", Integer, primary_key=True),
+            Column("x", String(20)),
+            Column("y", String(20)),
         )
+        Index("xy_idx", t.c.x, t.c.y)
+        Index("y_idx", t.c.y)
 
-        eq_(diffs[1][0], "add_index")
-        d_table = diffs[0][1]
-        d_idx = diffs[1][1]
-        eq_(d_idx.unique, True)
+        diffs = self._fixture(m1, m2)
+        eq_(diffs[0][0], "remove_index")
+        eq_(diffs[1][0], "remove_index")
+        eq_(diffs[2][0], "remove_table")
 
-        # check for dupes
-        eq_(len(diffs), 2)
-        assert not d_table.indexes
+        eq_(
+            set([diffs[0][1].name, diffs[1][1].name]), set(["xy_idx", "y_idx"])
+        )
 
     def test_add_ix_on_table_create(self):
         m1 = MetaData()
@@ -1106,237 +1151,36 @@ class AutogenerateUniqueIndexTest(Autoge
 
         eq_(diffs, [])
 
-
-class PGUniqueIndexTest(AutogenerateUniqueIndexTest):
-    reports_unnamed_constraints = True
-    __only_on__ = "postgresql"
-    __backend__ = True
-
-    def test_idx_added_schema(self):
-        m1 = MetaData()
-        m2 = MetaData()
-        Table("add_ix", m1, Column("x", String(50)), schema="test_schema")
-        Table(
-            "add_ix",
-            m2,
-            Column("x", String(50)),
-            Index("ix_1", "x"),
-            schema="test_schema",
-        )
-
-        diffs = self._fixture(m1, m2, include_schemas=True)
-        eq_(diffs[0][0], "add_index")
-        eq_(diffs[0][1].name, "ix_1")
-
-    def test_idx_unchanged_schema(self):
-        m1 = MetaData()
-        m2 = MetaData()
-        Table(
-            "add_ix",
-            m1,
-            Column("x", String(50)),
-            Index("ix_1", "x"),
-            schema="test_schema",
-        )
-        Table(
-            "add_ix",
-            m2,
-            Column("x", String(50)),
-            Index("ix_1", "x"),
-            schema="test_schema",
-        )
-
-        diffs = self._fixture(m1, m2, include_schemas=True)
-        eq_(diffs, [])
-
-    def test_uq_added_schema(self):
-        m1 = MetaData()
-        m2 = MetaData()
-        Table("add_uq", m1, Column("x", String(50)), schema="test_schema")
-        Table(
-            "add_uq",
-            m2,
-            Column("x", String(50)),
-            UniqueConstraint("x", name="ix_1"),
-            schema="test_schema",
-        )
-
-        diffs = self._fixture(m1, m2, include_schemas=True)
-        eq_(diffs[0][0], "add_constraint")
-        eq_(diffs[0][1].name, "ix_1")
-
-    def test_uq_unchanged_schema(self):
+    @config.requirements.covering_indexes
+    @config.requirements.sqlalchemy_14
+    def test_nothing_changed_covering_index(self):
         m1 = MetaData()
         m2 = MetaData()
-        Table(
-            "add_uq",
-            m1,
-            Column("x", String(50)),
-            UniqueConstraint("x", name="ix_1"),
-            schema="test_schema",
-        )
-        Table(
-            "add_uq",
-            m2,
-            Column("x", String(50)),
-            UniqueConstraint("x", name="ix_1"),
-            schema="test_schema",
-        )
-
-        diffs = self._fixture(m1, m2, include_schemas=True)
-        eq_(diffs, [])
 
-    @config.requirements.btree_gist
-    def test_exclude_const_unchanged(self):
-        from sqlalchemy.dialects.postgresql import TSRANGE, ExcludeConstraint
-
-        m1 = MetaData()
-        m2 = MetaData()
+        cov_opt = {f"{config.db.name}_include": ["y"]}
 
         Table(
-            "add_excl",
+            "nothing_changed",
             m1,
             Column("id", Integer, primary_key=True),
-            Column("period", TSRANGE),
-            ExcludeConstraint(("period", "&&"), name="quarters_period_excl"),
+            Column("x", Integer),
+            Column("y", Integer),
+            Index("SomeIndex", "x", **cov_opt),
         )
 
         Table(
-            "add_excl",
+            "nothing_changed",
             m2,
             Column("id", Integer, primary_key=True),
-            Column("period", TSRANGE),
-            ExcludeConstraint(("period", "&&"), name="quarters_period_excl"),
+            Column("x", Integer),
+            Column("y", Integer),
+            Index("SomeIndex", "x", **cov_opt),
         )
-
         diffs = self._fixture(m1, m2)
         eq_(diffs, [])
 
-    def test_same_tname_two_schemas(self):
-        m1 = MetaData()
-        m2 = MetaData()
-
-        Table("add_ix", m1, Column("x", String(50)), Index("ix_1", "x"))
-
-        Table("add_ix", m2, Column("x", String(50)), Index("ix_1", "x"))
-        Table("add_ix", m2, Column("x", String(50)), schema="test_schema")
-
-        diffs = self._fixture(m1, m2, include_schemas=True)
-        eq_(diffs[0][0], "add_table")
-        eq_(len(diffs), 1)
-
-    def test_uq_dropped(self):
-        m1 = MetaData()
-        m2 = MetaData()
-        Table(
-            "add_uq",
-            m1,
-            Column("id", Integer, primary_key=True),
-            Column("name", String),
-            UniqueConstraint("name", name="uq_name"),
-        )
-        Table(
-            "add_uq",
-            m2,
-            Column("id", Integer, primary_key=True),
-            Column("name", String),
-        )
-        diffs = self._fixture(m1, m2, include_schemas=True)
-        eq_(diffs[0][0], "remove_constraint")
-        eq_(diffs[0][1].name, "uq_name")
-        eq_(len(diffs), 1)
-
-    def test_functional_ix_one(self):
-        m1 = MetaData()
-        m2 = MetaData()
-
-        t1 = Table(
-            "foo",
-            m1,
-            Column("id", Integer, primary_key=True),
-            Column("email", String(50)),
-        )
-        Index("email_idx", func.lower(t1.c.email), unique=True)
-
-        t2 = Table(
-            "foo",
-            m2,
-            Column("id", Integer, primary_key=True),
-            Column("email", String(50)),
-        )
-        Index("email_idx", func.lower(t2.c.email), unique=True)
-
-        with assertions.expect_warnings(
-            "Skipped unsupported reflection",
-            "autogenerate skipping functional index",
-        ):
-            diffs = self._fixture(m1, m2)
-        eq_(diffs, [])
-
-    def test_functional_ix_two(self):
-        m1 = MetaData()
-        m2 = MetaData()
-
-        t1 = Table(
-            "foo",
-            m1,
-            Column("id", Integer, primary_key=True),
-            Column("email", String(50)),
-            Column("name", String(50)),
-        )
-        Index(
-            "email_idx",
-            func.coalesce(t1.c.email, t1.c.name).desc(),
-            unique=True,
-        )
-
-        t2 = Table(
-            "foo",
-            m2,
-            Column("id", Integer, primary_key=True),
-            Column("email", String(50)),
-            Column("name", String(50)),
-        )
-        Index(
-            "email_idx",
-            func.coalesce(t2.c.email, t2.c.name).desc(),
-            unique=True,
-        )
-
-        with assertions.expect_warnings(
-            "Skipped unsupported reflection",
-            "autogenerate skipping functional index",
-        ):
-            diffs = self._fixture(m1, m2)
-        eq_(diffs, [])
-
-
-class MySQLUniqueIndexTest(AutogenerateUniqueIndexTest):
-    reports_unnamed_constraints = True
-    reports_unique_constraints_as_indexes = True
-    __only_on__ = "mysql", "mariadb"
-    __backend__ = True
-
-    def test_removed_idx_index_named_as_column(self):
-        try:
-            super(
-                MySQLUniqueIndexTest, self
-            ).test_removed_idx_index_named_as_column()
-        except IndexError:
-            assert True
-        else:
-            assert False, "unexpected success"
-
-
-class OracleUniqueIndexTest(AutogenerateUniqueIndexTest):
-    reports_unnamed_constraints = True
-    reports_unique_constraints_as_indexes = True
-    __only_on__ = "oracle"
-    __backend__ = True
-
 
 class NoUqReflectionIndexTest(NoUqReflection, AutogenerateUniqueIndexTest):
-    reports_unique_constraints = False
     __only_on__ = "sqlite"
 
     def test_uq_casing_convention_changed_so_put_drops_first(self):
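
Note: the truncation test above hinges on SQLAlchemy applying the naming convention first and the dialect truncating the result afterwards. A minimal standalone sketch of that interaction (not part of the patch; table and column names are illustrative):

from sqlalchemy import Column, Integer, MetaData, String, Table

# The same convention the tests use: index names derive from the table
# name plus the label of the first indexed column.
m = MetaData(
    naming_convention={"ix": "index_%(table_name)s_%(column_0_label)s"}
)

t = Table(
    "nothing_changed",
    m,
    Column("id", Integer, primary_key=True),
    Column("a_particularly_long_column_name", String(20), index=True),
)

ix = list(t.indexes)[0]
# Prints something like
# "index_nothing_changed_nothing_changed_a_particularly_long_column_name";
# a backend with max_identifier_length=30 emits a truncated, hashed variant
# of this, and autogenerate has to match the reflected truncated name against
# the metadata name rather than reporting a spurious drop/add.
print(ix.name)
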
diff -pruN 1.7.6-1/tests/test_batch.py 1.8.1-2/tests/test_batch.py
--- 1.7.6-1/tests/test_batch.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/tests/test_batch.py	2022-07-13 14:17:20.000000000 +0000
@@ -25,19 +25,29 @@ from sqlalchemy.schema import CreateTabl
 from sqlalchemy.sql import column
 from sqlalchemy.sql import text
 
+from alembic import command
 from alembic import testing
+from alembic import util
 from alembic.ddl import sqlite
 from alembic.operations import Operations
 from alembic.operations.batch import ApplyBatchImpl
 from alembic.runtime.migration import MigrationContext
+from alembic.script import ScriptDirectory
 from alembic.testing import assert_raises_message
 from alembic.testing import config
 from alembic.testing import eq_
 from alembic.testing import exclusions
+from alembic.testing import expect_raises_message
 from alembic.testing import is_
 from alembic.testing import mock
 from alembic.testing import TestBase
+from alembic.testing.env import _no_sql_testing_config
+from alembic.testing.env import clear_staging_env
+from alembic.testing.env import staging_env
+from alembic.testing.env import write_script
+from alembic.testing.fixtures import capture_context_buffer
 from alembic.testing.fixtures import op_fixture
+from alembic.util import CommandError
 from alembic.util import exc as alembic_exc
 from alembic.util.sqla_compat import _safe_commit_connection_transaction
 from alembic.util.sqla_compat import _select
@@ -281,16 +291,20 @@ class BatchApplyTest(TestBase):
         create_stmt = re.sub(r"[\n\t]", "", create_stmt)
 
         idx_stmt = ""
-        for idx in impl.indexes.values():
-            idx_stmt += str(CreateIndex(idx).compile(dialect=context.dialect))
-        for idx in impl.new_indexes.values():
-            impl.new_table.name = impl.table.name
+
+        # create indexes; these should be created in terms of the
+        # final table name
+        impl.new_table.name = impl.table.name
+
+        for idx in impl._gather_indexes_from_both_tables():
             idx_stmt += str(CreateIndex(idx).compile(dialect=context.dialect))
-            impl.new_table.name = ApplyBatchImpl._calc_temp_name(
-                impl.table.name
-            )
+
         idx_stmt = re.sub(r"[\n\t]", "", idx_stmt)
 
+        # revert new table name to the temp name, assertions below
+        # are looking for the temp name
+        impl.new_table.name = ApplyBatchImpl._calc_temp_name(impl.table.name)
+
         if ddl_contains:
             assert ddl_contains in create_stmt + idx_stmt
         if ddl_not_contains:
@@ -357,6 +371,20 @@ class BatchApplyTest(TestBase):
         new_table = self._assert_impl(impl)
         eq_(new_table.c.x.name, "q")
 
+    def test_rename_col_w_index(self):
+        impl = self._ix_fixture()
+        impl.alter_column("tname", "y", name="y2")
+        new_table = self._assert_impl(
+            impl, ddl_contains="CREATE INDEX ix1 ON tname (y2)"
+        )
+        eq_(new_table.c.y.name, "y2")
+
+    def test_rename_col_w_uq(self):
+        impl = self._uq_fixture()
+        impl.alter_column("tname", "y", name="y2")
+        new_table = self._assert_impl(impl, ddl_contains="UNIQUE (y2)")
+        eq_(new_table.c.y.name, "y2")
+
     def test_alter_column_comment(self):
         impl = self._simple_fixture()
         impl.alter_column("tname", "x", comment="some comment")
@@ -1713,6 +1741,8 @@ class BatchRoundTripTest(TestBase):
         ck_consts[0]["sqltext"] = re.sub(
             r"[\'\"`\(\)]", "", ck_consts[0]["sqltext"]
         )
+        for ck in ck_consts:
+            ck.pop("comment", None)
         eq_(ck_consts, [{"sqltext": "x > 0", "name": "newck"}])
 
     @testing.combinations(("always",), ("auto",), argnames="recreate")
@@ -2364,3 +2394,164 @@ class BatchRoundTripPostgresqlTest(Batch
             ],
             [Boolean],
         )
+
+
+class OfflineTest(TestBase):
+    @testing.fixture
+    def no_reflect_batch_fixture(self):
+        staging_env()
+
+        def go():
+            self.cfg = cfg = _no_sql_testing_config(dialect="sqlite")
+
+            self.a = a = util.rev_id()
+
+            script = ScriptDirectory.from_config(cfg)
+            script.generate_revision(
+                a, "revision a", refresh=True, head="base"
+            )
+            write_script(
+                script,
+                a,
+                """\
+    "Rev A"
+    revision = '%s'
+    down_revision = None
+
+    from alembic import op
+    from sqlalchemy import Column
+    from sqlalchemy import Integer
+    from sqlalchemy import String, Table, MetaData
+
+    some_table_up = Table(
+        "some_table", MetaData(),
+        Column('id', Integer),
+        Column('bar', String)
+    )
+
+    some_table_down = Table(
+        "some_table", MetaData(),
+        Column('id', Integer),
+        Column('foo', Integer)
+    )
+
+    def upgrade():
+        with op.batch_alter_table("some_table", copy_from=some_table_up) as batch_op:
+            batch_op.add_column(Column('foo', Integer))
+            batch_op.drop_column('bar')
+
+    def downgrade():
+        with op.batch_alter_table("some_table", copy_from=some_table_down) as batch_op:
+            batch_op.drop_column('foo')
+            batch_op.add_column(Column('bar', String))
+
+    """  # noqa: E501
+                % a,
+            )
+
+        yield go
+        clear_staging_env()
+
+    @testing.fixture
+    def batch_fixture(self):
+        staging_env()
+
+        def go(dialect):
+            self.cfg = cfg = _no_sql_testing_config(dialect=dialect)
+
+            self.a = a = util.rev_id()
+
+            script = ScriptDirectory.from_config(cfg)
+            script.generate_revision(
+                a, "revision a", refresh=True, head="base"
+            )
+            write_script(
+                script,
+                a,
+                """\
+    "Rev A"
+    revision = '%s'
+    down_revision = None
+
+    from alembic import op
+    from sqlalchemy import Column
+    from sqlalchemy import Integer
+    from sqlalchemy import String
+
+    def upgrade():
+        with op.batch_alter_table("some_table") as batch_op:
+            batch_op.add_column(Column('foo', Integer))
+            batch_op.drop_column('bar')
+
+    def downgrade():
+        with op.batch_alter_table("some_table") as batch_op:
+            batch_op.drop_column('foo')
+            batch_op.add_column(Column('bar', String))
+
+    """
+                % a,
+            )
+
+        yield go
+        clear_staging_env()
+
+    def test_upgrade_non_batch(self, batch_fixture):
+        batch_fixture("postgresql")
+
+        with capture_context_buffer() as buf:
+            command.upgrade(self.cfg, self.a, sql=True)
+
+        assert re.search(
+            r"ALTER TABLE some_table ADD COLUMN foo INTEGER", buf.getvalue()
+        )
+
+    def test_downgrade_non_batch(self, batch_fixture):
+        batch_fixture("postgresql")
+
+        with capture_context_buffer() as buf:
+            command.downgrade(self.cfg, f"{self.a}:base", sql=True)
+        assert re.search(
+            r"ALTER TABLE some_table DROP COLUMN foo", buf.getvalue()
+        )
+
+    def test_upgrade_batch_fails_gracefully(self, batch_fixture):
+        batch_fixture("sqlite")
+
+        with expect_raises_message(
+            CommandError,
+            "This operation cannot proceed in --sql mode; batch mode with "
+            "dialect sqlite requires a live database connection with which "
+            'to reflect the table "some_table"',
+        ):
+            command.upgrade(self.cfg, self.a, sql=True)
+
+    def test_downgrade_batch_fails_gracefully(self, batch_fixture):
+        batch_fixture("sqlite")
+
+        with expect_raises_message(
+            CommandError,
+            "This operation cannot proceed in --sql mode; batch mode with "
+            "dialect sqlite requires a live database connection with which "
+            'to reflect the table "some_table"',
+        ):
+            command.downgrade(self.cfg, f"{self.a}:base", sql=True)
+
+    def test_upgrade_batch_no_reflection(self, no_reflect_batch_fixture):
+        no_reflect_batch_fixture()
+
+        with capture_context_buffer() as buf:
+            command.upgrade(self.cfg, self.a, sql=True)
+
+        assert re.search(
+            r"CREATE TABLE _alembic_tmp_some_table", buf.getvalue()
+        )
+
+    def test_downgrade_batch_no_reflection(self, no_reflect_batch_fixture):
+        no_reflect_batch_fixture()
+
+        with capture_context_buffer() as buf:
+            command.downgrade(self.cfg, f"{self.a}:base", sql=True)
+
+        assert re.search(
+            r"CREATE TABLE _alembic_tmp_some_table", buf.getvalue()
+        )
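
Note: the new OfflineTest pair covers the two offline outcomes for batch mode: SQLite batch migrations fail in --sql mode unless the migration supplies the table definition up front. A hedged sketch of the copy_from pattern the "no reflection" fixture scripts rely on (names mirror the fixture):

from alembic import op
from sqlalchemy import Column, Integer, MetaData, String, Table

# Pre-declared table state, so batch mode does not need to reflect from a
# live database when generating SQL offline.
some_table = Table(
    "some_table",
    MetaData(),
    Column("id", Integer),
    Column("bar", String),
)

def upgrade():
    # With copy_from, "alembic upgrade head --sql" can emit the full
    # CREATE TABLE _alembic_tmp_... / INSERT / DROP / RENAME sequence
    # without a connection; without it, a CommandError is raised.
    with op.batch_alter_table("some_table", copy_from=some_table) as batch_op:
        batch_op.add_column(Column("foo", Integer))
        batch_op.drop_column("bar")
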
diff -pruN 1.7.6-1/tests/test_bulk_insert.py 1.8.1-2/tests/test_bulk_insert.py
--- 1.7.6-1/tests/test_bulk_insert.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/tests/test_bulk_insert.py	2022-07-13 14:17:20.000000000 +0000
@@ -322,5 +322,5 @@ class RoundTripTest(TestBase):
             self.conn.execute(
                 text("select id, v1, v2 from ins_table order by id")
             ).fetchall(),
-            [(1, u"row v1", u"row v5"), (2, u"row v2", u"row v6")],
+            [(1, "row v1", "row v5"), (2, "row v2", "row v6")],
         )
diff -pruN 1.7.6-1/tests/test_command.py 1.8.1-2/tests/test_command.py
--- 1.7.6-1/tests/test_command.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/tests/test_command.py	2022-07-13 14:17:20.000000000 +0000
@@ -87,7 +87,7 @@ try:
         context.run_migrations()
 finally:
     connection.close()
-
+    engine.dispose()
 """
         )
 
@@ -204,7 +204,7 @@ finally:
 class CurrentTest(_BufMixin, TestBase):
     @classmethod
     def setup_class(cls):
-        cls.bind = _sqlite_file_db()
+        cls.bind = _sqlite_file_db(scope="class")
         cls.env = env = staging_env()
         cls.cfg = _sqlite_testing_config()
         cls.a1 = env.generate_revision("a1", "a1")
@@ -309,6 +309,7 @@ try:
         context.run_migrations()
 finally:
     connection.close()
+    engine.dispose()
 
 """
             % (version_table_pk,)
@@ -1192,7 +1193,7 @@ class CommandLineTest(TestBase):
 class EnureVersionTest(TestBase):
     @classmethod
     def setup_class(cls):
-        cls.bind = _sqlite_file_db()
+        cls.bind = _sqlite_file_db(scope="class")
         cls.env = staging_env()
         cls.cfg = _sqlite_testing_config()
 
diff -pruN 1.7.6-1/tests/test_mssql.py 1.8.1-2/tests/test_mssql.py
--- 1.7.6-1/tests/test_mssql.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/tests/test_mssql.py	2022-07-13 14:17:20.000000000 +0000
@@ -1,4 +1,8 @@
 """Test op functions against MSSQL."""
+from __future__ import annotations
+
+from typing import Any
+from typing import Dict
 
 from sqlalchemy import Column
 from sqlalchemy import exc
@@ -12,6 +16,7 @@ from alembic.testing import assert_raise
 from alembic.testing import combinations
 from alembic.testing import config
 from alembic.testing import eq_
+from alembic.testing import expect_warnings
 from alembic.testing.env import _no_sql_testing_config
 from alembic.testing.env import clear_staging_env
 from alembic.testing.env import staging_env
@@ -72,34 +77,68 @@ class OpTest(TestBase):
 
     def test_alter_column_rename_mssql(self):
         context = op_fixture("mssql")
-        op.alter_column("t", "c", new_column_name="x")
-        context.assert_("EXEC sp_rename 't.c', x, 'COLUMN'")
+        op.alter_column(
+            "t",
+            "c",
+            new_column_name="x",
+        )
+        context.assert_(
+            "EXEC sp_rename 't.c', x, 'COLUMN'",
+        )
 
     def test_alter_column_rename_quoted_mssql(self):
         context = op_fixture("mssql")
-        op.alter_column("t", "c", new_column_name="SomeFancyName")
-        context.assert_("EXEC sp_rename 't.c', [SomeFancyName], 'COLUMN'")
+        op.alter_column(
+            "t",
+            "c",
+            new_column_name="SomeFancyName",
+        )
+        context.assert_(
+            "EXEC sp_rename 't.c', [SomeFancyName], 'COLUMN'",
+        )
 
     @combinations((True,), (False,), argnames="pass_existing_type")
+    @combinations((True,), (False,), argnames="existing_nullability")
     @combinations((True,), (False,), argnames="change_nullability")
     def test_alter_column_type_and_nullability(
-        self, pass_existing_type, change_nullability
+        self, pass_existing_type, existing_nullability, change_nullability
     ):
         context = op_fixture("mssql")
 
-        args = dict(type_=Integer)
-        if pass_existing_type:
-            args["existing_type"] = String(15)
+        args: Dict[str, Any] = dict(type_=Integer)
 
         if change_nullability:
-            args["nullable"] = False
+            expected_nullability = not existing_nullability
+            args["nullable"] = expected_nullability
+        else:
+            args[
+                "existing_nullable"
+            ] = expected_nullability = existing_nullability
 
         op.alter_column("t", "c", **args)
 
-        if change_nullability:
-            context.assert_("ALTER TABLE t ALTER COLUMN c INTEGER NOT NULL")
-        else:
-            context.assert_("ALTER TABLE t ALTER COLUMN c INTEGER")
+        context.assert_(
+            f"ALTER TABLE t ALTER COLUMN c INTEGER "
+            f"{'NOT NULL' if not expected_nullability else 'NULL'}"
+        )
+
+    def test_alter_column_type_dont_change_nullability(self):
+        context = op_fixture("mssql")
+
+        op.alter_column("t", "c", type_=String(99), existing_nullable=False)
+        context.assert_contains("ALTER COLUMN c VARCHAR(99) NOT NULL")
+
+    def test_alter_column_type_should_have_existing_nullable(self):
+        context = op_fixture("mssql")  # noqa
+        with expect_warnings(
+            "MS-SQL ALTER COLUMN operations that specify type_= should"
+        ):
+            op.alter_column(
+                "t",
+                "c",
+                type_=String(99),
+            )
+        context.assert_contains("ALTER COLUMN c VARCHAR(99)")
 
     def test_alter_column_dont_touch_constraints(self):
         context = op_fixture("mssql")
@@ -136,7 +175,11 @@ class OpTest(TestBase):
 
     def test_alter_column_drop_default(self):
         context = op_fixture("mssql")
-        op.alter_column("t", "c", server_default=None)
+        op.alter_column(
+            "t",
+            "c",
+            server_default=None,
+        )
         context.assert_contains(
             "declare @const_name varchar(256)select @const_name = [name] "
             "from sys.default_constraintswhere parent_object_id = "
@@ -149,7 +192,12 @@ class OpTest(TestBase):
 
     def test_alter_column_drop_default_w_schema(self):
         context = op_fixture("mssql")
-        op.alter_column("t", "c", server_default=None, schema="xyz")
+        op.alter_column(
+            "t",
+            "c",
+            server_default=None,
+            schema="xyz",
+        )
         context.assert_contains(
             "declare @const_name varchar(256)select @const_name = [name] "
             "from sys.default_constraintswhere parent_object_id = "
@@ -162,7 +210,11 @@ class OpTest(TestBase):
 
     def test_alter_column_dont_drop_default(self):
         context = op_fixture("mssql")
-        op.alter_column("t", "c", server_default=False)
+        op.alter_column(
+            "t",
+            "c",
+            server_default=False,
+        )
         context.assert_()
 
     def test_drop_column_w_schema(self):
@@ -275,13 +327,22 @@ class OpTest(TestBase):
 
     def test_alter_add_server_default(self):
         context = op_fixture("mssql")
-        op.alter_column("t", "c", server_default="5")
-        context.assert_("ALTER TABLE t ADD DEFAULT '5' FOR c")
+        op.alter_column(
+            "t",
+            "c",
+            server_default="5",
+        )
+        context.assert_(
+            "ALTER TABLE t ADD DEFAULT '5' FOR c",
+        )
 
     def test_alter_replace_server_default(self):
         context = op_fixture("mssql")
         op.alter_column(
-            "t", "c", server_default="5", existing_server_default="6"
+            "t",
+            "c",
+            server_default="5",
+            existing_server_default="6",
         )
         context.assert_contains(
             "exec('alter table t drop constraint ' + @const_name)"
@@ -290,7 +351,11 @@ class OpTest(TestBase):
 
     def test_alter_remove_server_default(self):
         context = op_fixture("mssql")
-        op.alter_column("t", "c", server_default=None)
+        op.alter_column(
+            "t",
+            "c",
+            server_default=None,
+        )
         context.assert_contains(
             "exec('alter table t drop constraint ' + @const_name)"
         )
@@ -348,8 +413,15 @@ class OpTest(TestBase):
 
     def test_alter_column_rename_mssql_schema(self):
         context = op_fixture("mssql")
-        op.alter_column("t", "c", new_column_name="x", schema="y")
-        context.assert_("EXEC sp_rename 'y.t.c', x, 'COLUMN'")
+        op.alter_column(
+            "t",
+            "c",
+            new_column_name="x",
+            schema="y",
+        )
+        context.assert_(
+            "EXEC sp_rename 'y.t.c', x, 'COLUMN'",
+        )
 
     def test_create_index_mssql_include(self):
         context = op_fixture("mssql")
diff -pruN 1.7.6-1/tests/test_mysql.py 1.8.1-2/tests/test_mysql.py
--- 1.7.6-1/tests/test_mysql.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/tests/test_mysql.py	2022-07-13 14:17:20.000000000 +0000
@@ -472,7 +472,7 @@ class MySQLOpTest(TestBase):
     )
     @config.requirements.computed_columns_api
     def test_alter_column_computed_not_supported(self, sd, esd):
-        op_fixture("mssql")
+        op_fixture("mysql")
         assert_raises_message(
             exc.CompileError,
             'Adding or removing a "computed" construct, e.g. '
diff -pruN 1.7.6-1/tests/test_op.py 1.8.1-2/tests/test_op.py
--- 1.7.6-1/tests/test_op.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/tests/test_op.py	2022-07-13 14:17:20.000000000 +0000
@@ -3,6 +3,7 @@
 from sqlalchemy import Boolean
 from sqlalchemy import CheckConstraint
 from sqlalchemy import Column
+from sqlalchemy import event
 from sqlalchemy import exc
 from sqlalchemy import ForeignKey
 from sqlalchemy import Index
@@ -23,6 +24,7 @@ from alembic.testing import assert_raise
 from alembic.testing import combinations
 from alembic.testing import config
 from alembic.testing import eq_
+from alembic.testing import expect_warnings
 from alembic.testing import is_not_
 from alembic.testing import mock
 from alembic.testing.fixtures import op_fixture
@@ -409,31 +411,59 @@ class OpTest(TestBase):
             existing_server_default=esd(),
         )
 
-    def test_alter_column_schema_type_unnamed(self):
+    @combinations((True,), (False,), (None,), argnames="existing_nullable")
+    def test_alter_column_schema_type_unnamed(self, existing_nullable):
         context = op_fixture("mssql", native_boolean=False)
-        op.alter_column("t", "c", type_=Boolean(create_constraint=True))
-        context.assert_(
-            "ALTER TABLE t ALTER COLUMN c BIT",
-            "ALTER TABLE t ADD CHECK (c IN (0, 1))",
-        )
+        if existing_nullable is None:
+            with expect_warnings(
+                "MS-SQL ALTER COLUMN operations that specify type_= should"
+            ):
+                op.alter_column(
+                    "t",
+                    "c",
+                    type_=Boolean(create_constraint=True),
+                )
+            context.assert_(
+                "ALTER TABLE t ALTER COLUMN c BIT",
+                "ALTER TABLE t ADD CHECK (c IN (0, 1))",
+            )
+        else:
+            op.alter_column(
+                "t",
+                "c",
+                type_=Boolean(create_constraint=True),
+                existing_nullable=existing_nullable,
+            )
+            context.assert_(
+                f"ALTER TABLE t ALTER COLUMN c BIT "
+                f"{'NULL' if existing_nullable else 'NOT NULL'}",
+                "ALTER TABLE t ADD CHECK (c IN (0, 1))",
+            )
 
     def test_alter_column_schema_schema_type_unnamed(self):
         context = op_fixture("mssql", native_boolean=False)
         op.alter_column(
-            "t", "c", type_=Boolean(create_constraint=True), schema="foo"
+            "t",
+            "c",
+            type_=Boolean(create_constraint=True),
+            existing_nullable=False,
+            schema="foo",
         )
         context.assert_(
-            "ALTER TABLE foo.t ALTER COLUMN c BIT",
+            "ALTER TABLE foo.t ALTER COLUMN c BIT NOT NULL",
             "ALTER TABLE foo.t ADD CHECK (c IN (0, 1))",
         )
 
     def test_alter_column_schema_type_named(self):
         context = op_fixture("mssql", native_boolean=False)
         op.alter_column(
-            "t", "c", type_=Boolean(name="xyz", create_constraint=True)
+            "t",
+            "c",
+            type_=Boolean(name="xyz", create_constraint=True),
+            existing_nullable=False,
         )
         context.assert_(
-            "ALTER TABLE t ALTER COLUMN c BIT",
+            "ALTER TABLE t ALTER COLUMN c BIT NOT NULL",
             "ALTER TABLE t ADD CONSTRAINT xyz CHECK (c IN (0, 1))",
         )
 
@@ -443,10 +473,11 @@ class OpTest(TestBase):
             "t",
             "c",
             type_=Boolean(name="xyz", create_constraint=True),
+            existing_nullable=False,
             schema="foo",
         )
         context.assert_(
-            "ALTER TABLE foo.t ALTER COLUMN c BIT",
+            "ALTER TABLE foo.t ALTER COLUMN c BIT NOT NULL",
             "ALTER TABLE foo.t ADD CONSTRAINT xyz CHECK (c IN (0, 1))",
         )
 
@@ -482,10 +513,11 @@ class OpTest(TestBase):
             "c",
             type_=String(10),
             existing_type=Boolean(name="xyz", create_constraint=True),
+            existing_nullable=False,
         )
         context.assert_(
             "ALTER TABLE t DROP CONSTRAINT xyz",
-            "ALTER TABLE t ALTER COLUMN c VARCHAR(10)",
+            "ALTER TABLE t ALTER COLUMN c VARCHAR(10) NOT NULL",
         )
 
     def test_alter_column_schema_schema_type_existing_type(self):
@@ -495,11 +527,12 @@ class OpTest(TestBase):
             "c",
             type_=String(10),
             existing_type=Boolean(name="xyz", create_constraint=True),
+            existing_nullable=False,
             schema="foo",
         )
         context.assert_(
             "ALTER TABLE foo.t DROP CONSTRAINT xyz",
-            "ALTER TABLE foo.t ALTER COLUMN c VARCHAR(10)",
+            "ALTER TABLE foo.t ALTER COLUMN c VARCHAR(10) NOT NULL",
         )
 
     def test_alter_column_schema_type_existing_type_no_const(self):
@@ -850,6 +883,27 @@ class OpTest(TestBase):
         ck = [c for c in t1.constraints if isinstance(c, CheckConstraint)]
         eq_(ck[0].name, "ck_1")
 
+    def test_create_table_with_check_constraint_with_expr(self):
+        context = op_fixture()
+        foo_id = Column("foo_id", Integer)
+        t1 = op.create_table(
+            "some_table",
+            Column("id", Integer, primary_key=True),
+            foo_id,
+            CheckConstraint(foo_id > 5, name="ck_1"),
+        )
+        context.assert_(
+            "CREATE TABLE some_table ("
+            "id INTEGER NOT NULL, "
+            "foo_id INTEGER, "
+            "PRIMARY KEY (id), "
+            "CONSTRAINT ck_1 CHECK (foo_id > 5))"
+        )
+
+        ck = [c for c in t1.constraints if isinstance(c, CheckConstraint)]
+        eq_(ck[0].name, "ck_1")
+        eq_(len(ck), 1)
+
     def test_create_table_unique_constraint(self):
         context = op_fixture()
         t1 = op.create_table(
@@ -1034,6 +1088,58 @@ class OpTest(TestBase):
 
         context.assert_("COMMENT ON TABLE some_table IS NULL")
 
+    def test_create_table_event(self):
+        context = op_fixture()
+
+        events_triggered = []
+
+        TestTable = Table(
+            "tb_test", MetaData(), Column("c1", Integer, nullable=False)
+        )
+
+        @event.listens_for(Table, "before_create")
+        def record_before_event(table, conn, **kwargs):
+            events_triggered.append(("before_create", table.name))
+
+        @event.listens_for(Table, "after_create")
+        def record_after_event(table, conn, **kwargs):
+            events_triggered.append(("after_create", table.name))
+
+        op.create_table(TestTable)
+        op.drop_table(TestTable)
+        context.assert_("CREATE TABLE tb_test ()", "DROP TABLE tb_test")
+
+        assert events_triggered == [
+            ("before_create", "tb_test"),
+            ("after_create", "tb_test"),
+        ]
+
+    def test_drop_table_event(self):
+        context = op_fixture()
+
+        events_triggered = []
+
+        TestTable = Table(
+            "tb_test", MetaData(), Column("c1", Integer, nullable=False)
+        )
+
+        @event.listens_for(Table, "before_drop")
+        def record_before_event(table, conn, **kwargs):
+            events_triggered.append(("before_drop", table.name))
+
+        @event.listens_for(Table, "after_drop")
+        def record_after_event(table, conn, **kwargs):
+            events_triggered.append(("after_drop", table.name))
+
+        op.create_table(TestTable)
+        op.drop_table(TestTable)
+        context.assert_("CREATE TABLE tb_test ()", "DROP TABLE tb_test")
+
+        assert events_triggered == [
+            ("before_drop", "tb_test"),
+            ("after_drop", "tb_test"),
+        ]
+
 
 class SQLModeOpTest(TestBase):
     def test_auto_literals(self):
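
Note: the two new event tests verify that DDL issued through op.create_table() and op.drop_table() now participates in SQLAlchemy's Table event system. A compact sketch of the listener pattern, usable outside the test harness:

from sqlalchemy import Column, Integer, MetaData, Table, event

seen = []

@event.listens_for(Table, "before_create")
def on_before_create(table, connection, **kw):
    seen.append(("before_create", table.name))

@event.listens_for(Table, "after_drop")
def on_after_drop(table, connection, **kw):
    seen.append(("after_drop", table.name))

# Within a migration, op.create_table(...) and op.drop_table(...) now fire
# these hooks, the same as MetaData.create_all()/drop_all() would.
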
diff -pruN 1.7.6-1/tests/test_postgresql.py 1.8.1-2/tests/test_postgresql.py
--- 1.7.6-1/tests/test_postgresql.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/tests/test_postgresql.py	2022-07-13 14:17:20.000000000 +0000
@@ -16,6 +16,7 @@ from sqlalchemy import String
 from sqlalchemy import Table
 from sqlalchemy import text
 from sqlalchemy import types
+from sqlalchemy import UniqueConstraint
 from sqlalchemy.dialects.postgresql import ARRAY
 from sqlalchemy.dialects.postgresql import BYTEA
 from sqlalchemy.dialects.postgresql import HSTORE
@@ -38,6 +39,7 @@ from alembic.migration import MigrationC
 from alembic.operations import ops
 from alembic.script import ScriptDirectory
 from alembic.testing import assert_raises_message
+from alembic.testing import assertions
 from alembic.testing import combinations
 from alembic.testing import config
 from alembic.testing import eq_
@@ -52,6 +54,7 @@ from alembic.testing.fixtures import Fut
 from alembic.testing.fixtures import op_fixture
 from alembic.testing.fixtures import TablesTest
 from alembic.testing.fixtures import TestBase
+from alembic.testing.suite._autogen_fixtures import AutogenFixtureTest
 from alembic.util import sqla_compat
 
 
@@ -765,7 +768,7 @@ class PostgresqlDefaultCompareTest(TestB
         )
 
     def test_compare_unicode_literal(self):
-        self._compare_default_roundtrip(String(), u"im a default")
+        self._compare_default_roundtrip(String(), "im a default")
 
     # TODO: will need to actually eval() the repr() and
     # spend more effort figuring out exactly the kind of expression
@@ -1165,3 +1168,206 @@ class PostgresqlAutogenRenderTest(TestBa
             autogenerate.render._repr_type(JSONB(), self.autogen_context),
             "postgresql.JSONB(astext_type=sa.Text())",
         )
+
+
+class PGUniqueIndexAutogenerateTest(AutogenFixtureTest, TestBase):
+    __only_on__ = "postgresql"
+    __backend__ = True
+
+    def test_idx_added_schema(self):
+        m1 = MetaData()
+        m2 = MetaData()
+        Table("add_ix", m1, Column("x", String(50)), schema="test_schema")
+        Table(
+            "add_ix",
+            m2,
+            Column("x", String(50)),
+            Index("ix_1", "x"),
+            schema="test_schema",
+        )
+
+        diffs = self._fixture(m1, m2, include_schemas=True)
+        eq_(diffs[0][0], "add_index")
+        eq_(diffs[0][1].name, "ix_1")
+
+    def test_idx_unchanged_schema(self):
+        m1 = MetaData()
+        m2 = MetaData()
+        Table(
+            "add_ix",
+            m1,
+            Column("x", String(50)),
+            Index("ix_1", "x"),
+            schema="test_schema",
+        )
+        Table(
+            "add_ix",
+            m2,
+            Column("x", String(50)),
+            Index("ix_1", "x"),
+            schema="test_schema",
+        )
+
+        diffs = self._fixture(m1, m2, include_schemas=True)
+        eq_(diffs, [])
+
+    def test_uq_added_schema(self):
+        m1 = MetaData()
+        m2 = MetaData()
+        Table("add_uq", m1, Column("x", String(50)), schema="test_schema")
+        Table(
+            "add_uq",
+            m2,
+            Column("x", String(50)),
+            UniqueConstraint("x", name="ix_1"),
+            schema="test_schema",
+        )
+
+        diffs = self._fixture(m1, m2, include_schemas=True)
+        eq_(diffs[0][0], "add_constraint")
+        eq_(diffs[0][1].name, "ix_1")
+
+    def test_uq_unchanged_schema(self):
+        m1 = MetaData()
+        m2 = MetaData()
+        Table(
+            "add_uq",
+            m1,
+            Column("x", String(50)),
+            UniqueConstraint("x", name="ix_1"),
+            schema="test_schema",
+        )
+        Table(
+            "add_uq",
+            m2,
+            Column("x", String(50)),
+            UniqueConstraint("x", name="ix_1"),
+            schema="test_schema",
+        )
+
+        diffs = self._fixture(m1, m2, include_schemas=True)
+        eq_(diffs, [])
+
+    @config.requirements.btree_gist
+    def test_exclude_const_unchanged(self):
+        from sqlalchemy.dialects.postgresql import TSRANGE, ExcludeConstraint
+
+        m1 = MetaData()
+        m2 = MetaData()
+
+        Table(
+            "add_excl",
+            m1,
+            Column("id", Integer, primary_key=True),
+            Column("period", TSRANGE),
+            ExcludeConstraint(("period", "&&"), name="quarters_period_excl"),
+        )
+
+        Table(
+            "add_excl",
+            m2,
+            Column("id", Integer, primary_key=True),
+            Column("period", TSRANGE),
+            ExcludeConstraint(("period", "&&"), name="quarters_period_excl"),
+        )
+
+        diffs = self._fixture(m1, m2)
+        eq_(diffs, [])
+
+    def test_same_tname_two_schemas(self):
+        m1 = MetaData()
+        m2 = MetaData()
+
+        Table("add_ix", m1, Column("x", String(50)), Index("ix_1", "x"))
+
+        Table("add_ix", m2, Column("x", String(50)), Index("ix_1", "x"))
+        Table("add_ix", m2, Column("x", String(50)), schema="test_schema")
+
+        diffs = self._fixture(m1, m2, include_schemas=True)
+        eq_(diffs[0][0], "add_table")
+        eq_(len(diffs), 1)
+
+    def test_uq_dropped(self):
+        m1 = MetaData()
+        m2 = MetaData()
+        Table(
+            "add_uq",
+            m1,
+            Column("id", Integer, primary_key=True),
+            Column("name", String),
+            UniqueConstraint("name", name="uq_name"),
+        )
+        Table(
+            "add_uq",
+            m2,
+            Column("id", Integer, primary_key=True),
+            Column("name", String),
+        )
+        diffs = self._fixture(m1, m2, include_schemas=True)
+        eq_(diffs[0][0], "remove_constraint")
+        eq_(diffs[0][1].name, "uq_name")
+        eq_(len(diffs), 1)
+
+    def test_functional_ix_one(self):
+        m1 = MetaData()
+        m2 = MetaData()
+
+        t1 = Table(
+            "foo",
+            m1,
+            Column("id", Integer, primary_key=True),
+            Column("email", String(50)),
+        )
+        Index("email_idx", func.lower(t1.c.email), unique=True)
+
+        t2 = Table(
+            "foo",
+            m2,
+            Column("id", Integer, primary_key=True),
+            Column("email", String(50)),
+        )
+        Index("email_idx", func.lower(t2.c.email), unique=True)
+
+        with assertions.expect_warnings(
+            "Skipped unsupported reflection",
+            "autogenerate skipping functional index",
+        ):
+            diffs = self._fixture(m1, m2)
+        eq_(diffs, [])
+
+    def test_functional_ix_two(self):
+        m1 = MetaData()
+        m2 = MetaData()
+
+        t1 = Table(
+            "foo",
+            m1,
+            Column("id", Integer, primary_key=True),
+            Column("email", String(50)),
+            Column("name", String(50)),
+        )
+        Index(
+            "email_idx",
+            func.coalesce(t1.c.email, t1.c.name).desc(),
+            unique=True,
+        )
+
+        t2 = Table(
+            "foo",
+            m2,
+            Column("id", Integer, primary_key=True),
+            Column("email", String(50)),
+            Column("name", String(50)),
+        )
+        Index(
+            "email_idx",
+            func.coalesce(t2.c.email, t2.c.name).desc(),
+            unique=True,
+        )
+
+        with assertions.expect_warnings(
+            "Skipped unsupported reflection",
+            "autogenerate skipping functional index",
+        ):
+            diffs = self._fixture(m1, m2)
+        eq_(diffs, [])
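
Note: the relocated PostgreSQL autogenerate tests close with the functional-index cases: expression-based indexes cannot be compared structurally, so autogenerate warns and skips them instead of producing a drop/add pair. A standalone sketch of such an index (illustrative names):

from sqlalchemy import Column, Index, Integer, MetaData, String, Table, func

m = MetaData()
t = Table(
    "foo",
    m,
    Column("id", Integer, primary_key=True),
    Column("email", String(50)),
)

# func.lower(...) makes this a functional (expression) index; on comparison
# runs, autogenerate emits "autogenerate skipping functional index" and
# leaves it alone rather than guessing at a diff.
Index("email_idx", func.lower(t.c.email), unique=True)
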
diff -pruN 1.7.6-1/tests/test_revision.py 1.8.1-2/tests/test_revision.py
--- 1.7.6-1/tests/test_revision.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/tests/test_revision.py	2022-07-13 14:17:20.000000000 +0000
@@ -239,10 +239,7 @@ class DownIterateTest(TestBase):
             edges, list(reversed(result))
         )
 
-        eq_(
-            result,
-            assertion,
-        )
+        eq_(result, assertion)
 
 
 class DiamondTest(DownIterateTest):
@@ -573,6 +570,45 @@ class MultipleBranchTest(DownIterateTest
         )
 
 
+class MultipleBranchEffectiveHead(DownIterateTest):
+    def setUp(self):
+        self.map = RevisionMap(
+            lambda: [
+                Revision("y1", None, branch_labels="y"),
+                Revision("x1", None, branch_labels="x"),
+                Revision("y2", "y1", dependencies="x1"),
+                Revision("x2", "x1"),
+            ]
+        )
+
+    def test_other_downgrade(self):
+        self._assert_iteration(
+            ("x2", "y2"),
+            "x@-1",
+            ["x2"],
+            inclusive=False,
+            select_for_downgrade=True,
+        )
+
+    def test_use_all_current(self):
+        self._assert_iteration(
+            ("x1", "y2"),
+            "x@-1",
+            ["y2", "x1"],
+            inclusive=False,
+            select_for_downgrade=True,
+        )
+
+    def test_effective_head(self):
+        self._assert_iteration(
+            "y2",
+            "x@-1",
+            ["y2", "x1"],
+            inclusive=False,
+            select_for_downgrade=True,
+        )
+
+
 class BranchTravellingTest(DownIterateTest):
     """test the order of revs when going along multiple branches.
 
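The new MultipleBranchEffectiveHead class tests relative downgrades of one branch while another branch's head depends on a revision inside it. A sketch of the graph built in its setUp, with the assertions restated as comments:

    # Revision graph (branch labels in quotes):
    #
    #   "x":  x1 --> x2
    #   "y":  y1 --> y2    (y2 additionally depends on x1)
    #
    # "x@-1" means: one step down from the effective head of branch "x".
    # With heads ("x2", "y2") that removes only x2 (test_other_downgrade);
    # with heads ("x1", "y2") the dependency means y2 must come off before
    # x1 can, so iteration yields ["y2", "x1"] (test_use_all_current).
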
diff -pruN 1.7.6-1/tests/test_script_consumption.py 1.8.1-2/tests/test_script_consumption.py
--- 1.7.6-1/tests/test_script_consumption.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/tests/test_script_consumption.py	2022-07-13 14:17:20.000000000 +0000
@@ -6,6 +6,7 @@ import re
 import textwrap
 
 import sqlalchemy as sa
+from sqlalchemy import pool
 
 from alembic import command
 from alembic import testing
@@ -116,18 +117,9 @@ class PatchEnvironment:
 
 
 @testing.combinations(
-    (
-        False,
-        True,
-    ),
-    (
-        True,
-        False,
-    ),
-    (
-        True,
-        True,
-    ),
+    (False, True),
+    (True, False),
+    (True, True),
     argnames="transactional_ddl,transaction_per_migration",
     id_="rr",
 )
@@ -141,7 +133,9 @@ class ApplyVersionsFunctionalTest(PatchE
     branched_connection = False
 
     def setUp(self):
-        self.bind = _sqlite_file_db(future=self.future)
+        self.bind = _sqlite_file_db(
+            future=self.future, poolclass=pool.NullPool
+        )
         self.env = staging_env(sourceless=self.sourceless)
         self.cfg = _sqlite_testing_config(
             sourceless=self.sourceless, future=self.future
@@ -242,44 +236,51 @@ class ApplyVersionsFunctionalTest(PatchE
     def _test_002_upgrade(self):
         command.upgrade(self.cfg, self.c)
         db = self.bind
-        assert db.dialect.has_table(db.connect(), "foo")
-        assert db.dialect.has_table(db.connect(), "bar")
-        assert db.dialect.has_table(db.connect(), "bat")
+
+        with db.connect() as conn:
+            assert db.dialect.has_table(conn, "foo")
+            assert db.dialect.has_table(conn, "bar")
+            assert db.dialect.has_table(conn, "bat")
 
     def _test_003_downgrade(self):
         command.downgrade(self.cfg, self.a)
         db = self.bind
-        assert db.dialect.has_table(db.connect(), "foo")
-        assert not db.dialect.has_table(db.connect(), "bar")
-        assert not db.dialect.has_table(db.connect(), "bat")
+        with db.connect() as conn:
+            assert db.dialect.has_table(conn, "foo")
+            assert not db.dialect.has_table(conn, "bar")
+            assert not db.dialect.has_table(conn, "bat")
 
     def _test_004_downgrade(self):
         command.downgrade(self.cfg, "base")
         db = self.bind
-        assert not db.dialect.has_table(db.connect(), "foo")
-        assert not db.dialect.has_table(db.connect(), "bar")
-        assert not db.dialect.has_table(db.connect(), "bat")
+        with db.connect() as conn:
+            assert not db.dialect.has_table(conn, "foo")
+            assert not db.dialect.has_table(conn, "bar")
+            assert not db.dialect.has_table(conn, "bat")
 
     def _test_005_upgrade(self):
         command.upgrade(self.cfg, self.b)
         db = self.bind
-        assert db.dialect.has_table(db.connect(), "foo")
-        assert db.dialect.has_table(db.connect(), "bar")
-        assert not db.dialect.has_table(db.connect(), "bat")
+        with db.connect() as conn:
+            assert db.dialect.has_table(conn, "foo")
+            assert db.dialect.has_table(conn, "bar")
+            assert not db.dialect.has_table(conn, "bat")
 
     def _test_006_upgrade_again(self):
         command.upgrade(self.cfg, self.b)
         db = self.bind
-        assert db.dialect.has_table(db.connect(), "foo")
-        assert db.dialect.has_table(db.connect(), "bar")
-        assert not db.dialect.has_table(db.connect(), "bat")
+        with db.connect() as conn:
+            assert db.dialect.has_table(conn, "foo")
+            assert db.dialect.has_table(conn, "bar")
+            assert not db.dialect.has_table(conn, "bat")
 
     def _test_007_stamp_upgrade(self):
         command.stamp(self.cfg, self.c)
         db = self.bind
-        assert db.dialect.has_table(db.connect(), "foo")
-        assert db.dialect.has_table(db.connect(), "bar")
-        assert not db.dialect.has_table(db.connect(), "bat")
+        with db.connect() as conn:
+            assert db.dialect.has_table(conn, "foo")
+            assert db.dialect.has_table(conn, "bar")
+            assert not db.dialect.has_table(conn, "bat")
 
 
 class LegacyApplyVersionsFunctionalTest(ApplyVersionsFunctionalTest):
@@ -795,7 +796,7 @@ class IgnoreFilesTest(TestBase):
     sourceless = False
 
     def setUp(self):
-        self.bind = _sqlite_file_db()
+        self.bind = _sqlite_file_db(poolclass=pool.NullPool)
         self.env = staging_env(sourceless=self.sourceless)
         self.cfg = _sqlite_testing_config(sourceless=self.sourceless)
 
diff -pruN 1.7.6-1/tests/test_script_production.py 1.8.1-2/tests/test_script_production.py
--- 1.7.6-1/tests/test_script_production.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/tests/test_script_production.py	2022-07-13 14:17:20.000000000 +0000
@@ -8,6 +8,7 @@ from sqlalchemy import inspect
 
 from alembic import autogenerate
 from alembic import command
+from alembic import testing
 from alembic import util
 from alembic.environment import EnvironmentContext
 from alembic.operations import ops
@@ -185,6 +186,33 @@ class ScriptNamingTest(TestBase):
             ),
         )
 
+    @testing.combinations(
+        (
+            datetime.datetime(2012, 7, 25, 15, 8, 5, tzinfo=tz.gettz("UTC")),
+            "%s/versions/1343228885_12345_this_is_a_"
+            "message_2012_7_25_15_8_5.py",
+        ),
+        (
+            datetime.datetime(2012, 7, 25, 15, 8, 6, tzinfo=tz.gettz("UTC")),
+            "%s/versions/1343228886_12345_this_is_a_"
+            "message_2012_7_25_15_8_6.py",
+        ),
+    )
+    def test_epoch(self, create_date, expected):
+        script = ScriptDirectory(
+            _get_staging_directory(),
+            file_template="%(epoch)s_%(rev)s_%(slug)s_"
+            "%(year)s_%(month)s_"
+            "%(day)s_%(hour)s_"
+            "%(minute)s_%(second)s",
+        )
+        eq_(
+            script._rev_path(
+                script.versions, "12345", "this is a message", create_date
+            ),
+            os.path.abspath(expected % _get_staging_directory()),
+        )
+
     def _test_tz(self, timezone_arg, given, expected):
         script = ScriptDirectory(
             _get_staging_directory(),
@@ -618,7 +646,7 @@ def downgrade():
         assert (
             (
                 """
-def upgrade():
+def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
     op.create_table('test_table',
     sa.Column('id', sa.Integer(), nullable=False),
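
The new test_epoch combinations exercise an %(epoch)s token for file_template; judging by the expected filenames, it renders the revision's create_date as integer Unix-epoch seconds. A quick consistency check (dateutil is already a test dependency):

    import datetime

    from dateutil import tz

    create_date = datetime.datetime(
        2012, 7, 25, 15, 8, 5, tzinfo=tz.gettz("UTC")
    )
    # 1343228885 -- the prefix of the first expected filename above
    print(int(create_date.timestamp()))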
diff -pruN 1.7.6-1/tests/test_sqlite.py 1.8.1-2/tests/test_sqlite.py
--- 1.7.6-1/tests/test_sqlite.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/tests/test_sqlite.py	2022-07-13 14:17:20.000000000 +0000
@@ -81,6 +81,11 @@ class SQLiteTest(TestBase):
         op.add_column("t1", Column("c1", Integer, comment="c1 comment"))
         context.assert_("ALTER TABLE t1 ADD COLUMN c1 INTEGER")
 
+    def test_rename_table_w_schema(self):
+        context = op_fixture("sqlite")
+        op.rename_table("old_name", "new_name", schema="my_schema")
+        context.assert_("ALTER TABLE my_schema.old_name RENAME TO new_name")
+
 
 class SQLiteDefaultCompareTest(TestBase):
     __only_on__ = "sqlite"
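
The added SQLite test verifies that op.rename_table() applies its schema argument. As it would appear in a migration script (a sketch; on SQLite a schema name usually refers to an ATTACHed database):

    from alembic import op

    def upgrade() -> None:
        # emits: ALTER TABLE my_schema.old_name RENAME TO new_name
        op.rename_table("old_name", "new_name", schema="my_schema")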
diff -pruN 1.7.6-1/tests/test_version_traversal.py 1.8.1-2/tests/test_version_traversal.py
--- 1.7.6-1/tests/test_version_traversal.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/tests/test_version_traversal.py	2022-07-13 14:17:20.000000000 +0000
@@ -40,11 +40,12 @@ class RevisionPathTest(MigrationTest):
     @classmethod
     def setup_class(cls):
         cls.env = env = staging_env()
-        cls.a = env.generate_revision(util.rev_id(), "->a")
-        cls.b = env.generate_revision(util.rev_id(), "a->b")
-        cls.c = env.generate_revision(util.rev_id(), "b->c")
-        cls.d = env.generate_revision(util.rev_id(), "c->d")
-        cls.e = env.generate_revision(util.rev_id(), "d->e")
+
+        cls.a = env.generate_revision("e6239818bb3a", "->a")
+        cls.b = env.generate_revision("548bbb905360", "a->b")
+        cls.c = env.generate_revision("b7ea43dc85e4", "b->c")
+        cls.d = env.generate_revision("1bbe33445780", "c->d")
+        cls.e = env.generate_revision("3975fb1a0125", "d->e")
 
     @classmethod
     def teardown_class(cls):
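
Swapping util.rev_id() for hard-coded identifiers makes this fixture deterministic: rev_id() produces a fresh random hex string on every call, so the generated script names (and any ordering derived from them) differed between runs.

    from alembic import util

    print(util.rev_id())  # e.g. 'e6239818bb3a' -- different every call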
diff -pruN 1.7.6-1/tools/write_pyi.py 1.8.1-2/tools/write_pyi.py
--- 1.7.6-1/tools/write_pyi.py	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/tools/write_pyi.py	2022-07-13 14:17:20.000000000 +0000
@@ -4,6 +4,7 @@ import re
 import sys
 from tempfile import NamedTemporaryFile
 import textwrap
+import typing
 
 from mako.pygen import PythonPrinter
 
@@ -15,6 +16,8 @@ if True:  # avoid flake/zimports messing
     from alembic.script.write_hooks import console_scripts
     from alembic.util.compat import inspect_formatargspec
     from alembic.util.compat import inspect_getfullargspec
+    from alembic.operations import ops
+    import sqlalchemy as sa
 
 IGNORE_ITEMS = {
     "op": {"context", "create_module_class_proxy"},
@@ -24,6 +27,18 @@ IGNORE_ITEMS = {
         "requires_connection",
     },
 }
+TRIM_MODULE = [
+    "alembic.runtime.migration.",
+    "alembic.operations.ops.",
+    "sqlalchemy.engine.base.",
+    "sqlalchemy.sql.schema.",
+    "sqlalchemy.sql.selectable.",
+    "sqlalchemy.sql.elements.",
+    "sqlalchemy.sql.type_api.",
+    "sqlalchemy.sql.functions.",
+    "sqlalchemy.sql.dml.",
+]
+CONTEXT_MANAGERS = {"op": ["batch_alter_table"]}
 
 
 def generate_pyi_for_proxy(
@@ -32,8 +47,10 @@ def generate_pyi_for_proxy(
     source_path: Path,
     destination_path: Path,
     ignore_output: bool,
-    ignore_items: set,
+    file_key: str,
 ):
+    ignore_items = IGNORE_ITEMS.get(file_key, set())
+    context_managers = CONTEXT_MANAGERS.get(file_key, [])
     if sys.version_info < (3, 9):
         raise RuntimeError("This script must be run with Python 3.9 or higher")
 
@@ -66,14 +83,24 @@ def generate_pyi_for_proxy(
         printer.writeline("### end imports ###")
         buf.write("\n\n")
 
+        module = sys.modules[cls.__module__]
+        env = {
+            **sa.__dict__,
+            **sa.types.__dict__,
+            **ops.__dict__,
+            **module.__dict__,
+        }
+
         for name in dir(cls):
             if name.startswith("_") or name in ignore_items:
                 continue
-            meth = getattr(cls, name)
+            meth = getattr(cls, name, None)
             if callable(meth):
-                _generate_stub_for_meth(cls, name, printer)
+                _generate_stub_for_meth(
+                    cls, name, printer, env, name in context_managers
+                )
             else:
-                _generate_stub_for_attr(cls, name, printer)
+                _generate_stub_for_attr(cls, name, printer, env)
 
         printer.close()
 
@@ -92,18 +119,29 @@ def generate_pyi_for_proxy(
     )
 
 
-def _generate_stub_for_attr(cls, name, printer):
-    type_ = cls.__annotations__.get(name, "Any")
+def _generate_stub_for_attr(cls, name, printer, env):
+    try:
+        annotations = typing.get_type_hints(cls, env)
+    except NameError:
+        annotations = cls.__annotations__
+    type_ = annotations.get(name, "Any")
+    if isinstance(type_, str) and type_[0] in "'\"":
+        type_ = type_[1:-1]
     printer.writeline(f"{name}: {type_}")
 
 
-def _generate_stub_for_meth(cls, name, printer):
+def _generate_stub_for_meth(cls, name, printer, env, is_context_manager):
 
     fn = getattr(cls, name)
     while hasattr(fn, "__wrapped__"):
         fn = fn.__wrapped__
 
     spec = inspect_getfullargspec(fn)
+    try:
+        annotations = typing.get_type_hints(fn, env)
+        spec.annotations.update(annotations)
+    except NameError:
+        pass
 
     name_args = spec[0]
     assert name_args[0:1] == ["self"] or name_args[0:1] == ["cls"]
@@ -119,7 +157,10 @@ def _generate_stub_for_meth(cls, name, p
             else:
                 retval = annotation.__module__ + "." + annotation.__qualname__
         else:
-            retval = repr(annotation)
+            retval = annotation
+
+        for trim in TRIM_MODULE:
+            retval = retval.replace(trim, "")
 
         retval = re.sub(
             r'ForwardRef\(([\'"].+?[\'"])\)', lambda m: m.group(1), retval
@@ -127,25 +168,24 @@ def _generate_stub_for_meth(cls, name, p
         retval = re.sub("NoneType", "None", retval)
         return retval
 
-    argspec = inspect_formatargspec(*spec, formatannotation=_formatannotation)
-
+    argspec = inspect_formatargspec(
+        *spec,
+        formatannotation=_formatannotation,
+        formatreturns=lambda val: f"-> {_formatannotation(val)}",
+    )
+    contextmanager = "@contextmanager" if is_context_manager else ""
     func_text = textwrap.dedent(
-        """\
-    def %(name)s%(argspec)s:
-        '''%(doc)s'''
+        f"""
+    {contextmanager}
+    def {name}{argspec}:
+        '''{fn.__doc__}'''
     """
-        % {
-            "name": name,
-            "argspec": argspec,
-            "doc": fn.__doc__,
-        }
     )
-
     printer.write_indented_block(func_text)
 
 
 def run_file(
-    source_path: Path, cls_to_generate: type, stdout: bool, ignore_items: set
+    source_path: Path, cls_to_generate: type, stdout: bool, file_key: str
 ):
     progname = Path(sys.argv[0]).as_posix()
     if not stdout:
@@ -155,7 +195,7 @@ def run_file(
             source_path=source_path,
             destination_path=source_path,
             ignore_output=False,
-            ignore_items=ignore_items,
+            file_key=file_key,
         )
     else:
         with NamedTemporaryFile(delete=False, suffix=".pyi") as f:
@@ -167,7 +207,7 @@ def run_file(
                 source_path=source_path,
                 destination_path=f_path,
                 ignore_output=True,
-                ignore_items=ignore_items,
+                file_key=file_key,
             )
             sys.stdout.write(f_path.read_text())
         f_path.unlink()
@@ -176,15 +216,13 @@ def run_file(
 def main(args):
     location = Path(__file__).parent.parent / "alembic"
     if args.file in {"all", "op"}:
-        run_file(
-            location / "op.pyi", Operations, args.stdout, IGNORE_ITEMS["op"]
-        )
+        run_file(location / "op.pyi", Operations, args.stdout, "op")
     if args.file in {"all", "context"}:
         run_file(
             location / "context.pyi",
             EnvironmentContext,
             args.stdout,
-            IGNORE_ITEMS["context"],
+            "context",
         )
 
 
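The write_pyi.py changes above make the generated .pyi stubs carry resolved annotations: string (forward-reference) annotations are evaluated via typing.get_type_hints() against a namespace merged from sqlalchemy, sqlalchemy.types, alembic.operations.ops, and the proxied class's own module; module prefixes listed in TRIM_MODULE are then stripped for readability, and batch_alter_table is stubbed as a context manager. A self-contained sketch of the resolution step and its NameError fallback (class names here are illustrative, not from the patch):

    import typing

    def hidden():
        class MyType:  # deliberately not in module globals
            pass
        return MyType

    env = {"MyType": hidden()}  # stands in for the merged sa/ops/module dicts

    class Proxy:
        x: "MyType"

    print(typing.get_type_hints(Proxy, env))  # {'x': <class '...MyType'>}
    # without env, the same call raises NameError here, which is why the
    # patched code catches NameError and falls back to the raw
    # cls.__annotations__
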
diff -pruN 1.7.6-1/tox.ini 1.8.1-2/tox.ini
--- 1.7.6-1/tox.ini	2022-02-01 15:00:09.000000000 +0000
+++ 1.8.1-2/tox.ini	2022-07-13 14:17:20.000000000 +0000
@@ -24,7 +24,7 @@ deps=pytest>4.6
      mako
      python-dateutil
      zimports
-     black
+     black==22.3.0
 
 
 
@@ -89,7 +89,7 @@ deps=
       pydocstyle<4.0.0
       # used by flake8-rst-docstrings
       pygments
-      black==21.5b1
+      black==22.3.0
 commands =
      flake8 ./alembic/ ./tests/ setup.py docs/build/conf.py {posargs}
      black --check setup.py tests alembic
