Diffstat (limited to '.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism')
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__init__.py  7
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/ismags.py  1163
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorph.py  249
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py  1238
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/matchhelpers.py  352
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py  308
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__init__.py  0
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99  bin 0 -> 1442 bytes
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99  bin 0 -> 1442 bytes
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99  bin 0 -> 310 bytes
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99  bin 0 -> 1602 bytes
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py  327
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py  48
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py  410
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_match_helpers.py  64
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py  212
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py  292
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp.py  1608
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py  3106
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py  200
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tree_isomorphism.py  284
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2pp.py  1075
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py  192
23 files changed, 11135 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__init__.py
new file mode 100644
index 00000000..58c22688
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__init__.py
@@ -0,0 +1,7 @@
+from networkx.algorithms.isomorphism.isomorph import *
+from networkx.algorithms.isomorphism.vf2userfunc import *
+from networkx.algorithms.isomorphism.matchhelpers import *
+from networkx.algorithms.isomorphism.temporalisomorphvf2 import *
+from networkx.algorithms.isomorphism.ismags import *
+from networkx.algorithms.isomorphism.tree_isomorphism import *
+from networkx.algorithms.isomorphism.vf2pp import *
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/ismags.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/ismags.py
new file mode 100644
index 00000000..24819faf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/ismags.py
@@ -0,0 +1,1163 @@
+"""
+ISMAGS Algorithm
+================
+
+Provides a Python implementation of the ISMAGS algorithm. [1]_
+
+It is capable of finding (subgraph) isomorphisms between two graphs, taking the
+symmetry of the subgraph into account. In most cases the VF2 algorithm is
+faster (at least on small graphs) than this implementation, but in some cases
+there is an exponential number of isomorphisms that are symmetrically
+equivalent. In that case, the ISMAGS algorithm will provide only one solution
+per symmetry group.
+
+>>> petersen = nx.petersen_graph()
+>>> ismags = nx.isomorphism.ISMAGS(petersen, petersen)
+>>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=False))
+>>> len(isomorphisms)
+120
+>>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=True))
+>>> answer = [{0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9}]
+>>> answer == isomorphisms
+True
+
+In addition, this implementation provides an interface to find the largest
+common induced subgraph [2]_ between any two graphs, again taking symmetry
+into account. Given `graph` and `subgraph`, the algorithm will remove nodes
+from `subgraph` until `subgraph` is isomorphic to a subgraph of `graph`.
+Since only the symmetry of `subgraph` is taken into account, it is worth
+thinking about how you provide your graphs:
+
+>>> graph1 = nx.path_graph(4)
+>>> graph2 = nx.star_graph(3)
+>>> ismags = nx.isomorphism.ISMAGS(graph1, graph2)
+>>> ismags.is_isomorphic()
+False
+>>> largest_common_subgraph = list(ismags.largest_common_subgraph())
+>>> answer = [{1: 0, 0: 1, 2: 2}, {2: 0, 1: 1, 3: 2}]
+>>> answer == largest_common_subgraph
+True
+>>> ismags2 = nx.isomorphism.ISMAGS(graph2, graph1)
+>>> largest_common_subgraph = list(ismags2.largest_common_subgraph())
+>>> answer = [
+...     {1: 0, 0: 1, 2: 2},
+...     {1: 0, 0: 1, 3: 2},
+...     {2: 0, 0: 1, 1: 2},
+...     {2: 0, 0: 1, 3: 2},
+...     {3: 0, 0: 1, 1: 2},
+...     {3: 0, 0: 1, 2: 2},
+... ]
+>>> answer == largest_common_subgraph
+True
+
+However, when not taking symmetry into account, the order in which you
+provide the graphs doesn't matter:
+
+>>> largest_common_subgraph = list(ismags.largest_common_subgraph(symmetry=False))
+>>> answer = [
+...     {1: 0, 0: 1, 2: 2},
+...     {1: 0, 2: 1, 0: 2},
+...     {2: 0, 1: 1, 3: 2},
+...     {2: 0, 3: 1, 1: 2},
+...     {1: 0, 0: 1, 2: 3},
+...     {1: 0, 2: 1, 0: 3},
+...     {2: 0, 1: 1, 3: 3},
+...     {2: 0, 3: 1, 1: 3},
+...     {1: 0, 0: 2, 2: 3},
+...     {1: 0, 2: 2, 0: 3},
+...     {2: 0, 1: 2, 3: 3},
+...     {2: 0, 3: 2, 1: 3},
+... ]
+>>> answer == largest_common_subgraph
+True
+>>> largest_common_subgraph = list(ismags2.largest_common_subgraph(symmetry=False))
+>>> answer = [
+...     {1: 0, 0: 1, 2: 2},
+...     {1: 0, 0: 1, 3: 2},
+...     {2: 0, 0: 1, 1: 2},
+...     {2: 0, 0: 1, 3: 2},
+...     {3: 0, 0: 1, 1: 2},
+...     {3: 0, 0: 1, 2: 2},
+...     {1: 1, 0: 2, 2: 3},
+...     {1: 1, 0: 2, 3: 3},
+...     {2: 1, 0: 2, 1: 3},
+...     {2: 1, 0: 2, 3: 3},
+...     {3: 1, 0: 2, 1: 3},
+...     {3: 1, 0: 2, 2: 3},
+... ]
+>>> answer == largest_common_subgraph
+True
+
+Notes
+-----
+- The current implementation works for undirected graphs only. The algorithm
+  itself should, in principle, work for directed graphs as well.
+- Node keys for both provided graphs need to be fully orderable as well as
+  hashable.
+- Node and edge equality is assumed to be transitive: if A is equal to B, and
+  B is equal to C, then A is equal to C.
+
+References
+----------
+.. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
+   M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
+   Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
+   Enumeration", PLoS One 9(5): e97896, 2014.
+   https://doi.org/10.1371/journal.pone.0097896
+.. [2] https://en.wikipedia.org/wiki/Maximum_common_induced_subgraph
+"""
+
+__all__ = ["ISMAGS"]
+
+import itertools
+from collections import Counter, defaultdict
+from functools import reduce, wraps
+
+
+def are_all_equal(iterable):
+    """
+    Returns ``True`` if and only if all elements in `iterable` are equal, and
+    ``False`` otherwise.
+
+    Parameters
+    ----------
+    iterable: collections.abc.Iterable
+        The container whose elements will be checked.
+
+    Returns
+    -------
+    bool
+        ``True`` iff all elements in `iterable` compare equal, ``False``
+        otherwise.
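+
+    Examples
+    --------
+    Note that an empty iterable also counts as "all equal":
+
+    >>> are_all_equal([1, 1, 1])
+    True
+    >>> are_all_equal([1, 2, 1])
+    False
+    >>> are_all_equal([])
+    True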
+    """
+    try:
+        shape = iterable.shape
+    except AttributeError:
+        pass
+    else:
+        if len(shape) > 1:
+            message = "The function does not works on multidimensional arrays."
+            raise NotImplementedError(message) from None
+
+    iterator = iter(iterable)
+    first = next(iterator, None)
+    return all(item == first for item in iterator)
+
+
+def make_partitions(items, test):
+    """
+    Partitions items into sets based on the outcome of ``test(item1, item2)``.
+    Pairs of items for which `test` returns `True` end up in the same set.
+
+    Parameters
+    ----------
+    items : collections.abc.Iterable[collections.abc.Hashable]
+        Items to partition
+    test : collections.abc.Callable[collections.abc.Hashable, collections.abc.Hashable]
+        A function that will be called with 2 arguments, taken from items.
+        Should return `True` if those 2 items need to end up in the same
+        partition, and `False` otherwise.
+
+    Returns
+    -------
+    list[set]
+        A list of sets, with each set containing part of the items in `items`,
+        such that ``all(test(*pair) for pair in itertools.combinations(set, 2))
+        == True``
+
+    Notes
+    -----
+    The function `test` is assumed to be transitive: if ``test(a, b)`` and
+    ``test(b, c)`` return ``True``, then ``test(a, c)`` must also be ``True``.
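+
+    Examples
+    --------
+    Partitioning the integers 0-3 by parity:
+
+    >>> parts = make_partitions(range(4), lambda x, y: x % 2 == y % 2)
+    >>> parts == [{0, 2}, {1, 3}]
+    True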
+    """
+    partitions = []
+    for item in items:
+        for partition in partitions:
+            p_item = next(iter(partition))
+            if test(item, p_item):
+                partition.add(item)
+                break
+        else:  # No break
+            partitions.append({item})
+    return partitions
+
+
+def partition_to_color(partitions):
+    """
+    Creates a dictionary that maps each item in each partition to the index of
+    the partition to which it belongs.
+
+    Parameters
+    ----------
+    partitions: collections.abc.Sequence[collections.abc.Iterable]
+        As returned by :func:`make_partitions`.
+
+    Returns
+    -------
+    dict
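+
+    Examples
+    --------
+    Two partitions become colors 0 and 1:
+
+    >>> partition_to_color([{"a", "b"}, {"c"}]) == {"a": 0, "b": 0, "c": 1}
+    True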
+    """
+    colors = {}
+    for color, keys in enumerate(partitions):
+        for key in keys:
+            colors[key] = color
+    return colors
+
+
+def intersect(collection_of_sets):
+    """
+    Given a collection of sets, returns the intersection of those sets.
+
+    Parameters
+    ----------
+    collection_of_sets: collections.abc.Collection[set]
+        A collection of sets.
+
+    Returns
+    -------
+    set
+        An intersection of all sets in `collection_of_sets`. Will have the same
+        type as the item initially taken from `collection_of_sets`.
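+
+    Examples
+    --------
+    The intersection of two overlapping sets:
+
+    >>> intersect([{1, 2, 3}, {2, 3, 4}]) == {2, 3}
+    True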
+    """
+    collection_of_sets = list(collection_of_sets)
+    first = collection_of_sets.pop()
+    out = reduce(set.intersection, collection_of_sets, set(first))
+    return type(first)(out)
+
+
+class ISMAGS:
+    """
+    Implements the ISMAGS subgraph matching algorithm. [1]_ ISMAGS stands for
+    "Index-based Subgraph Matching Algorithm with General Symmetries". As the
+    name implies, it is symmetry aware and will only generate non-symmetric
+    isomorphisms.
+
+    Notes
+    -----
+    The implementation imposes additional conditions compared to the VF2
+    algorithm on the graphs provided and the comparison functions
+    (:attr:`node_equality` and :attr:`edge_equality`):
+
+     - Node keys in both graphs must be orderable as well as hashable.
+     - Equality must be transitive: if A is equal to B, and B is equal to C,
+       then A must be equal to C.
+
+    Attributes
+    ----------
+    graph: networkx.Graph
+    subgraph: networkx.Graph
+    node_equality: collections.abc.Callable
+        The function called to see if two nodes should be considered equal.
+        Its signature looks like this:
+        ``f(graph1: networkx.Graph, node1, graph2: networkx.Graph, node2) -> bool``.
+        `node1` is a node in `graph1`, and `node2` a node in `graph2`.
+        Constructed from the argument `node_match`.
+    edge_equality: collections.abc.Callable
+        The function called to see if two edges should be considered equal.
+        Its signature looks like this:
+        ``f(graph1: networkx.Graph, edge1, graph2: networkx.Graph, edge2) -> bool``.
+        `edge1` is an edge in `graph1`, and `edge2` an edge in `graph2`.
+        Constructed from the argument `edge_match`.
+
+    References
+    ----------
+    .. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
+       M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
+       Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
+       Enumeration", PLoS One 9(5): e97896, 2014.
+       https://doi.org/10.1371/journal.pone.0097896
+    """
+
+    def __init__(self, graph, subgraph, node_match=None, edge_match=None, cache=None):
+        """
+        Parameters
+        ----------
+        graph: networkx.Graph
+        subgraph: networkx.Graph
+        node_match: collections.abc.Callable or None
+            Function used to determine whether two nodes are equivalent. Its
+            signature should look like ``f(n1: dict, n2: dict) -> bool``, with
+            `n1` and `n2` node property dicts. See also
+            :func:`~networkx.algorithms.isomorphism.categorical_node_match` and
+            friends.
+            If `None`, all nodes are considered equal.
+        edge_match: collections.abc.Callable or None
+            Function used to determine whether two edges are equivalent. Its
+            signature should look like ``f(e1: dict, e2: dict) -> bool``, with
+            `e1` and `e2` edge property dicts. See also
+            :func:`~networkx.algorithms.isomorphism.categorical_edge_match` and
+            friends.
+            If `None`, all edges are considered equal.
+        cache: collections.abc.Mapping
+            A cache used for caching graph symmetries.
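+
+        For example, nodes can be matched on an optional ``color`` attribute
+        using one of the helpers referenced above:
+
+        >>> g = nx.path_graph(3)
+        >>> nm = nx.isomorphism.categorical_node_match("color", None)
+        >>> ismags = nx.isomorphism.ISMAGS(g, g, node_match=nm)
+        >>> ismags.is_isomorphic()
+        True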
+        """
+        # TODO: graph and subgraph setter methods that invalidate the caches.
+        # TODO: allow for precomputed partitions and colors
+        self.graph = graph
+        self.subgraph = subgraph
+        self._symmetry_cache = cache
+        # Naming conventions are taken from the original paper. For your
+        # sanity:
+        #   sg: subgraph
+        #   g: graph
+        #   e: edge(s)
+        #   n: node(s)
+        # So: sgn means "subgraph nodes".
+        self._sgn_partitions_ = None
+        self._sge_partitions_ = None
+
+        self._sgn_colors_ = None
+        self._sge_colors_ = None
+
+        self._gn_partitions_ = None
+        self._ge_partitions_ = None
+
+        self._gn_colors_ = None
+        self._ge_colors_ = None
+
+        self._node_compat_ = None
+        self._edge_compat_ = None
+
+        if node_match is None:
+            self.node_equality = self._node_match_maker(lambda n1, n2: True)
+            self._sgn_partitions_ = [set(self.subgraph.nodes)]
+            self._gn_partitions_ = [set(self.graph.nodes)]
+            self._node_compat_ = {0: 0}
+        else:
+            self.node_equality = self._node_match_maker(node_match)
+        if edge_match is None:
+            self.edge_equality = self._edge_match_maker(lambda e1, e2: True)
+            self._sge_partitions_ = [set(self.subgraph.edges)]
+            self._ge_partitions_ = [set(self.graph.edges)]
+            self._edge_compat_ = {0: 0}
+        else:
+            self.edge_equality = self._edge_match_maker(edge_match)
+
+    @property
+    def _sgn_partitions(self):
+        if self._sgn_partitions_ is None:
+
+            def nodematch(node1, node2):
+                return self.node_equality(self.subgraph, node1, self.subgraph, node2)
+
+            self._sgn_partitions_ = make_partitions(self.subgraph.nodes, nodematch)
+        return self._sgn_partitions_
+
+    @property
+    def _sge_partitions(self):
+        if self._sge_partitions_ is None:
+
+            def edgematch(edge1, edge2):
+                return self.edge_equality(self.subgraph, edge1, self.subgraph, edge2)
+
+            self._sge_partitions_ = make_partitions(self.subgraph.edges, edgematch)
+        return self._sge_partitions_
+
+    @property
+    def _gn_partitions(self):
+        if self._gn_partitions_ is None:
+
+            def nodematch(node1, node2):
+                return self.node_equality(self.graph, node1, self.graph, node2)
+
+            self._gn_partitions_ = make_partitions(self.graph.nodes, nodematch)
+        return self._gn_partitions_
+
+    @property
+    def _ge_partitions(self):
+        if self._ge_partitions_ is None:
+
+            def edgematch(edge1, edge2):
+                return self.edge_equality(self.graph, edge1, self.graph, edge2)
+
+            self._ge_partitions_ = make_partitions(self.graph.edges, edgematch)
+        return self._ge_partitions_
+
+    @property
+    def _sgn_colors(self):
+        if self._sgn_colors_ is None:
+            self._sgn_colors_ = partition_to_color(self._sgn_partitions)
+        return self._sgn_colors_
+
+    @property
+    def _sge_colors(self):
+        if self._sge_colors_ is None:
+            self._sge_colors_ = partition_to_color(self._sge_partitions)
+        return self._sge_colors_
+
+    @property
+    def _gn_colors(self):
+        if self._gn_colors_ is None:
+            self._gn_colors_ = partition_to_color(self._gn_partitions)
+        return self._gn_colors_
+
+    @property
+    def _ge_colors(self):
+        if self._ge_colors_ is None:
+            self._ge_colors_ = partition_to_color(self._ge_partitions)
+        return self._ge_colors_
+
+    @property
+    def _node_compatibility(self):
+        if self._node_compat_ is not None:
+            return self._node_compat_
+        self._node_compat_ = {}
+        for sgn_part_color, gn_part_color in itertools.product(
+            range(len(self._sgn_partitions)), range(len(self._gn_partitions))
+        ):
+            sgn = next(iter(self._sgn_partitions[sgn_part_color]))
+            gn = next(iter(self._gn_partitions[gn_part_color]))
+            if self.node_equality(self.subgraph, sgn, self.graph, gn):
+                self._node_compat_[sgn_part_color] = gn_part_color
+        return self._node_compat_
+
+    @property
+    def _edge_compatibility(self):
+        if self._edge_compat_ is not None:
+            return self._edge_compat_
+        self._edge_compat_ = {}
+        for sge_part_color, ge_part_color in itertools.product(
+            range(len(self._sge_partitions)), range(len(self._ge_partitions))
+        ):
+            sge = next(iter(self._sge_partitions[sge_part_color]))
+            ge = next(iter(self._ge_partitions[ge_part_color]))
+            if self.edge_equality(self.subgraph, sge, self.graph, ge):
+                self._edge_compat_[sge_part_color] = ge_part_color
+        return self._edge_compat_
+
+    @staticmethod
+    def _node_match_maker(cmp):
+        @wraps(cmp)
+        def comparer(graph1, node1, graph2, node2):
+            return cmp(graph1.nodes[node1], graph2.nodes[node2])
+
+        return comparer
+
+    @staticmethod
+    def _edge_match_maker(cmp):
+        @wraps(cmp)
+        def comparer(graph1, edge1, graph2, edge2):
+            return cmp(graph1.edges[edge1], graph2.edges[edge2])
+
+        return comparer
+
+    def find_isomorphisms(self, symmetry=True):
+        """Find all subgraph isomorphisms between subgraph and graph
+
+        Finds isomorphisms where :attr:`subgraph` <= :attr:`graph`.
+
+        Parameters
+        ----------
+        symmetry: bool
+            Whether symmetry should be taken into account. If False, found
+            isomorphisms may be symmetrically equivalent.
+
+        Yields
+        ------
+        dict
+            The found isomorphism mappings of {graph_node: subgraph_node}.
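+
+        For example, a 2-node path maps onto a 4-node path along each of its
+        three edges; taking the subgraph's mirror symmetry into account halves
+        the number of distinct mappings:
+
+        >>> ismags = nx.isomorphism.ISMAGS(nx.path_graph(4), nx.path_graph(2))
+        >>> len(list(ismags.find_isomorphisms(symmetry=True)))
+        3
+        >>> len(list(ismags.find_isomorphisms(symmetry=False)))
+        6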
+        """
+        # The networkx VF2 algorithm is slightly inconsistent about when it
+        # yields an empty dict and when it does not.
+        if not self.subgraph:
+            yield {}
+            return
+        elif not self.graph:
+            return
+        elif len(self.graph) < len(self.subgraph):
+            return
+
+        if symmetry:
+            _, cosets = self.analyze_symmetry(
+                self.subgraph, self._sgn_partitions, self._sge_colors
+            )
+            constraints = self._make_constraints(cosets)
+        else:
+            constraints = []
+
+        candidates = self._find_nodecolor_candidates()
+        la_candidates = self._get_lookahead_candidates()
+        for sgn in self.subgraph:
+            extra_candidates = la_candidates[sgn]
+            if extra_candidates:
+                candidates[sgn] = candidates[sgn] | {frozenset(extra_candidates)}
+
+        if any(candidates.values()):
+            start_sgn = min(candidates, key=lambda n: min(candidates[n], key=len))
+            candidates[start_sgn] = (intersect(candidates[start_sgn]),)
+            yield from self._map_nodes(start_sgn, candidates, constraints)
+        else:
+            return
+
+    @staticmethod
+    def _find_neighbor_color_count(graph, node, node_color, edge_color):
+        """
+        For `node` in `graph`, count the number of edges of a specific color
+        it has to nodes of a specific color.
+        """
+        counts = Counter()
+        neighbors = graph[node]
+        for neighbor in neighbors:
+            n_color = node_color[neighbor]
+            if (node, neighbor) in edge_color:
+                e_color = edge_color[node, neighbor]
+            else:
+                e_color = edge_color[neighbor, node]
+            counts[e_color, n_color] += 1
+        return counts
+
+    def _get_lookahead_candidates(self):
+        """
+        Returns a mapping of {subgraph node: collection of graph nodes} for
+        which the graph nodes are feasible candidates for the subgraph node, as
+        determined by looking ahead one edge.
+        """
+        g_counts = {}
+        for gn in self.graph:
+            g_counts[gn] = self._find_neighbor_color_count(
+                self.graph, gn, self._gn_colors, self._ge_colors
+            )
+        candidates = defaultdict(set)
+        for sgn in self.subgraph:
+            sg_count = self._find_neighbor_color_count(
+                self.subgraph, sgn, self._sgn_colors, self._sge_colors
+            )
+            new_sg_count = Counter()
+            for (sge_color, sgn_color), count in sg_count.items():
+                try:
+                    ge_color = self._edge_compatibility[sge_color]
+                    gn_color = self._node_compatibility[sgn_color]
+                except KeyError:
+                    pass
+                else:
+                    new_sg_count[ge_color, gn_color] = count
+
+            for gn, g_count in g_counts.items():
+                if all(new_sg_count[x] <= g_count[x] for x in new_sg_count):
+                    # Valid candidate
+                    candidates[sgn].add(gn)
+        return candidates
+
+    def largest_common_subgraph(self, symmetry=True):
+        """
+        Find the largest common induced subgraphs between :attr:`subgraph` and
+        :attr:`graph`.
+
+        Parameters
+        ----------
+        symmetry: bool
+            Whether symmetry should be taken into account. If False, found
+            largest common subgraphs may be symmetrically equivalent.
+
+        Yields
+        ------
+        dict
+            The found isomorphism mappings of {graph_node: subgraph_node}.
+        """
+        # The networkx VF2 algorithm is slightly inconsistent about when it
+        # yields an empty dict and when it does not.
+        if not self.subgraph:
+            yield {}
+            return
+        elif not self.graph:
+            return
+
+        if symmetry:
+            _, cosets = self.analyze_symmetry(
+                self.subgraph, self._sgn_partitions, self._sge_colors
+            )
+            constraints = self._make_constraints(cosets)
+        else:
+            constraints = []
+
+        candidates = self._find_nodecolor_candidates()
+
+        if any(candidates.values()):
+            yield from self._largest_common_subgraph(candidates, constraints)
+        else:
+            return
+
+    def analyze_symmetry(self, graph, node_partitions, edge_colors):
+        """
+        Find a minimal set of permutations and corresponding co-sets that
+        describe the symmetry of `graph`, given the node and edge equalities
+        given by `node_partitions` and `edge_colors`, respectively.
+
+        Parameters
+        ----------
+        graph : networkx.Graph
+            The graph whose symmetry should be analyzed.
+        node_partitions : list of sets
+            A list of sets containing node keys. Node keys in the same set
+            are considered equivalent. Every node key in `graph` should be in
+            exactly one of the sets. If all nodes are equivalent, this should
+            be ``[set(graph.nodes)]``.
+        edge_colors : dict mapping edges to their colors
+            A dict mapping every edge in `graph` to its corresponding color.
+            Edges with the same color are considered equivalent. If all edges
+            are equivalent, this should be ``{e: 0 for e in graph.edges}``.
+
+
+        Returns
+        -------
+        set[frozenset]
+            The found permutations. This is a set of frozensets of pairs of node
+            keys which can be exchanged without changing `graph`.
+        dict[collections.abc.Hashable, set[collections.abc.Hashable]]
+            The found co-sets, given as a dictionary of
+            ``{node key: set of node keys}``.
+            Every key-value pair describes which ``values`` can be interchanged
+            without changing nodes less than ``key``.
+        """
+        if self._symmetry_cache is not None:
+            key = hash(
+                (
+                    tuple(graph.nodes),
+                    tuple(graph.edges),
+                    tuple(map(tuple, node_partitions)),
+                    tuple(edge_colors.items()),
+                )
+            )
+            if key in self._symmetry_cache:
+                return self._symmetry_cache[key]
+        node_partitions = list(
+            self._refine_node_partitions(graph, node_partitions, edge_colors)
+        )
+        assert len(node_partitions) == 1
+        node_partitions = node_partitions[0]
+        permutations, cosets = self._process_ordered_pair_partitions(
+            graph, node_partitions, node_partitions, edge_colors
+        )
+        if self._symmetry_cache is not None:
+            self._symmetry_cache[key] = permutations, cosets
+        return permutations, cosets
+
+    def is_isomorphic(self, symmetry=False):
+        """
+        Returns True if :attr:`graph` is isomorphic to :attr:`subgraph` and
+        False otherwise.
+
+        Returns
+        -------
+        bool
+        """
+        return len(self.subgraph) == len(self.graph) and self.subgraph_is_isomorphic(
+            symmetry
+        )
+
+    def subgraph_is_isomorphic(self, symmetry=False):
+        """
+        Returns True if a subgraph of :attr:`graph` is isomorphic to
+        :attr:`subgraph` and False otherwise.
+
+        Returns
+        -------
+        bool
+        """
+        # symmetry=False, since we only need to know whether there is any
+        # example; figuring out all symmetry elements probably costs more time
+        # than it gains.
+        isom = next(self.subgraph_isomorphisms_iter(symmetry=symmetry), None)
+        return isom is not None
+
+    def isomorphisms_iter(self, symmetry=True):
+        """
+        Does the same as :meth:`find_isomorphisms` if :attr:`graph` and
+        :attr:`subgraph` have the same number of nodes.
+        """
+        if len(self.graph) == len(self.subgraph):
+            yield from self.subgraph_isomorphisms_iter(symmetry=symmetry)
+
+    def subgraph_isomorphisms_iter(self, symmetry=True):
+        """Alternative name for :meth:`find_isomorphisms`."""
+        return self.find_isomorphisms(symmetry)
+
+    def _find_nodecolor_candidates(self):
+        """
+        Per node in subgraph find all nodes in graph that have the same color.
+        """
+        candidates = defaultdict(set)
+        for sgn in self.subgraph.nodes:
+            sgn_color = self._sgn_colors[sgn]
+            if sgn_color in self._node_compatibility:
+                gn_color = self._node_compatibility[sgn_color]
+                candidates[sgn].add(frozenset(self._gn_partitions[gn_color]))
+            else:
+                candidates[sgn].add(frozenset())
+        candidates = dict(candidates)
+        for sgn, options in candidates.items():
+            candidates[sgn] = frozenset(options)
+        return candidates
+
+    @staticmethod
+    def _make_constraints(cosets):
+        """
+        Turn cosets into constraints.
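+
+        For example, the coset ``{1: {1, 2, 3}}`` says that nodes 1, 2 and 3
+        are interchangeable, which becomes one ordering constraint per other
+        orbit member:
+
+        >>> sorted(ISMAGS._make_constraints({1: {1, 2, 3}}))
+        [(1, 2), (1, 3)]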
+        """
+        constraints = []
+        for node_i, node_ts in cosets.items():
+            for node_t in node_ts:
+                if node_i != node_t:
+                    # Node i must be smaller than node t.
+                    constraints.append((node_i, node_t))
+        return constraints
+
+    @staticmethod
+    def _find_node_edge_color(graph, node_colors, edge_colors):
+        """
+        For every node in graph, come up with a color that combines 1) the
+        color of the node, and 2) the number of edges of a color to each type
+        of node.
+        """
+        counts = defaultdict(lambda: defaultdict(int))
+        for node1, node2 in graph.edges:
+            if (node1, node2) in edge_colors:
+                # FIXME directed graphs
+                ecolor = edge_colors[node1, node2]
+            else:
+                ecolor = edge_colors[node2, node1]
+            # Count per node how many edges it has of what color to nodes of
+            # what color
+            counts[node1][ecolor, node_colors[node2]] += 1
+            counts[node2][ecolor, node_colors[node1]] += 1
+
+        node_edge_colors = {}
+        for node in graph.nodes:
+            node_edge_colors[node] = node_colors[node], set(counts[node].items())
+
+        return node_edge_colors
+
+    @staticmethod
+    def _get_permutations_by_length(items):
+        """
+        Get all permutations of items, but only permute items with the same
+        length.
+
+        >>> found = list(ISMAGS._get_permutations_by_length([[1], [2], [3, 4], [4, 5]]))
+        >>> answer = [
+        ...     (([1], [2]), ([3, 4], [4, 5])),
+        ...     (([1], [2]), ([4, 5], [3, 4])),
+        ...     (([2], [1]), ([3, 4], [4, 5])),
+        ...     (([2], [1]), ([4, 5], [3, 4])),
+        ... ]
+        >>> found == answer
+        True
+        """
+        by_len = defaultdict(list)
+        for item in items:
+            by_len[len(item)].append(item)
+
+        yield from itertools.product(
+            *(itertools.permutations(by_len[l]) for l in sorted(by_len))
+        )
+
+    @classmethod
+    def _refine_node_partitions(cls, graph, node_partitions, edge_colors, branch=False):
+        """
+        Given a partition of nodes in graph, make the partitions smaller such
+        that all nodes in a partition have 1) the same color, and 2) the same
+        number of edges to specific other partitions.
+        """
+
+        def equal_color(node1, node2):
+            return node_edge_colors[node1] == node_edge_colors[node2]
+
+        node_partitions = list(node_partitions)
+        node_colors = partition_to_color(node_partitions)
+        node_edge_colors = cls._find_node_edge_color(graph, node_colors, edge_colors)
+        if all(
+            are_all_equal(node_edge_colors[node] for node in partition)
+            for partition in node_partitions
+        ):
+            yield node_partitions
+            return
+
+        new_partitions = []
+        output = [new_partitions]
+        for partition in node_partitions:
+            if not are_all_equal(node_edge_colors[node] for node in partition):
+                refined = make_partitions(partition, equal_color)
+                if (
+                    branch
+                    and len(refined) != 1
+                    and len({len(r) for r in refined}) != len([len(r) for r in refined])
+                ):
+                    # This is where it breaks. There are multiple new cells
+                    # in refined with the same length, and their order
+                    # matters.
+                    # So option 1) Hit it with a big hammer and simply make all
+                    # orderings.
+                    permutations = cls._get_permutations_by_length(refined)
+                    new_output = []
+                    for n_p in output:
+                        for permutation in permutations:
+                            new_output.append(n_p + list(permutation[0]))
+                    output = new_output
+                else:
+                    for n_p in output:
+                        n_p.extend(sorted(refined, key=len))
+            else:
+                for n_p in output:
+                    n_p.append(partition)
+        for n_p in output:
+            yield from cls._refine_node_partitions(graph, n_p, edge_colors, branch)
+
+    def _edges_of_same_color(self, sgn1, sgn2):
+        """
+        Returns all edges in :attr:`graph` that have the same color as the
+        edge between sgn1 and sgn2 in :attr:`subgraph`.
+        """
+        if (sgn1, sgn2) in self._sge_colors:
+            # FIXME directed graphs
+            sge_color = self._sge_colors[sgn1, sgn2]
+        else:
+            sge_color = self._sge_colors[sgn2, sgn1]
+        if sge_color in self._edge_compatibility:
+            ge_color = self._edge_compatibility[sge_color]
+            g_edges = self._ge_partitions[ge_color]
+        else:
+            g_edges = []
+        return g_edges
+
+    def _map_nodes(self, sgn, candidates, constraints, mapping=None, to_be_mapped=None):
+        """
+        Find all subgraph isomorphisms honoring constraints.
+        """
+        if mapping is None:
+            mapping = {}
+        else:
+            mapping = mapping.copy()
+        if to_be_mapped is None:
+            to_be_mapped = set(self.subgraph.nodes)
+
+        # Note, we modify candidates here. Doesn't seem to affect results, but
+        # remember this.
+        # candidates = candidates.copy()
+        sgn_candidates = intersect(candidates[sgn])
+        candidates[sgn] = frozenset([sgn_candidates])
+        for gn in sgn_candidates:
+            # We're going to try to map sgn to gn.
+            if gn in mapping.values() or sgn not in to_be_mapped:
+                # gn is already mapped, or sgn is not part of the current subgraph
+                continue  # pragma: no cover
+
+            # REDUCTION and COMBINATION
+            mapping[sgn] = gn
+            # BASECASE
+            if to_be_mapped == set(mapping.keys()):
+                yield {v: k for k, v in mapping.items()}
+                continue
+            left_to_map = to_be_mapped - set(mapping.keys())
+
+            new_candidates = candidates.copy()
+            sgn_nbrs = set(self.subgraph[sgn])
+            not_gn_nbrs = set(self.graph.nodes) - set(self.graph[gn])
+            for sgn2 in left_to_map:
+                if sgn2 not in sgn_nbrs:
+                    gn2_options = not_gn_nbrs
+                else:
+                    # Get all edges to gn of the right color:
+                    g_edges = self._edges_of_same_color(sgn, sgn2)
+                    # FIXME directed graphs
+                    # And all nodes involved in those which are connected to gn
+                    gn2_options = {n for e in g_edges for n in e if gn in e}
+                # Node color compatibility should be taken care of by the
+                # initial candidate lists built in find_isomorphisms
+
+                # Add gn2_options to the right collection. Since new_candidates
+                # is a dict of frozensets of frozensets of node indices it's
+                # a bit clunky. We can't do .add, and + also doesn't work. We
+                # could do |, but I deem union to be clearer.
+                new_candidates[sgn2] = new_candidates[sgn2].union(
+                    [frozenset(gn2_options)]
+                )
+
+                if (sgn, sgn2) in constraints:
+                    gn2_options = {gn2 for gn2 in self.graph if gn2 > gn}
+                elif (sgn2, sgn) in constraints:
+                    gn2_options = {gn2 for gn2 in self.graph if gn2 < gn}
+                else:
+                    continue  # pragma: no cover
+                new_candidates[sgn2] = new_candidates[sgn2].union(
+                    [frozenset(gn2_options)]
+                )
+
+            # The next node is the one that is unmapped and has fewest
+            # candidates
+            next_sgn = min(left_to_map, key=lambda n: min(new_candidates[n], key=len))
+            yield from self._map_nodes(
+                next_sgn,
+                new_candidates,
+                constraints,
+                mapping=mapping,
+                to_be_mapped=to_be_mapped,
+            )
+            # Unmap sgn-gn. Strictly not necessary since it'd get overwritten
+            # when making a new mapping for sgn.
+            # del mapping[sgn]
+
+    def _largest_common_subgraph(self, candidates, constraints, to_be_mapped=None):
+        """
+        Find all largest common subgraphs honoring constraints.
+        """
+        if to_be_mapped is None:
+            to_be_mapped = {frozenset(self.subgraph.nodes)}
+
+        # The LCS problem is basically a repeated subgraph isomorphism problem
+        # with smaller and smaller subgraphs. We store the nodes that are
+        # "part of" the subgraph in to_be_mapped, and we make it a little
+        # smaller every iteration.
+
+        current_size = len(next(iter(to_be_mapped), []))
+
+        found_iso = False
+        if current_size <= len(self.graph):
+            # There's no point in trying to find isomorphisms of
+            # graph >= subgraph if subgraph has more nodes than graph.
+
+            # Try the isomorphism first with the nodes with lowest ID. So sort
+            # them. Those are more likely to be part of the final
+            # correspondence. This makes finding the first answer(s) faster. In
+            # theory.
+            for nodes in sorted(to_be_mapped, key=sorted):
+                # Find the isomorphism between subgraph[to_be_mapped] <= graph
+                next_sgn = min(nodes, key=lambda n: min(candidates[n], key=len))
+                isomorphs = self._map_nodes(
+                    next_sgn, candidates, constraints, to_be_mapped=nodes
+                )
+
+                # This is effectively `yield from isomorphs`, except that we look
+                # whether an item was yielded.
+                try:
+                    item = next(isomorphs)
+                except StopIteration:
+                    pass
+                else:
+                    yield item
+                    yield from isomorphs
+                    found_iso = True
+
+        # BASECASE
+        if found_iso or current_size == 1:
+            # Shrinking has no point because either 1) we end up with a smaller
+            # common subgraph (and we want the largest), or 2) there'll be no
+            # more subgraph.
+            return
+
+        left_to_be_mapped = set()
+        for nodes in to_be_mapped:
+            for sgn in nodes:
+                # We're going to remove sgn from to_be_mapped, but subject to
+                # symmetry constraints. We know that the subgraph nodes in
+                # every constraint are symmetry-equivalent, so whenever we
+                # would remove the lower node of a constraint, we remove the
+                # higher one instead. This is all dealt with by _remove_node.
+                # And because left_to_be_mapped is a set, we don't do double
+                # work.
+
+                # And finally, make the subgraph one node smaller.
+                # REDUCTION
+                new_nodes = self._remove_node(sgn, nodes, constraints)
+                left_to_be_mapped.add(new_nodes)
+        # COMBINATION
+        yield from self._largest_common_subgraph(
+            candidates, constraints, to_be_mapped=left_to_be_mapped
+        )
+
+    @staticmethod
+    def _remove_node(node, nodes, constraints):
+        """
+        Returns a new set where node has been removed from nodes, subject to
+        symmetry constraints. We know that the subgraph nodes in every
+        constraint are symmetry-equivalent, so whenever we would remove the
+        lower node of a constraint, we remove the higher one instead.
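+
+        For example, removing node 0 under the constraint ``(0, 2)`` removes
+        node 2 instead:
+
+        >>> nodes = frozenset({0, 1, 2})
+        >>> ISMAGS._remove_node(0, nodes, [(0, 2)]) == frozenset({0, 1})
+        True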
+        """
+        while True:
+            for low, high in constraints:
+                if low == node and high in nodes:
+                    node = high
+                    break
+            else:  # no break, couldn't find node in constraints
+                break
+        return frozenset(nodes - {node})
+
+    @staticmethod
+    def _find_permutations(top_partitions, bottom_partitions):
+        """
+        Return the pairs of top/bottom partitions where the partitions are
+        different. Ensures that all partitions in both top and bottom
+        partitions have size 1.
+        """
+        # Find permutations
+        permutations = set()
+        for top, bot in zip(top_partitions, bottom_partitions):
+            # top and bot have only one element
+            if len(top) != 1 or len(bot) != 1:
+                raise IndexError(
+                    "Not all nodes are coupled. This is"
+                    f" impossible: {top_partitions}, {bottom_partitions}"
+                )
+            if top != bot:
+                permutations.add(frozenset((next(iter(top)), next(iter(bot)))))
+        return permutations
+
+    @staticmethod
+    def _update_orbits(orbits, permutations):
+        """
+        Update orbits based on permutations. Orbits is modified in place.
+        For every pair of items in permutations their respective orbits are
+        merged.
+        """
+        for permutation in permutations:
+            node, node2 = permutation
+            # Find the orbits that contain node and node2, and replace the
+            # orbit containing node with the union
+            first = second = None
+            for idx, orbit in enumerate(orbits):
+                if first is not None and second is not None:
+                    break
+                if node in orbit:
+                    first = idx
+                if node2 in orbit:
+                    second = idx
+            if first != second:
+                orbits[first].update(orbits[second])
+                del orbits[second]
+
+    def _couple_nodes(
+        self,
+        top_partitions,
+        bottom_partitions,
+        pair_idx,
+        t_node,
+        b_node,
+        graph,
+        edge_colors,
+    ):
+        """
+        Generate new partitions from top and bottom_partitions where t_node is
+        coupled to b_node. pair_idx is the index of the partitions where t_ and
+        b_node can be found.
+        """
+        t_partition = top_partitions[pair_idx]
+        b_partition = bottom_partitions[pair_idx]
+        assert t_node in t_partition and b_node in b_partition
+        # Couple node to node2. This means they get their own partition
+        new_top_partitions = [top.copy() for top in top_partitions]
+        new_bottom_partitions = [bot.copy() for bot in bottom_partitions]
+        new_t_groups = {t_node}, t_partition - {t_node}
+        new_b_groups = {b_node}, b_partition - {b_node}
+        # Replace the old partitions with the coupled ones
+        del new_top_partitions[pair_idx]
+        del new_bottom_partitions[pair_idx]
+        new_top_partitions[pair_idx:pair_idx] = new_t_groups
+        new_bottom_partitions[pair_idx:pair_idx] = new_b_groups
+
+        new_top_partitions = self._refine_node_partitions(
+            graph, new_top_partitions, edge_colors
+        )
+        new_bottom_partitions = self._refine_node_partitions(
+            graph, new_bottom_partitions, edge_colors, branch=True
+        )
+        new_top_partitions = list(new_top_partitions)
+        assert len(new_top_partitions) == 1
+        new_top_partitions = new_top_partitions[0]
+        for bot in new_bottom_partitions:
+            yield list(new_top_partitions), bot
+
+    def _process_ordered_pair_partitions(
+        self,
+        graph,
+        top_partitions,
+        bottom_partitions,
+        edge_colors,
+        orbits=None,
+        cosets=None,
+    ):
+        """
+        Processes ordered pair partitions as per the reference paper. Finds and
+        returns all permutations and cosets that leave the graph unchanged.
+        """
+        if orbits is None:
+            orbits = [{node} for node in graph.nodes]
+        else:
+            # Note that we don't copy orbits when we are given one. This means
+            # we leak information between the recursive branches. This is
+            # intentional!
+            orbits = orbits
+        if cosets is None:
+            cosets = {}
+        else:
+            cosets = cosets.copy()
+
+        assert all(
+            len(t_p) == len(b_p) for t_p, b_p in zip(top_partitions, bottom_partitions)
+        )
+
+        # BASECASE
+        if all(len(top) == 1 for top in top_partitions):
+            # All nodes are mapped
+            permutations = self._find_permutations(top_partitions, bottom_partitions)
+            self._update_orbits(orbits, permutations)
+            if permutations:
+                return [permutations], cosets
+            else:
+                return [], cosets
+
+        permutations = []
+        unmapped_nodes = {
+            (node, idx)
+            for idx, t_partition in enumerate(top_partitions)
+            for node in t_partition
+            if len(t_partition) > 1
+        }
+        node, pair_idx = min(unmapped_nodes)
+        b_partition = bottom_partitions[pair_idx]
+
+        for node2 in sorted(b_partition):
+            if len(b_partition) == 1:
+                # Can never result in symmetry
+                continue
+            if node != node2 and any(
+                node in orbit and node2 in orbit for orbit in orbits
+            ):
+                # Orbit prune branch
+                continue
+            # REDUCTION
+            # Couple node to node2
+            partitions = self._couple_nodes(
+                top_partitions,
+                bottom_partitions,
+                pair_idx,
+                node,
+                node2,
+                graph,
+                edge_colors,
+            )
+            for opp in partitions:
+                new_top_partitions, new_bottom_partitions = opp
+
+                new_perms, new_cosets = self._process_ordered_pair_partitions(
+                    graph,
+                    new_top_partitions,
+                    new_bottom_partitions,
+                    edge_colors,
+                    orbits,
+                    cosets,
+                )
+                # COMBINATION
+                permutations += new_perms
+                cosets.update(new_cosets)
+
+        mapped = {
+            k
+            for top, bottom in zip(top_partitions, bottom_partitions)
+            for k in top
+            if len(top) == 1 and top == bottom
+        }
+        ks = {k for k in graph.nodes if k < node}
+        # Have all nodes with ID < node been mapped?
+        find_coset = ks <= mapped and node not in cosets
+        if find_coset:
+            # Find the orbit that contains node
+            for orbit in orbits:
+                if node in orbit:
+                    cosets[node] = orbit.copy()
+        return permutations, cosets
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorph.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorph.py
new file mode 100644
index 00000000..fc3a3fc6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorph.py
@@ -0,0 +1,249 @@
+"""
+Graph isomorphism functions.
+"""
+
+import networkx as nx
+from networkx.exception import NetworkXError
+
+__all__ = [
+    "could_be_isomorphic",
+    "fast_could_be_isomorphic",
+    "faster_could_be_isomorphic",
+    "is_isomorphic",
+]
+
+
+@nx._dispatchable(graphs={"G1": 0, "G2": 1})
+def could_be_isomorphic(G1, G2):
+    """Returns False if graphs are definitely not isomorphic.
+    True does NOT guarantee isomorphism.
+
+    Parameters
+    ----------
+    G1, G2 : graphs
+       The two graphs G1 and G2 must be the same type.
+
+    Notes
+    -----
+    Checks for matching degree, triangle, and clique-count sequences.
+    The triangle sequence contains the number of triangles each node is part of.
+    The clique sequence contains, for each node, the number of maximal cliques
+    involving that node.
+
+    """
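+    Examples
+    --------
+    A 4-cycle and a 4-path have the same order but different degree sequences:
+
+    >>> nx.could_be_isomorphic(nx.cycle_graph(4), nx.path_graph(4))
+    False
+    >>> nx.could_be_isomorphic(nx.cycle_graph(4), nx.cycle_graph(4))
+    True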
+
+    # Check global properties
+    if G1.order() != G2.order():
+        return False
+
+    # Check local properties
+    d1 = G1.degree()
+    t1 = nx.triangles(G1)
+    clqs_1 = list(nx.find_cliques(G1))
+    c1 = {n: sum(1 for c in clqs_1 if n in c) for n in G1}  # number of cliques
+    props1 = [[d, t1[v], c1[v]] for v, d in d1]
+    props1.sort()
+
+    d2 = G2.degree()
+    t2 = nx.triangles(G2)
+    clqs_2 = list(nx.find_cliques(G2))
+    c2 = {n: sum(1 for c in clqs_2 if n in c) for n in G2}  # number of cliques
+    props2 = [[d, t2[v], c2[v]] for v, d in d2]
+    props2.sort()
+
+    if props1 != props2:
+        return False
+
+    # OK...
+    return True
+
+
+graph_could_be_isomorphic = could_be_isomorphic
+
+
+@nx._dispatchable(graphs={"G1": 0, "G2": 1})
+def fast_could_be_isomorphic(G1, G2):
+    """Returns False if graphs are definitely not isomorphic.
+
+    True does NOT guarantee isomorphism.
+
+    Parameters
+    ----------
+    G1, G2 : graphs
+       The two graphs G1 and G2 must be the same type.
+
+    Notes
+    -----
+    Checks for matching degree and triangle sequences. The triangle
+    sequence contains the number of triangles each node is part of.
+    """
+    # Check global properties
+    if G1.order() != G2.order():
+        return False
+
+    # Check local properties
+    d1 = G1.degree()
+    t1 = nx.triangles(G1)
+    props1 = [[d, t1[v]] for v, d in d1]
+    props1.sort()
+
+    d2 = G2.degree()
+    t2 = nx.triangles(G2)
+    props2 = [[d, t2[v]] for v, d in d2]
+    props2.sort()
+
+    if props1 != props2:
+        return False
+
+    # OK...
+    return True
+
+
+fast_graph_could_be_isomorphic = fast_could_be_isomorphic
+
+
+@nx._dispatchable(graphs={"G1": 0, "G2": 1})
+def faster_could_be_isomorphic(G1, G2):
+    """Returns False if graphs are definitely not isomorphic.
+
+    True does NOT guarantee isomorphism.
+
+    Parameters
+    ----------
+    G1, G2 : graphs
+       The two graphs G1 and G2 must be the same type.
+
+    Notes
+    -----
+    Checks for matching degree sequences.
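+
+    Examples
+    --------
+    Two triangles have the same degree sequence as a hexagon, so only a full
+    isomorphism check can tell them apart:
+
+    >>> G1 = nx.cycle_graph(6)
+    >>> G2 = nx.disjoint_union(nx.cycle_graph(3), nx.cycle_graph(3))
+    >>> nx.faster_could_be_isomorphic(G1, G2)
+    True
+    >>> nx.is_isomorphic(G1, G2)
+    False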
+    """
+    # Check global properties
+    if G1.order() != G2.order():
+        return False
+
+    # Check local properties
+    d1 = sorted(d for n, d in G1.degree())
+    d2 = sorted(d for n, d in G2.degree())
+
+    if d1 != d2:
+        return False
+
+    # OK...
+    return True
+
+
+faster_graph_could_be_isomorphic = faster_could_be_isomorphic
+
+
+@nx._dispatchable(
+    graphs={"G1": 0, "G2": 1},
+    preserve_edge_attrs="edge_match",
+    preserve_node_attrs="node_match",
+)
+def is_isomorphic(G1, G2, node_match=None, edge_match=None):
+    """Returns True if the graphs G1 and G2 are isomorphic and False otherwise.
+
+    Parameters
+    ----------
+    G1, G2: graphs
+        The two graphs G1 and G2 must be the same type.
+
+    node_match : callable
+        A function that returns True if node n1 in G1 and n2 in G2 should
+        be considered equal during the isomorphism test.
+        If node_match is not specified then node attributes are not considered.
+
+        The function will be called like
+
+           node_match(G1.nodes[n1], G2.nodes[n2]).
+
+        That is, the function will receive the node attribute dictionaries
+        for n1 and n2 as inputs.
+
+    edge_match : callable
+        A function that returns True if the edge attribute dictionary
+        for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should
+        be considered equal during the isomorphism test.  If edge_match is
+        not specified then edge attributes are not considered.
+
+        The function will be called like
+
+           edge_match(G1[u1][v1], G2[u2][v2]).
+
+        That is, the function will receive the edge attribute dictionaries
+        of the edges under consideration.
+
+    Notes
+    -----
+    Uses the vf2 algorithm [1]_.
+
+    Examples
+    --------
+    >>> import networkx.algorithms.isomorphism as iso
+
+    For digraphs G1 and G2, using 'weight' edge attribute (default: 1)
+
+    >>> G1 = nx.DiGraph()
+    >>> G2 = nx.DiGraph()
+    >>> nx.add_path(G1, [1, 2, 3, 4], weight=1)
+    >>> nx.add_path(G2, [10, 20, 30, 40], weight=2)
+    >>> em = iso.numerical_edge_match("weight", 1)
+    >>> nx.is_isomorphic(G1, G2)  # no weights considered
+    True
+    >>> nx.is_isomorphic(G1, G2, edge_match=em)  # match weights
+    False
+
+    For multidigraphs G1 and G2, using 'fill' node attribute (default: '')
+
+    >>> G1 = nx.MultiDiGraph()
+    >>> G2 = nx.MultiDiGraph()
+    >>> G1.add_nodes_from([1, 2, 3], fill="red")
+    >>> G2.add_nodes_from([10, 20, 30, 40], fill="red")
+    >>> nx.add_path(G1, [1, 2, 3, 4], weight=3, linewidth=2.5)
+    >>> nx.add_path(G2, [10, 20, 30, 40], weight=3)
+    >>> nm = iso.categorical_node_match("fill", "red")
+    >>> nx.is_isomorphic(G1, G2, node_match=nm)
+    True
+
+    For multidigraphs G1 and G2, using 'weight' edge attribute (default: 7)
+
+    >>> G1.add_edge(1, 2, weight=7)
+    1
+    >>> G2.add_edge(10, 20)
+    1
+    >>> em = iso.numerical_multiedge_match("weight", 7, rtol=1e-6)
+    >>> nx.is_isomorphic(G1, G2, edge_match=em)
+    True
+
+    For multigraphs G1 and G2, using 'weight' and 'linewidth' edge attributes
+    with default values 7 and 2.5. Also using 'fill' node attribute with
+    default value 'red'.
+
+    >>> em = iso.numerical_multiedge_match(["weight", "linewidth"], [7, 2.5])
+    >>> nm = iso.categorical_node_match("fill", "red")
+    >>> nx.is_isomorphic(G1, G2, edge_match=em, node_match=nm)
+    True
+
+    See Also
+    --------
+    numerical_node_match, numerical_edge_match, numerical_multiedge_match
+    categorical_node_match, categorical_edge_match, categorical_multiedge_match
+
+    References
+    ----------
+    .. [1]  L. P. Cordella, P. Foggia, C. Sansone, M. Vento,
+       "An Improved Algorithm for Matching Large Graphs",
+       3rd IAPR-TC15 Workshop  on Graph-based Representations in
+       Pattern Recognition, Cuen, pp. 149-159, 2001.
+       https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs
+    """
+    if G1.is_directed() and G2.is_directed():
+        GM = nx.algorithms.isomorphism.DiGraphMatcher
+    elif (not G1.is_directed()) and (not G2.is_directed()):
+        GM = nx.algorithms.isomorphism.GraphMatcher
+    else:
+        raise NetworkXError("Graphs G1 and G2 are not of the same type.")
+
+    gm = GM(G1, G2, node_match=node_match, edge_match=edge_match)
+
+    return gm.is_isomorphic()
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py
new file mode 100644
index 00000000..cb2f1e8f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py
@@ -0,0 +1,1238 @@
+"""
+*************
+VF2 Algorithm
+*************
+
+An implementation of VF2 algorithm for graph isomorphism testing.
+
+The simplest interface to use this module is to call the
+:func:`is_isomorphic <networkx.algorithms.isomorphism.is_isomorphic>`
+function.
+
+Introduction
+------------
+
+The GraphMatcher and DiGraphMatcher are responsible for matching
+graphs or directed graphs in a predetermined manner.  This
+usually means a check for an isomorphism, though other checks
+are also possible.  For example, a subgraph of one graph
+can be checked for isomorphism to a second graph.
+
+Matching is done via syntactic feasibility. It is also possible
+to check for semantic feasibility. Feasibility, then, is defined
+as the logical AND of the two functions.
+
+To include a semantic check, the (Di)GraphMatcher class should be
+subclassed, and the
+:meth:`semantic_feasibility <networkx.algorithms.isomorphism.GraphMatcher.semantic_feasibility>`
+function should be redefined.  By default, the semantic feasibility function always
+returns ``True``.  The effect of this is that semantics are not
+considered in the matching of G1 and G2.
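+
+For instance, a minimal subclass that only pairs nodes sharing a hypothetical
+``color`` attribute might look like this (an illustrative sketch, not a fixed
+part of the API):
+
+>>> from networkx.algorithms import isomorphism
+>>> class ColorMatcher(isomorphism.GraphMatcher):
+...     def semantic_feasibility(self, G1_node, G2_node):
+...         # Only allow the pairing when both nodes carry the same 'color'.
+...         color1 = self.G1.nodes[G1_node].get("color")
+...         color2 = self.G2.nodes[G2_node].get("color")
+...         return color1 == color2
+>>> G1 = nx.Graph([(0, 1)])
+>>> nx.set_node_attributes(G1, {0: "red", 1: "blue"}, "color")
+>>> G2 = nx.Graph([(10, 20)])
+>>> nx.set_node_attributes(G2, {10: "red", 20: "blue"}, "color")
+>>> ColorMatcher(G1, G2).is_isomorphic()
+True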
+
+Examples
+--------
+
+Suppose G1 and G2 are isomorphic graphs. Verification is as follows:
+
+>>> from networkx.algorithms import isomorphism
+>>> G1 = nx.path_graph(4)
+>>> G2 = nx.path_graph(4)
+>>> GM = isomorphism.GraphMatcher(G1, G2)
+>>> GM.is_isomorphic()
+True
+
+GM.mapping stores the isomorphism mapping from G1 to G2.
+
+>>> GM.mapping
+{0: 0, 1: 1, 2: 2, 3: 3}
+
+
+Suppose G1 and G2 are isomorphic directed graphs.
+Verification is as follows:
+
+>>> G1 = nx.path_graph(4, create_using=nx.DiGraph)
+>>> G2 = nx.path_graph(4, create_using=nx.DiGraph)
+>>> DiGM = isomorphism.DiGraphMatcher(G1, G2)
+>>> DiGM.is_isomorphic()
+True
+
+DiGM.mapping stores the isomorphism mapping from G1 to G2.
+
+>>> DiGM.mapping
+{0: 0, 1: 1, 2: 2, 3: 3}
+
+
+
+Subgraph Isomorphism
+--------------------
+Graph theory literature can be ambiguous about the meaning of the
+above statement, and we seek to clarify it now.
+
+In the VF2 literature, a mapping ``M`` is said to be a graph-subgraph
+isomorphism iff ``M`` is an isomorphism between ``G2`` and a subgraph of ``G1``.
+Thus, to say that ``G1`` and ``G2`` are graph-subgraph isomorphic is to say
+that a subgraph of ``G1`` is isomorphic to ``G2``.
+
+Other literature uses the phrase 'subgraph isomorphic' as in '``G1`` does
+not have a subgraph isomorphic to ``G2``'.  Another use is as an adverb
+modifying 'isomorphic'.  Thus, to say that ``G1`` and ``G2`` are subgraph isomorphic
+is to say that a subgraph of ``G1`` is isomorphic to ``G2``.
+
+Finally, the term 'subgraph' can have multiple meanings. In this
+context, 'subgraph' always means a 'node-induced subgraph'. Edge-induced
+subgraph isomorphisms are not directly supported, but one should be
+able to perform the check by making use of
+:func:`line_graph <networkx.generators.line.line_graph>`. For
+subgraphs which are not induced, the term 'monomorphism' is preferred
+over 'isomorphism'.
+
+Let ``G = (N, E)`` be a graph with a set of nodes ``N`` and set of edges ``E``.
+
+If ``G' = (N', E')`` is a subgraph, then:
+    ``N'`` is a subset of ``N`` and
+    ``E'`` is a subset of ``E``.
+
+If ``G' = (N', E')`` is a node-induced subgraph, then:
+    ``N'`` is a subset of ``N`` and
+    ``E'`` is the subset of edges in ``E`` relating nodes in ``N'``.
+
+If ``G' = (N', E')`` is an edge-induced subgraph, then:
+    ``N'`` is the subset of nodes in ``N`` related by edges in ``E'`` and
+    ``E'`` is a subset of ``E``.
+
+If ``G' = (N', E')`` is a monomorphism, then:
+    ``N'`` is a subset of ``N`` and
+    ``E'`` is a subset of the set of edges in ``E`` relating nodes in ``N'``.
+
+Note that if ``G'`` is a node-induced subgraph of ``G``, then it is always a
+subgraph monomorphism of ``G``, but the opposite is not always true, as a
+monomorphism can have fewer edges.
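+
+As a small illustration of the difference, a triangle contains a path on three
+nodes as a monomorphic image (obtained by ignoring one edge), but no
+node-induced subgraph of a triangle is such a path:
+
+>>> from networkx.algorithms import isomorphism
+>>> G1 = nx.complete_graph(3)  # a triangle
+>>> G2 = nx.path_graph(3)  # a path on three nodes
+>>> GM = isomorphism.GraphMatcher(G1, G2)
+>>> GM.subgraph_is_isomorphic()
+False
+>>> GM.subgraph_is_monomorphic()
+True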
+
+References
+----------
+[1]   Luigi P. Cordella, Pasquale Foggia, Carlo Sansone, Mario Vento,
+      "A (Sub)Graph Isomorphism Algorithm for Matching Large Graphs",
+      IEEE Transactions on Pattern Analysis and Machine Intelligence,
+      vol. 26,  no. 10,  pp. 1367-1372,  Oct.,  2004.
+      http://ieeexplore.ieee.org/iel5/34/29305/01323804.pdf
+
+[2]   L. P. Cordella, P. Foggia, C. Sansone, M. Vento, "An Improved
+      Algorithm for Matching Large Graphs", 3rd IAPR-TC15 Workshop
+      on Graph-based Representations in Pattern Recognition, Cuen,
+      pp. 149-159, 2001.
+      https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs
+
+See Also
+--------
+:meth:`semantic_feasibility <networkx.algorithms.isomorphism.GraphMatcher.semantic_feasibility>`
+:meth:`syntactic_feasibility <networkx.algorithms.isomorphism.GraphMatcher.syntactic_feasibility>`
+
+Notes
+-----
+
+The implementation handles both directed and undirected graphs as well
+as multigraphs.
+
+In general, the subgraph isomorphism problem is NP-complete whereas the
+graph isomorphism problem is most likely not NP-complete (although no
+polynomial-time algorithm is known to exist).
+
+"""
+
+# This work was originally coded by Christopher Ellison
+# as part of the Computational Mechanics Python (CMPy) project.
+# James P. Crutchfield, principal investigator.
+# Complexity Sciences Center and Physics Department, UC Davis.
+
+import sys
+
+__all__ = ["GraphMatcher", "DiGraphMatcher"]
+
+
+class GraphMatcher:
+    """Implementation of VF2 algorithm for matching undirected graphs.
+
+    Suitable for Graph and MultiGraph instances.
+    """
+
+    def __init__(self, G1, G2):
+        """Initialize GraphMatcher.
+
+        Parameters
+        ----------
+        G1,G2: NetworkX Graph or MultiGraph instances.
+           The two graphs to check for isomorphism or monomorphism.
+
+        Examples
+        --------
+        To create a GraphMatcher which checks for syntactic feasibility:
+
+        >>> from networkx.algorithms import isomorphism
+        >>> G1 = nx.path_graph(4)
+        >>> G2 = nx.path_graph(4)
+        >>> GM = isomorphism.GraphMatcher(G1, G2)
+        """
+        self.G1 = G1
+        self.G2 = G2
+        self.G1_nodes = set(G1.nodes())
+        self.G2_nodes = set(G2.nodes())
+        self.G2_node_order = {n: i for i, n in enumerate(G2)}
+
+        # Set recursion limit.
+        self.old_recursion_limit = sys.getrecursionlimit()
+        expected_max_recursion_level = len(self.G2)
+        if self.old_recursion_limit < 1.5 * expected_max_recursion_level:
+            # Give some breathing room.
+            sys.setrecursionlimit(int(1.5 * expected_max_recursion_level))
+
+        # Declare that we will be searching for a graph-graph isomorphism.
+        self.test = "graph"
+
+        # Initialize state
+        self.initialize()
+
+    def reset_recursion_limit(self):
+        """Restores the recursion limit."""
+        # TODO:
+        # Currently, we use recursion and set the recursion level higher.
+        # It would be nice to restore the level, but because the
+        # (Di)GraphMatcher classes make use of cyclic references, garbage
+        # collection will never happen when we define __del__() to
+        # restore the recursion level. The result is a memory leak.
+        # So for now, we do not automatically restore the recursion level,
+        # and instead provide a method to do this manually. Eventually,
+        # we should turn this into a non-recursive implementation.
+        sys.setrecursionlimit(self.old_recursion_limit)
+
+    def candidate_pairs_iter(self):
+        """Iterator over candidate pairs of nodes in G1 and G2."""
+
+        # All computations are done using the current state!
+
+        G1_nodes = self.G1_nodes
+        G2_nodes = self.G2_nodes
+        min_key = self.G2_node_order.__getitem__
+
+        # First we compute the inout-terminal sets.
+        T1_inout = [node for node in self.inout_1 if node not in self.core_1]
+        T2_inout = [node for node in self.inout_2 if node not in self.core_2]
+
+        # If T1_inout and T2_inout are both nonempty.
+        # P(s) = T1_inout x {min T2_inout}
+        if T1_inout and T2_inout:
+            node_2 = min(T2_inout, key=min_key)
+            for node_1 in T1_inout:
+                yield node_1, node_2
+
+        else:
+            # If T1_inout and T2_inout were both empty....
+            # P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
+            # if not (T1_inout or T2_inout):  # as suggested by  [2], incorrect
+            if 1:  # as inferred from [1], correct
+                # First we determine the candidate node for G2
+                other_node = min(G2_nodes - set(self.core_2), key=min_key)
+                for node in self.G1:
+                    if node not in self.core_1:
+                        yield node, other_node
+
+        # For all other cases, we don't have any candidate pairs.
+
+    def initialize(self):
+        """Reinitializes the state of the algorithm.
+
+        This method should be redefined if using something other than GMState.
+        If only subclassing GraphMatcher, a redefinition is not necessary.
+
+        """
+
+        # core_1[n] contains the index of the node paired with n, which is m,
+        #           provided n is in the mapping.
+        # core_2[m] contains the index of the node paired with m, which is n,
+        #           provided m is in the mapping.
+        self.core_1 = {}
+        self.core_2 = {}
+
+        # See the paper for definitions of M_x and T_x^{y}
+
+        # inout_1[n]  is non-zero if n is in M_1 or in T_1^{inout}
+        # inout_2[m]  is non-zero if m is in M_2 or in T_2^{inout}
+        #
+        # The value stored is the depth of the SSR tree when the node became
+        # part of the corresponding set.
+        self.inout_1 = {}
+        self.inout_2 = {}
+        # Practically, these sets simply store the nodes in the subgraph.
+
+        self.state = GMState(self)
+
+        # Provide a convenient way to access the isomorphism mapping.
+        self.mapping = self.core_1.copy()
+
+    def is_isomorphic(self):
+        """Returns True if G1 and G2 are isomorphic graphs."""
+
+        # Let's do two very quick checks!
+        # QUESTION: Should we call faster_graph_could_be_isomorphic(G1,G2)?
+        # For now, I just copy the code.
+
+        # Check global properties
+        if self.G1.order() != self.G2.order():
+            return False
+
+        # Check local properties
+        d1 = sorted(d for n, d in self.G1.degree())
+        d2 = sorted(d for n, d in self.G2.degree())
+        if d1 != d2:
+            return False
+
+        try:
+            x = next(self.isomorphisms_iter())
+            return True
+        except StopIteration:
+            return False
+
+    def isomorphisms_iter(self):
+        """Generator over isomorphisms between G1 and G2."""
+        # Declare that we are looking for a graph-graph isomorphism.
+        self.test = "graph"
+        self.initialize()
+        yield from self.match()
+
+    def match(self):
+        """Extends the isomorphism mapping.
+
+        This function is called recursively to determine if a complete
+        isomorphism can be found between G1 and G2.  It cleans up the class
+        variables after each recursive call. If an isomorphism is found,
+        we yield the mapping.
+
+        """
+        if len(self.core_1) == len(self.G2):
+            # Save the final mapping, otherwise garbage collection deletes it.
+            self.mapping = self.core_1.copy()
+            # The mapping is complete.
+            yield self.mapping
+        else:
+            for G1_node, G2_node in self.candidate_pairs_iter():
+                if self.syntactic_feasibility(G1_node, G2_node):
+                    if self.semantic_feasibility(G1_node, G2_node):
+                        # Recursive call, adding the feasible state.
+                        newstate = self.state.__class__(self, G1_node, G2_node)
+                        yield from self.match()
+
+                        # restore data structures
+                        newstate.restore()
+
+    def semantic_feasibility(self, G1_node, G2_node):
+        """Returns True if adding (G1_node, G2_node) is semantically feasible.
+
+        The semantic feasibility function should return True if it is
+        acceptable to add the candidate pair (G1_node, G2_node) to the current
+        partial isomorphism mapping.   The logic should focus on semantic
+        information contained in the edge data or a formalized node class.
+
+        By acceptable, we mean that the subsequent mapping can still become a
+        complete isomorphism mapping.  Thus, if adding the candidate pair
+        definitely makes it so that the subsequent mapping cannot become a
+        complete isomorphism mapping, then this function must return False.
+
+        The default semantic feasibility function always returns True. The
+        effect is that semantics are not considered in the matching of G1
+        and G2.
+
+        The semantic checks might differ based on what type of test is
+        being performed.  A keyword description of the test is stored in
+        self.test.  Here is a quick description of the currently implemented
+        tests::
+
+          test='graph'
+            Indicates that the graph matcher is looking for a graph-graph
+            isomorphism.
+
+          test='subgraph'
+            Indicates that the graph matcher is looking for a subgraph-graph
+            isomorphism such that a subgraph of G1 is isomorphic to G2.
+
+          test='mono'
+            Indicates that the graph matcher is looking for a subgraph-graph
+            monomorphism such that a subgraph of G1 is monomorphic to G2.
+
+        Any subclass which redefines semantic_feasibility() must maintain
+        the above form to keep the match() method functional. Implementations
+        should consider multigraphs.
+        """
+        return True
+
+    def subgraph_is_isomorphic(self):
+        """Returns `True` if a subgraph of ``G1`` is isomorphic to ``G2``.
+
+        Examples
+        --------
+        When creating the `GraphMatcher`, the order of the arguments is important
+
+        >>> G = nx.Graph([("A", "B"), ("B", "C"), ("A", "C")])
+        >>> H = nx.Graph([(0, 1), (1, 2), (0, 2), (1, 3), (0, 4)])
+
+        Check whether a subgraph of G is isomorphic to H:
+
+        >>> isomatcher = nx.isomorphism.GraphMatcher(G, H)
+        >>> isomatcher.subgraph_is_isomorphic()
+        False
+
+        Check whether a subgraph of H is isomorphic to G:
+
+        >>> isomatcher = nx.isomorphism.GraphMatcher(H, G)
+        >>> isomatcher.subgraph_is_isomorphic()
+        True
+        """
+        try:
+            x = next(self.subgraph_isomorphisms_iter())
+            return True
+        except StopIteration:
+            return False
+
+    def subgraph_is_monomorphic(self):
+        """Returns `True` if a subgraph of ``G1`` is monomorphic to ``G2``.
+
+        Examples
+        --------
+        When creating the `GraphMatcher`, the order of the arguments is important.
+
+        >>> G = nx.Graph([("A", "B"), ("B", "C")])
+        >>> H = nx.Graph([(0, 1), (1, 2), (0, 2)])
+
+        Check whether a subgraph of G is monomorphic to H:
+
+        >>> isomatcher = nx.isomorphism.GraphMatcher(G, H)
+        >>> isomatcher.subgraph_is_monomorphic()
+        False
+
+        Check whether a subgraph of H is monomorphic to G:
+
+        >>> isomatcher = nx.isomorphism.GraphMatcher(H, G)
+        >>> isomatcher.subgraph_is_monomorphic()
+        True
+        """
+        try:
+            x = next(self.subgraph_monomorphisms_iter())
+            return True
+        except StopIteration:
+            return False
+
+    def subgraph_isomorphisms_iter(self):
+        """Generator over isomorphisms between a subgraph of ``G1`` and ``G2``.
+
+        Examples
+        --------
+        When creating the `GraphMatcher`, the order of the arguments is important
+
+        >>> G = nx.Graph([("A", "B"), ("B", "C"), ("A", "C")])
+        >>> H = nx.Graph([(0, 1), (1, 2), (0, 2), (1, 3), (0, 4)])
+
+        Yield isomorphic mappings between ``H`` and subgraphs of ``G``:
+
+        >>> isomatcher = nx.isomorphism.GraphMatcher(G, H)
+        >>> list(isomatcher.subgraph_isomorphisms_iter())
+        []
+
+        Yield isomorphic mappings between ``G`` and subgraphs of ``H``:
+
+        >>> isomatcher = nx.isomorphism.GraphMatcher(H, G)
+        >>> next(isomatcher.subgraph_isomorphisms_iter())
+        {0: 'A', 1: 'B', 2: 'C'}
+
+        """
+        # Declare that we are looking for graph-subgraph isomorphism.
+        self.test = "subgraph"
+        self.initialize()
+        yield from self.match()
+
+    def subgraph_monomorphisms_iter(self):
+        """Generator over monomorphisms between a subgraph of ``G1`` and ``G2``.
+
+        Examples
+        --------
+        When creating the `GraphMatcher`, the order of the arguments is important.
+
+        >>> G = nx.Graph([("A", "B"), ("B", "C")])
+        >>> H = nx.Graph([(0, 1), (1, 2), (0, 2)])
+
+        Yield monomorphic mappings between ``H`` and subgraphs of ``G``:
+
+        >>> isomatcher = nx.isomorphism.GraphMatcher(G, H)
+        >>> list(isomatcher.subgraph_monomorphisms_iter())
+        []
+
+        Yield monomorphic mappings between ``G`` and subgraphs of ``H``:
+
+        >>> isomatcher = nx.isomorphism.GraphMatcher(H, G)
+        >>> next(isomatcher.subgraph_monomorphisms_iter())
+        {0: 'A', 1: 'B', 2: 'C'}
+        """
+        # Declare that we are looking for graph-subgraph monomorphism.
+        self.test = "mono"
+        self.initialize()
+        yield from self.match()
+
+    def syntactic_feasibility(self, G1_node, G2_node):
+        """Returns True if adding (G1_node, G2_node) is syntactically feasible.
+
+        This function returns True if adding the candidate pair
+        to the current partial isomorphism/monomorphism mapping is allowable.
+        The addition is allowable if the inclusion of the candidate pair does
+        not make it impossible for an isomorphism/monomorphism to be found.
+        """
+
+        # The VF2 algorithm was designed to work with graphs having, at most,
+        # one edge connecting any two nodes.  This is not the case when
+        # dealing with MultiGraphs.
+        #
+        # Basically, when we test the look-ahead rules R_neighbor, we will
+        # make sure that the number of edges is checked. We also add
+        # an R_self check to verify that the number of selfloops is acceptable.
+        #
+        # Users might be comparing Graph instances with MultiGraph instances.
+        # So the generic GraphMatcher class must work with MultiGraphs.
+        # Care must be taken since the value in the innermost dictionary is a
+        # single edge-data dict for Graph instances.  For MultiGraphs, the
+        # value in the innermost dictionary is a dict of edge-data dicts
+        # keyed by edge key.
+
+        ###
+        # Test at each step to get a return value as soon as possible.
+        ###
+
+        # Look ahead 0
+
+        # R_self
+
+        # The number of selfloops for G1_node must equal the number of
+        # self-loops for G2_node. Without this check, we would fail on
+        # R_neighbor at the next recursion level. But it is good to prune the
+        # search tree now.
+
+        if self.test == "mono":
+            if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges(
+                G2_node, G2_node
+            ):
+                return False
+        else:
+            if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(
+                G2_node, G2_node
+            ):
+                return False
+
+        # R_neighbor
+
+        # For each neighbor n' of n in the partial mapping, the corresponding
+        # node m' is a neighbor of m, and vice versa. Also, the number of
+        # edges must be equal.
+        if self.test != "mono":
+            for neighbor in self.G1[G1_node]:
+                if neighbor in self.core_1:
+                    if self.core_1[neighbor] not in self.G2[G2_node]:
+                        return False
+                    elif self.G1.number_of_edges(
+                        neighbor, G1_node
+                    ) != self.G2.number_of_edges(self.core_1[neighbor], G2_node):
+                        return False
+
+        for neighbor in self.G2[G2_node]:
+            if neighbor in self.core_2:
+                if self.core_2[neighbor] not in self.G1[G1_node]:
+                    return False
+                elif self.test == "mono":
+                    if self.G1.number_of_edges(
+                        self.core_2[neighbor], G1_node
+                    ) < self.G2.number_of_edges(neighbor, G2_node):
+                        return False
+                else:
+                    if self.G1.number_of_edges(
+                        self.core_2[neighbor], G1_node
+                    ) != self.G2.number_of_edges(neighbor, G2_node):
+                        return False
+
+        if self.test != "mono":
+            # Look ahead 1
+
+            # R_terminout
+            # The number of neighbors of n in T_1^{inout} is equal to the
+            # number of neighbors of m that are in T_2^{inout}, and vice versa.
+            num1 = 0
+            for neighbor in self.G1[G1_node]:
+                if (neighbor in self.inout_1) and (neighbor not in self.core_1):
+                    num1 += 1
+            num2 = 0
+            for neighbor in self.G2[G2_node]:
+                if (neighbor in self.inout_2) and (neighbor not in self.core_2):
+                    num2 += 1
+            if self.test == "graph":
+                if num1 != num2:
+                    return False
+            else:  # self.test == 'subgraph'
+                if not (num1 >= num2):
+                    return False
+
+            # Look ahead 2
+
+            # R_new
+
+            # The number of neighbors of n that are neither in the core_1 nor
+            # T_1^{inout} is equal to the number of neighbors of m
+            # that are neither in core_2 nor T_2^{inout}.
+            num1 = 0
+            for neighbor in self.G1[G1_node]:
+                if neighbor not in self.inout_1:
+                    num1 += 1
+            num2 = 0
+            for neighbor in self.G2[G2_node]:
+                if neighbor not in self.inout_2:
+                    num2 += 1
+            if self.test == "graph":
+                if num1 != num2:
+                    return False
+            else:  # self.test == 'subgraph'
+                if not (num1 >= num2):
+                    return False
+
+        # Otherwise, this node pair is syntactically feasible!
+        return True
+
+
+class DiGraphMatcher(GraphMatcher):
+    """Implementation of VF2 algorithm for matching directed graphs.
+
+    Suitable for DiGraph and MultiDiGraph instances.
+    """
+
+    def __init__(self, G1, G2):
+        """Initialize DiGraphMatcher.
+
+        G1 and G2 should be nx.DiGraph or nx.MultiDiGraph instances.
+
+        Examples
+        --------
+        To create a DiGraphMatcher which checks for syntactic feasibility:
+
+        >>> from networkx.algorithms import isomorphism
+        >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
+        >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
+        >>> DiGM = isomorphism.DiGraphMatcher(G1, G2)
+        """
+        super().__init__(G1, G2)
+
+    def candidate_pairs_iter(self):
+        """Iterator over candidate pairs of nodes in G1 and G2."""
+
+        # All computations are done using the current state!
+
+        G1_nodes = self.G1_nodes
+        G2_nodes = self.G2_nodes
+        min_key = self.G2_node_order.__getitem__
+
+        # First we compute the out-terminal sets.
+        T1_out = [node for node in self.out_1 if node not in self.core_1]
+        T2_out = [node for node in self.out_2 if node not in self.core_2]
+
+        # If T1_out and T2_out are both nonempty.
+        # P(s) = T1_out x {min T2_out}
+        if T1_out and T2_out:
+            node_2 = min(T2_out, key=min_key)
+            for node_1 in T1_out:
+                yield node_1, node_2
+
+        # If T1_out and T2_out were both empty....
+        # We compute the in-terminal sets.
+
+        # elif not (T1_out or T2_out):   # as suggested by [2], incorrect
+        else:  # as suggested by [1], correct
+            T1_in = [node for node in self.in_1 if node not in self.core_1]
+            T2_in = [node for node in self.in_2 if node not in self.core_2]
+
+            # If T1_in and T2_in are both nonempty.
+            # P(s) = T1_in x {min T2_in}
+            if T1_in and T2_in:
+                node_2 = min(T2_in, key=min_key)
+                for node_1 in T1_in:
+                    yield node_1, node_2
+
+            # If all terminal sets are empty...
+            # P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
+
+            # elif not (T1_in or T2_in):   # as suggested by  [2], incorrect
+            else:  # as inferred from [1], correct
+                node_2 = min(G2_nodes - set(self.core_2), key=min_key)
+                for node_1 in G1_nodes:
+                    if node_1 not in self.core_1:
+                        yield node_1, node_2
+
+        # For all other cases, we don't have any candidate pairs.
+
+    def initialize(self):
+        """Reinitializes the state of the algorithm.
+
+        This method should be redefined if using something other than DiGMState.
+        If only subclassing DiGraphMatcher, a redefinition is not necessary.
+        """
+
+        # core_1[n] contains the index of the node paired with n, which is m,
+        #           provided n is in the mapping.
+        # core_2[m] contains the index of the node paired with m, which is n,
+        #           provided m is in the mapping.
+        self.core_1 = {}
+        self.core_2 = {}
+
+        # See the paper for definitions of M_x and T_x^{y}
+
+        # in_1[n]  is non-zero if n is in M_1 or in T_1^{in}
+        # out_1[n] is non-zero if n is in M_1 or in T_1^{out}
+        #
+        # in_2[m]  is non-zero if m is in M_2 or in T_2^{in}
+        # out_2[m] is non-zero if m is in M_2 or in T_2^{out}
+        #
+        # The value stored is the depth of the search tree when the node became
+        # part of the corresponding set.
+        self.in_1 = {}
+        self.in_2 = {}
+        self.out_1 = {}
+        self.out_2 = {}
+
+        self.state = DiGMState(self)
+
+        # Provide a convenient way to access the isomorphism mapping.
+        self.mapping = self.core_1.copy()
+
+    def syntactic_feasibility(self, G1_node, G2_node):
+        """Returns True if adding (G1_node, G2_node) is syntactically feasible.
+
+        This function returns True if adding the candidate pair
+        to the current partial isomorphism/monomorphism mapping is allowable.
+        The addition is allowable if the inclusion of the candidate pair does
+        not make it impossible for an isomorphism/monomorphism to be found.
+        """
+
+        # The VF2 algorithm was designed to work with graphs having, at most,
+        # one edge connecting any two nodes.  This is not the case when
+        # dealing with MultiDiGraphs.
+        #
+        # Basically, when we test the look-ahead rules R_pred and R_succ, we
+        # will make sure that the number of edges is checked.  We also add
+        # an R_self check to verify that the number of selfloops is acceptable.
+
+        # Users might be comparing DiGraph instances with MultiDiGraph
+        # instances. So the generic DiGraphMatcher class must work with
+        # MultiDiGraphs. Care must be taken since the value in the innermost
+        # dictionary is a single edge-data dict for DiGraph instances.  For
+        # MultiDiGraphs, the value in the innermost dictionary is a dict of
+        # edge-data dicts keyed by edge key.
+
+        ###
+        # Test at each step to get a return value as soon as possible.
+        ###
+
+        # Look ahead 0
+
+        # R_self
+
+        # The number of selfloops for G1_node must equal the number of
+        # self-loops for G2_node. Without this check, we would fail on R_pred
+        # at the next recursion level. This should prune the tree even further.
+        if self.test == "mono":
+            if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges(
+                G2_node, G2_node
+            ):
+                return False
+        else:
+            if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(
+                G2_node, G2_node
+            ):
+                return False
+
+        # R_pred
+
+        # For each predecessor n' of n in the partial mapping, the
+        # corresponding node m' is a predecessor of m, and vice versa. Also,
+        # the number of edges must be equal
+        if self.test != "mono":
+            for predecessor in self.G1.pred[G1_node]:
+                if predecessor in self.core_1:
+                    if self.core_1[predecessor] not in self.G2.pred[G2_node]:
+                        return False
+                    elif self.G1.number_of_edges(
+                        predecessor, G1_node
+                    ) != self.G2.number_of_edges(self.core_1[predecessor], G2_node):
+                        return False
+
+        for predecessor in self.G2.pred[G2_node]:
+            if predecessor in self.core_2:
+                if self.core_2[predecessor] not in self.G1.pred[G1_node]:
+                    return False
+                elif self.test == "mono":
+                    if self.G1.number_of_edges(
+                        self.core_2[predecessor], G1_node
+                    ) < self.G2.number_of_edges(predecessor, G2_node):
+                        return False
+                else:
+                    if self.G1.number_of_edges(
+                        self.core_2[predecessor], G1_node
+                    ) != self.G2.number_of_edges(predecessor, G2_node):
+                        return False
+
+        # R_succ
+
+        # For each successor n' of n in the partial mapping, the corresponding
+        # node m' is a successor of m, and vice versa. Also, the number of
+        # edges must be equal.
+        if self.test != "mono":
+            for successor in self.G1[G1_node]:
+                if successor in self.core_1:
+                    if self.core_1[successor] not in self.G2[G2_node]:
+                        return False
+                    elif self.G1.number_of_edges(
+                        G1_node, successor
+                    ) != self.G2.number_of_edges(G2_node, self.core_1[successor]):
+                        return False
+
+        for successor in self.G2[G2_node]:
+            if successor in self.core_2:
+                if self.core_2[successor] not in self.G1[G1_node]:
+                    return False
+                elif self.test == "mono":
+                    if self.G1.number_of_edges(
+                        G1_node, self.core_2[successor]
+                    ) < self.G2.number_of_edges(G2_node, successor):
+                        return False
+                else:
+                    if self.G1.number_of_edges(
+                        G1_node, self.core_2[successor]
+                    ) != self.G2.number_of_edges(G2_node, successor):
+                        return False
+
+        if self.test != "mono":
+            # Look ahead 1
+
+            # R_termin
+            # The number of predecessors of n that are in T_1^{in} is equal to the
+            # number of predecessors of m that are in T_2^{in}.
+            num1 = 0
+            for predecessor in self.G1.pred[G1_node]:
+                if (predecessor in self.in_1) and (predecessor not in self.core_1):
+                    num1 += 1
+            num2 = 0
+            for predecessor in self.G2.pred[G2_node]:
+                if (predecessor in self.in_2) and (predecessor not in self.core_2):
+                    num2 += 1
+            if self.test == "graph":
+                if num1 != num2:
+                    return False
+            else:  # self.test == 'subgraph'
+                if not (num1 >= num2):
+                    return False
+
+            # The number of successors of n that are in T_1^{in} is equal to the
+            # number of successors of m that are in T_2^{in}.
+            num1 = 0
+            for successor in self.G1[G1_node]:
+                if (successor in self.in_1) and (successor not in self.core_1):
+                    num1 += 1
+            num2 = 0
+            for successor in self.G2[G2_node]:
+                if (successor in self.in_2) and (successor not in self.core_2):
+                    num2 += 1
+            if self.test == "graph":
+                if num1 != num2:
+                    return False
+            else:  # self.test == 'subgraph'
+                if not (num1 >= num2):
+                    return False
+
+            # R_termout
+
+            # The number of predecessors of n that are in T_1^{out} is equal to the
+            # number of predecessors of m that are in T_2^{out}.
+            num1 = 0
+            for predecessor in self.G1.pred[G1_node]:
+                if (predecessor in self.out_1) and (predecessor not in self.core_1):
+                    num1 += 1
+            num2 = 0
+            for predecessor in self.G2.pred[G2_node]:
+                if (predecessor in self.out_2) and (predecessor not in self.core_2):
+                    num2 += 1
+            if self.test == "graph":
+                if num1 != num2:
+                    return False
+            else:  # self.test == 'subgraph'
+                if not (num1 >= num2):
+                    return False
+
+            # The number of successors of n that are in T_1^{out} is equal to the
+            # number of successors of m that are in T_2^{out}.
+            num1 = 0
+            for successor in self.G1[G1_node]:
+                if (successor in self.out_1) and (successor not in self.core_1):
+                    num1 += 1
+            num2 = 0
+            for successor in self.G2[G2_node]:
+                if (successor in self.out_2) and (successor not in self.core_2):
+                    num2 += 1
+            if self.test == "graph":
+                if num1 != num2:
+                    return False
+            else:  # self.test == 'subgraph'
+                if not (num1 >= num2):
+                    return False
+
+            # Look ahead 2
+
+            # R_new
+
+            # The number of predecessors of n that are neither in the core_1 nor
+            # T_1^{in} nor T_1^{out} is equal to the number of predecessors of m
+            # that are neither in core_2 nor T_2^{in} nor T_2^{out}.
+            num1 = 0
+            for predecessor in self.G1.pred[G1_node]:
+                if (predecessor not in self.in_1) and (predecessor not in self.out_1):
+                    num1 += 1
+            num2 = 0
+            for predecessor in self.G2.pred[G2_node]:
+                if (predecessor not in self.in_2) and (predecessor not in self.out_2):
+                    num2 += 1
+            if self.test == "graph":
+                if num1 != num2:
+                    return False
+            else:  # self.test == 'subgraph'
+                if not (num1 >= num2):
+                    return False
+
+            # The number of successors of n that are neither in the core_1 nor
+            # T_1^{in} nor T_1^{out} is equal to the number of successors of m
+            # that are neither in core_2 nor T_2^{in} nor T_2^{out}.
+            num1 = 0
+            for successor in self.G1[G1_node]:
+                if (successor not in self.in_1) and (successor not in self.out_1):
+                    num1 += 1
+            num2 = 0
+            for successor in self.G2[G2_node]:
+                if (successor not in self.in_2) and (successor not in self.out_2):
+                    num2 += 1
+            if self.test == "graph":
+                if num1 != num2:
+                    return False
+            else:  # self.test == 'subgraph'
+                if not (num1 >= num2):
+                    return False
+
+        # Otherwise, this node pair is syntactically feasible!
+        return True
+
+    def subgraph_is_isomorphic(self):
+        """Returns `True` if a subgraph of ``G1`` is isomorphic to ``G2``.
+
+        Examples
+        --------
+        When creating the `DiGraphMatcher`, the order of the arguments is important
+
+        >>> G = nx.DiGraph([("A", "B"), ("B", "A"), ("B", "C"), ("C", "B")])
+        >>> H = nx.DiGraph(nx.path_graph(5))
+
+        Check whether a subgraph of G is isomorphic to H:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
+        >>> isomatcher.subgraph_is_isomorphic()
+        False
+
+        Check whether a subgraph of H is isomorphic to G:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
+        >>> isomatcher.subgraph_is_isomorphic()
+        True
+        """
+        return super().subgraph_is_isomorphic()
+
+    def subgraph_is_monomorphic(self):
+        """Returns `True` if a subgraph of ``G1`` is monomorphic to ``G2``.
+
+        Examples
+        --------
+        When creating the `DiGraphMatcher`, the order of the arguments is important.
+
+        >>> G = nx.DiGraph([("A", "B"), ("C", "B"), ("D", "C")])
+        >>> H = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 2)])
+
+        Check whether a subgraph of G is monomorphic to H:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
+        >>> isomatcher.subgraph_is_monomorphic()
+        False
+
+        Check whether a subgraph of H is monomorphic to G:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
+        >>> isomatcher.subgraph_is_monomorphic()
+        True
+        """
+        return super().subgraph_is_monomorphic()
+
+    def subgraph_isomorphisms_iter(self):
+        """Generator over isomorphisms between a subgraph of ``G1`` and ``G2``.
+
+        Examples
+        --------
+        When creating the `DiGraphMatcher`, the order of the arguments is important
+
+        >>> G = nx.DiGraph([("B", "C"), ("C", "B"), ("C", "D"), ("D", "C")])
+        >>> H = nx.DiGraph(nx.path_graph(5))
+
+        Yield isomorphic mappings between ``H`` and subgraphs of ``G``:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
+        >>> list(isomatcher.subgraph_isomorphisms_iter())
+        []
+
+        Yield isomorphic mappings between ``G`` and subgraphs of ``H``:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
+        >>> next(isomatcher.subgraph_isomorphisms_iter())
+        {0: 'B', 1: 'C', 2: 'D'}
+        """
+        return super().subgraph_isomorphisms_iter()
+
+    def subgraph_monomorphisms_iter(self):
+        """Generator over monomorphisms between a subgraph of ``G1`` and ``G2``.
+
+        Examples
+        --------
+        When creating the `DiGraphMatcher`, the order of the arguments is important.
+
+        >>> G = nx.DiGraph([("A", "B"), ("C", "B"), ("D", "C")])
+        >>> H = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 2)])
+
+        Yield monomorphic mappings between ``H`` and subgraphs of ``G``:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
+        >>> list(isomatcher.subgraph_monomorphisms_iter())
+        []
+
+        Yield monomorphic mappings between ``G`` and subgraphs of ``H``:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
+        >>> next(isomatcher.subgraph_monomorphisms_iter())
+        {3: 'A', 2: 'B', 1: 'C', 0: 'D'}
+        """
+        return super().subgraph_monomorphisms_iter()
+
+
+class GMState:
+    """Internal representation of state for the GraphMatcher class.
+
+    This class is used internally by the GraphMatcher class.  It is used
+    only to store state specific data. There will be at most G2.order() of
+    these objects in memory at a time, due to the depth-first search
+    strategy employed by the VF2 algorithm.
+    """
+
+    def __init__(self, GM, G1_node=None, G2_node=None):
+        """Initializes GMState object.
+
+        Pass in the GraphMatcher to which this GMState belongs and the
+        new node pair that will be added to the GraphMatcher's current
+        isomorphism mapping.
+        """
+        self.GM = GM
+
+        # Initialize the last stored node pair.
+        self.G1_node = None
+        self.G2_node = None
+        self.depth = len(GM.core_1)
+
+        if G1_node is None or G2_node is None:
+            # Then we reset the class variables
+            GM.core_1 = {}
+            GM.core_2 = {}
+            GM.inout_1 = {}
+            GM.inout_2 = {}
+
+        # Watch out! G1_node == 0 should evaluate to True.
+        if G1_node is not None and G2_node is not None:
+            # Add the node pair to the isomorphism mapping.
+            GM.core_1[G1_node] = G2_node
+            GM.core_2[G2_node] = G1_node
+
+            # Store the node that was added last.
+            self.G1_node = G1_node
+            self.G2_node = G2_node
+
+            # Now we must update the other two vectors.
+            # We will add only if it is not in there already!
+            self.depth = len(GM.core_1)
+
+            # First we add the new nodes...
+            if G1_node not in GM.inout_1:
+                GM.inout_1[G1_node] = self.depth
+            if G2_node not in GM.inout_2:
+                GM.inout_2[G2_node] = self.depth
+
+            # Now we add every other node...
+
+            # Updates for T_1^{inout}
+            new_nodes = set()
+            for node in GM.core_1:
+                new_nodes.update(
+                    [neighbor for neighbor in GM.G1[node] if neighbor not in GM.core_1]
+                )
+            for node in new_nodes:
+                if node not in GM.inout_1:
+                    GM.inout_1[node] = self.depth
+
+            # Updates for T_2^{inout}
+            new_nodes = set()
+            for node in GM.core_2:
+                new_nodes.update(
+                    [neighbor for neighbor in GM.G2[node] if neighbor not in GM.core_2]
+                )
+            for node in new_nodes:
+                if node not in GM.inout_2:
+                    GM.inout_2[node] = self.depth
+
+    def restore(self):
+        """Deletes the GMState object and restores the class variables."""
+        # First we remove the node that was added from the core vectors.
+        # Watch out! G1_node == 0 should evaluate to True.
+        if self.G1_node is not None and self.G2_node is not None:
+            del self.GM.core_1[self.G1_node]
+            del self.GM.core_2[self.G2_node]
+
+        # Now we revert the other two vectors.
+        # Thus, we delete all entries which have this depth level.
+        for vector in (self.GM.inout_1, self.GM.inout_2):
+            for node in list(vector.keys()):
+                if vector[node] == self.depth:
+                    del vector[node]
+
+
+class DiGMState:
+    """Internal representation of state for the DiGraphMatcher class.
+
+    This class is used internally by the DiGraphMatcher class.  It is used
+    only to store state specific data. There will be at most G2.order() of
+    these objects in memory at a time, due to the depth-first search
+    strategy employed by the VF2 algorithm.
+
+    """
+
+    def __init__(self, GM, G1_node=None, G2_node=None):
+        """Initializes DiGMState object.
+
+        Pass in the DiGraphMatcher to which this DiGMState belongs and the
+        new node pair that will be added to the DiGraphMatcher's current
+        isomorphism mapping.
+        """
+        self.GM = GM
+
+        # Initialize the last stored node pair.
+        self.G1_node = None
+        self.G2_node = None
+        self.depth = len(GM.core_1)
+
+        if G1_node is None or G2_node is None:
+            # Then we reset the class variables
+            GM.core_1 = {}
+            GM.core_2 = {}
+            GM.in_1 = {}
+            GM.in_2 = {}
+            GM.out_1 = {}
+            GM.out_2 = {}
+
+        # Watch out! G1_node == 0 should evaluate to True.
+        if G1_node is not None and G2_node is not None:
+            # Add the node pair to the isomorphism mapping.
+            GM.core_1[G1_node] = G2_node
+            GM.core_2[G2_node] = G1_node
+
+            # Store the node that was added last.
+            self.G1_node = G1_node
+            self.G2_node = G2_node
+
+            # Now we must update the other four vectors.
+            # We will add only if it is not in there already!
+            self.depth = len(GM.core_1)
+
+            # First we add the new nodes...
+            for vector in (GM.in_1, GM.out_1):
+                if G1_node not in vector:
+                    vector[G1_node] = self.depth
+            for vector in (GM.in_2, GM.out_2):
+                if G2_node not in vector:
+                    vector[G2_node] = self.depth
+
+            # Now we add every other node...
+
+            # Updates for T_1^{in}
+            new_nodes = set()
+            for node in GM.core_1:
+                new_nodes.update(
+                    [
+                        predecessor
+                        for predecessor in GM.G1.predecessors(node)
+                        if predecessor not in GM.core_1
+                    ]
+                )
+            for node in new_nodes:
+                if node not in GM.in_1:
+                    GM.in_1[node] = self.depth
+
+            # Updates for T_2^{in}
+            new_nodes = set()
+            for node in GM.core_2:
+                new_nodes.update(
+                    [
+                        predecessor
+                        for predecessor in GM.G2.predecessors(node)
+                        if predecessor not in GM.core_2
+                    ]
+                )
+            for node in new_nodes:
+                if node not in GM.in_2:
+                    GM.in_2[node] = self.depth
+
+            # Updates for T_1^{out}
+            new_nodes = set()
+            for node in GM.core_1:
+                new_nodes.update(
+                    [
+                        successor
+                        for successor in GM.G1.successors(node)
+                        if successor not in GM.core_1
+                    ]
+                )
+            for node in new_nodes:
+                if node not in GM.out_1:
+                    GM.out_1[node] = self.depth
+
+            # Updates for T_2^{out}
+            new_nodes = set()
+            for node in GM.core_2:
+                new_nodes.update(
+                    [
+                        successor
+                        for successor in GM.G2.successors(node)
+                        if successor not in GM.core_2
+                    ]
+                )
+            for node in new_nodes:
+                if node not in GM.out_2:
+                    GM.out_2[node] = self.depth
+
+    def restore(self):
+        """Deletes the DiGMState object and restores the class variables."""
+
+        # First we remove the node that was added from the core vectors.
+        # Watch out! G1_node == 0 should evaluate to True.
+        if self.G1_node is not None and self.G2_node is not None:
+            del self.GM.core_1[self.G1_node]
+            del self.GM.core_2[self.G2_node]
+
+        # Now we revert the other four vectors.
+        # Thus, we delete all entries which have this depth level.
+        for vector in (self.GM.in_1, self.GM.in_2, self.GM.out_1, self.GM.out_2):
+            for node in list(vector.keys()):
+                if vector[node] == self.depth:
+                    del vector[node]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/matchhelpers.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/matchhelpers.py
new file mode 100644
index 00000000..b48820d4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/matchhelpers.py
@@ -0,0 +1,352 @@
+"""Functions which help end users define customize node_match and
+edge_match functions to use during isomorphism checks.
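+
+For example, a node attribute matcher built here can be passed straight to
+``nx.is_isomorphic`` (a small illustrative usage; the ``color`` attribute is
+arbitrary):
+
+>>> import networkx as nx
+>>> import networkx.algorithms.isomorphism as iso
+>>> G1 = nx.path_graph(2)
+>>> G2 = nx.path_graph(2)
+>>> nx.set_node_attributes(G1, "red", "color")
+>>> nx.set_node_attributes(G2, "blue", "color")
+>>> nm = iso.categorical_node_match("color", "red")
+>>> nx.is_isomorphic(G1, G2)  # attributes ignored
+True
+>>> nx.is_isomorphic(G1, G2, node_match=nm)  # attributes compared
+False
+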
+"""
+
+import math
+import types
+from itertools import permutations
+
+__all__ = [
+    "categorical_node_match",
+    "categorical_edge_match",
+    "categorical_multiedge_match",
+    "numerical_node_match",
+    "numerical_edge_match",
+    "numerical_multiedge_match",
+    "generic_node_match",
+    "generic_edge_match",
+    "generic_multiedge_match",
+]
+
+
+def copyfunc(f, name=None):
+    """Returns a deepcopy of a function."""
+    return types.FunctionType(
+        f.__code__, f.__globals__, name or f.__name__, f.__defaults__, f.__closure__
+    )
+
+
+def allclose(x, y, rtol=1.0000000000000001e-05, atol=1e-08):
+    """Returns True if x and y are sufficiently close, elementwise.
+
+    Parameters
+    ----------
+    rtol : float
+        The relative error tolerance.
+    atol : float
+        The absolute error tolerance.
+
+    """
+    # assume finite weights, see numpy.allclose() for reference
+    return all(math.isclose(xi, yi, rel_tol=rtol, abs_tol=atol) for xi, yi in zip(x, y))
+
+
+categorical_doc = """
+Returns a comparison function for a categorical node attribute.
+
+The value(s) of the attr(s) must be hashable and comparable via the ==
+operator since they are placed into a set([]) object.  If the sets from
+G1 and G2 are the same, then the constructed function returns True.
+
+Parameters
+----------
+attr : string | list
+    The categorical node attribute to compare, or a list of categorical
+    node attributes to compare.
+default : value | list
+    The default value for the categorical node attribute, or a list of
+    default values for the categorical node attributes.
+
+Returns
+-------
+match : function
+    The customized, categorical `node_match` function.
+
+Examples
+--------
+>>> import networkx.algorithms.isomorphism as iso
+>>> nm = iso.categorical_node_match("size", 1)
+>>> nm = iso.categorical_node_match(["color", "size"], ["red", 2])
+
+"""
+
+
+def categorical_node_match(attr, default):
+    if isinstance(attr, str):
+
+        def match(data1, data2):
+            return data1.get(attr, default) == data2.get(attr, default)
+
+    else:
+        attrs = list(zip(attr, default))  # Python 3
+
+        def match(data1, data2):
+            return all(data1.get(attr, d) == data2.get(attr, d) for attr, d in attrs)
+
+    return match
+
+
+categorical_edge_match = copyfunc(categorical_node_match, "categorical_edge_match")
+
+
+def categorical_multiedge_match(attr, default):
+    if isinstance(attr, str):
+
+        def match(datasets1, datasets2):
+            values1 = {data.get(attr, default) for data in datasets1.values()}
+            values2 = {data.get(attr, default) for data in datasets2.values()}
+            return values1 == values2
+
+    else:
+        attrs = list(zip(attr, default))  # Python 3
+
+        def match(datasets1, datasets2):
+            values1 = set()
+            for data1 in datasets1.values():
+                x = tuple(data1.get(attr, d) for attr, d in attrs)
+                values1.add(x)
+            values2 = set()
+            for data2 in datasets2.values():
+                x = tuple(data2.get(attr, d) for attr, d in attrs)
+                values2.add(x)
+            return values1 == values2
+
+    return match
+
+
+# Docstrings for categorical functions.
+categorical_node_match.__doc__ = categorical_doc
+categorical_edge_match.__doc__ = categorical_doc.replace("node", "edge")
+tmpdoc = categorical_doc.replace("node", "edge")
+tmpdoc = tmpdoc.replace("categorical_edge_match", "categorical_multiedge_match")
+categorical_multiedge_match.__doc__ = tmpdoc
+
+
+numerical_doc = """
+Returns a comparison function for a numerical node attribute.
+
+The value(s) of the attr(s) must be numerical and sortable.  If the
+sorted list of values from G1 and G2 are the same within some
+tolerance, then the constructed function returns True.
+
+Parameters
+----------
+attr : string | list
+    The numerical node attribute to compare, or a list of numerical
+    node attributes to compare.
+default : value | list
+    The default value for the numerical node attribute, or a list of
+    default values for the numerical node attributes.
+rtol : float
+    The relative error tolerance.
+atol : float
+    The absolute error tolerance.
+
+Returns
+-------
+match : function
+    The customized, numerical `node_match` function.
+
+Examples
+--------
+>>> import networkx.algorithms.isomorphism as iso
+>>> nm = iso.numerical_node_match("weight", 1.0)
+>>> nm = iso.numerical_node_match(["weight", "linewidth"], [0.25, 0.5])
+
+"""
+
+
+def numerical_node_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08):
+    if isinstance(attr, str):
+
+        def match(data1, data2):
+            return math.isclose(
+                data1.get(attr, default),
+                data2.get(attr, default),
+                rel_tol=rtol,
+                abs_tol=atol,
+            )
+
+    else:
+        attrs = list(zip(attr, default))  # Python 3
+
+        def match(data1, data2):
+            values1 = [data1.get(attr, d) for attr, d in attrs]
+            values2 = [data2.get(attr, d) for attr, d in attrs]
+            return allclose(values1, values2, rtol=rtol, atol=atol)
+
+    return match
+
+
+numerical_edge_match = copyfunc(numerical_node_match, "numerical_edge_match")
+
+
+def numerical_multiedge_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08):
+    if isinstance(attr, str):
+
+        def match(datasets1, datasets2):
+            values1 = sorted(data.get(attr, default) for data in datasets1.values())
+            values2 = sorted(data.get(attr, default) for data in datasets2.values())
+            return allclose(values1, values2, rtol=rtol, atol=atol)
+
+    else:
+        attrs = list(zip(attr, default))  # Python 3
+
+        def match(datasets1, datasets2):
+            values1 = []
+            for data1 in datasets1.values():
+                x = tuple(data1.get(attr, d) for attr, d in attrs)
+                values1.append(x)
+            values2 = []
+            for data2 in datasets2.values():
+                x = tuple(data2.get(attr, d) for attr, d in attrs)
+                values2.append(x)
+            values1.sort()
+            values2.sort()
+            for xi, yi in zip(values1, values2):
+                if not allclose(xi, yi, rtol=rtol, atol=atol):
+                    return False
+            else:
+                return True
+
+    return match
+
+
+# Docstrings for numerical functions.
+numerical_node_match.__doc__ = numerical_doc
+numerical_edge_match.__doc__ = numerical_doc.replace("node", "edge")
+tmpdoc = numerical_doc.replace("node", "edge")
+tmpdoc = tmpdoc.replace("numerical_edge_match", "numerical_multiedge_match")
+numerical_multiedge_match.__doc__ = tmpdoc
+
+
+generic_doc = """
+Returns a comparison function for a generic attribute.
+
+The value(s) of the attr(s) are compared using the specified
+operators. If all the attributes are equal, then the constructed
+function returns True.
+
+Parameters
+----------
+attr : string | list
+    The node attribute to compare, or a list of node attributes
+    to compare.
+default : value | list
+    The default value for the node attribute, or a list of
+    default values for the node attributes.
+op : callable | list
+    The operator to use when comparing attribute values, or a list
+    of operators to use when comparing values for each attribute.
+
+Returns
+-------
+match : function
+    The customized, generic `node_match` function.
+
+Examples
+--------
+>>> from operator import eq
+>>> from math import isclose
+>>> from networkx.algorithms.isomorphism import generic_node_match
+>>> nm = generic_node_match("weight", 1.0, isclose)
+>>> nm = generic_node_match("color", "red", eq)
+>>> nm = generic_node_match(["weight", "color"], [1.0, "red"], [isclose, eq])
+
+"""
+
+
+def generic_node_match(attr, default, op):
+    if isinstance(attr, str):
+
+        def match(data1, data2):
+            return op(data1.get(attr, default), data2.get(attr, default))
+
+    else:
+        attrs = list(zip(attr, default, op))  # Python 3
+
+        def match(data1, data2):
+            for attr, d, operator in attrs:
+                if not operator(data1.get(attr, d), data2.get(attr, d)):
+                    return False
+            else:
+                return True
+
+    return match
+
+
+generic_edge_match = copyfunc(generic_node_match, "generic_edge_match")
+
+
+def generic_multiedge_match(attr, default, op):
+    """Returns a comparison function for a generic attribute.
+
+    The value(s) of the attr(s) are compared using the specified
+    operators. If all the attributes are equal, then the constructed
+    function returns True. Potentially, the constructed edge_match
+    function can be slow since it must verify that no isomorphism
+    exists between the multiedges before it returns False.
+
+    Parameters
+    ----------
+    attr : string | list
+        The edge attribute to compare, or a list of edge attributes
+        to compare.
+    default : value | list
+        The default value for the edge attribute, or a list of
+        default values for the edgeattributes.
+    op : callable | list
+        The operator to use when comparing attribute values, or a list
+        of operators to use when comparing values for each attribute.
+
+    Returns
+    -------
+    match : function
+        The customized, generic `edge_match` function.
+
+    Examples
+    --------
+    >>> from operator import eq
+    >>> from math import isclose
+    >>> from networkx.algorithms.isomorphism import generic_multiedge_match
+    >>> nm = generic_multiedge_match("weight", 1.0, isclose)
+    >>> nm = generic_multiedge_match("color", "red", eq)
+    >>> nm = generic_multiedge_match(["weight", "color"], [1.0, "red"], [isclose, eq])
+
+    """
+
+    # This is slow, but generic.
+    # We must test every possible isomorphism between the edges.
+    if isinstance(attr, str):
+        attr = [attr]
+        default = [default]
+        op = [op]
+    attrs = list(zip(attr, default))
+
+    def match(datasets1, datasets2):
+        values1 = []
+        for data1 in datasets1.values():
+            x = tuple(data1.get(attr, d) for attr, d in attrs)
+            values1.append(x)
+        values2 = []
+        for data2 in datasets2.values():
+            x = tuple(data2.get(attr, d) for attr, d in attrs)
+            values2.append(x)
+        for vals2 in permutations(values2):
+            for xi, yi in zip(values1, vals2):
+                if not all(map(lambda x, y, z: z(x, y), xi, yi, op)):
+                    # This is not an isomorphism, go to next permutation.
+                    break
+            else:
+                # Then we found an isomorphism.
+                return True
+        else:
+            # Then there are no isomorphisms between the multiedges.
+            return False
+
+    return match
+
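+
+# Illustrative sketch of the permutation-based multiedge matching above, not
+# part of the upstream module.  The data and the helper name
+# `_generic_multiedge_example` are assumptions for illustration only.
+def _generic_multiedge_example():
+    from operator import eq
+
+    em = generic_multiedge_match("label", None, eq)
+    # Edge-key -> data dicts, as produced by G[u][v] on a MultiGraph.
+    # The parallel edges only need to match under *some* pairing.
+    d1 = {0: {"label": "A"}, 1: {"label": "B"}}
+    d2 = {0: {"label": "B"}, 1: {"label": "A"}}
+    assert em(d1, d2)
+    assert not em(d1, {0: {"label": "A"}, 1: {"label": "C"}})
+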
+
+# Docstrings for generic functions.
+generic_node_match.__doc__ = generic_doc
+generic_edge_match.__doc__ = generic_doc.replace("node", "edge")
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py
new file mode 100644
index 00000000..62cacc77
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py
@@ -0,0 +1,308 @@
+"""
+*****************************
+Time-respecting VF2 Algorithm
+*****************************
+
+An extension of the VF2 algorithm for time-respecting graph isomorphism
+testing in temporal graphs.
+
+A temporal graph is one in which edges contain a datetime attribute,
+denoting when interaction occurred between the incident nodes. A
+time-respecting subgraph of a temporal graph is a subgraph such that
+all interactions incident to a node occurred within a time threshold,
+delta, of each other. A directed time-respecting subgraph has the
+added constraint that incoming interactions to a node must precede
+outgoing interactions from the same node - this enforces a sense of
+directed flow.
+
+Introduction
+------------
+
+The TimeRespectingGraphMatcher and TimeRespectingDiGraphMatcher
+extend the GraphMatcher and DiGraphMatcher classes, respectively,
+to include temporal constraints on matches. This is achieved through
+a semantic check, via the semantic_feasibility() function.
+
+As well as including G1 (the graph in which to seek embeddings) and
+G2 (the subgraph structure of interest), the name of the temporal
+attribute on the edges and the time threshold, delta, must be supplied
+as arguments to the matching constructors.
+
+A delta of zero is the strictest temporal constraint on the match -
+only embeddings in which all interactions occur at the same time will
+be returned. A delta of one day will allow embeddings in which
+adjacent interactions occur up to a day apart.
+
+Examples
+--------
+
+Examples will be provided when the datetime type has been incorporated.
+
+
+Temporal Subgraph Isomorphism
+-----------------------------
+
+A brief discussion of the somewhat diverse current literature will be
+included here.
+
+References
+----------
+
+[1] Redmond, U. and Cunningham, P. Temporal subgraph isomorphism. In:
+The 2013 IEEE/ACM International Conference on Advances in Social
+Networks Analysis and Mining (ASONAM). Niagara Falls, Canada; 2013:
+pages 1451 - 1452.
+
+For a discussion of the literature on temporal networks:
+
+[2] P. Holme and J. Saramaki. Temporal networks. Physics Reports,
+519(3):97–125, 2012.
+
+Notes
+-----
+
+Handles directed and undirected graphs and graphs with parallel edges.
+
+"""
+
+import networkx as nx
+
+from .isomorphvf2 import DiGraphMatcher, GraphMatcher
+
+__all__ = ["TimeRespectingGraphMatcher", "TimeRespectingDiGraphMatcher"]
+
+
+class TimeRespectingGraphMatcher(GraphMatcher):
+    def __init__(self, G1, G2, temporal_attribute_name, delta):
+        """Initialize TimeRespectingGraphMatcher.
+
+        G1 and G2 should be nx.Graph or nx.MultiGraph instances.
+
+        Examples
+        --------
+        To create a TimeRespectingGraphMatcher which checks for
+        syntactic and semantic feasibility:
+
+        >>> from networkx.algorithms import isomorphism
+        >>> from datetime import timedelta
+        >>> G1 = nx.Graph(nx.path_graph(4, create_using=nx.Graph()))
+
+        >>> G2 = nx.Graph(nx.path_graph(4, create_using=nx.Graph()))
+
+        >>> GM = isomorphism.TimeRespectingGraphMatcher(
+        ...     G1, G2, "date", timedelta(days=1)
+        ... )
+        """
+        self.temporal_attribute_name = temporal_attribute_name
+        self.delta = delta
+        super().__init__(G1, G2)
+
+    def one_hop(self, Gx, Gx_node, neighbors):
+        """
+        Edges one hop out from a node in the mapping should be
+        time-respecting with respect to each other.
+        """
+        dates = []
+        for n in neighbors:
+            if isinstance(Gx, nx.Graph):  # Graph G[u][v] returns the data dictionary.
+                dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
+            else:  # MultiGraph G[u][v] returns a dictionary of key -> data dictionary.
+                for edge in Gx[Gx_node][
+                    n
+                ].values():  # Iterates all edges between node pair.
+                    dates.append(edge[self.temporal_attribute_name])
+        if any(x is None for x in dates):
+            raise ValueError("Datetime not supplied for at least one edge.")
+        return not dates or max(dates) - min(dates) <= self.delta
+
+    def two_hop(self, Gx, core_x, Gx_node, neighbors):
+        """
+        Paths of length 2 from Gx_node should be time-respecting.
+        """
+        return all(
+            self.one_hop(Gx, v, [n for n in Gx[v] if n in core_x] + [Gx_node])
+            for v in neighbors
+        )
+
+    def semantic_feasibility(self, G1_node, G2_node):
+        """Returns True if adding (G1_node, G2_node) is semantically
+        feasible.
+
+        Any subclass which redefines semantic_feasibility() must
+        maintain the self.test attribute if needed, to keep the match()
+        method functional. Implementations should consider multigraphs.
+        """
+        neighbors = [n for n in self.G1[G1_node] if n in self.core_1]
+        if not self.one_hop(self.G1, G1_node, neighbors):  # Fail fast on first node.
+            return False
+        if not self.two_hop(self.G1, self.core_1, G1_node, neighbors):
+            return False
+        # Otherwise, this node is semantically feasible!
+        return True
+
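+
+# Illustrative end-to-end sketch for the undirected matcher above, not part of
+# the upstream module.  The graph, the dates and the helper name
+# `_time_respecting_example` are assumptions for illustration only.
+def _time_respecting_example():
+    from datetime import date, timedelta
+
+    G1 = nx.Graph()
+    G1.add_edge(0, 1, date=date(2015, 1, 1))
+    G1.add_edge(1, 2, date=date(2015, 1, 2))
+    G1.add_edge(2, 3, date=date(2015, 1, 5))
+
+    G2 = nx.path_graph(3)  # the pattern: a path with two edges
+
+    gm = TimeRespectingGraphMatcher(G1, G2, "date", timedelta(days=1))
+    # Only embeddings on G1 nodes {0, 1, 2} survive: the two edges meeting at
+    # node 1 are one day apart (within delta), while the edges meeting at
+    # node 2 are three days apart.
+    return list(gm.subgraph_isomorphisms_iter())
+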
+
+class TimeRespectingDiGraphMatcher(DiGraphMatcher):
+    def __init__(self, G1, G2, temporal_attribute_name, delta):
+        """Initialize TimeRespectingDiGraphMatcher.
+
+        G1 and G2 should be nx.DiGraph or nx.MultiDiGraph instances.
+
+        Examples
+        --------
+        To create a TimeRespectingDiGraphMatcher which checks for
+        syntactic and semantic feasibility:
+
+        >>> from networkx.algorithms import isomorphism
+        >>> from datetime import timedelta
+        >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
+
+        >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
+
+        >>> GM = isomorphism.TimeRespectingDiGraphMatcher(
+        ...     G1, G2, "date", timedelta(days=1)
+        ... )
+        """
+        self.temporal_attribute_name = temporal_attribute_name
+        self.delta = delta
+        super().__init__(G1, G2)
+
+    def get_pred_dates(self, Gx, Gx_node, core_x, pred):
+        """
+        Get the dates of edges from predecessors.
+        """
+        pred_dates = []
+        if isinstance(Gx, nx.DiGraph):  # Graph G[u][v] returns the data dictionary.
+            for n in pred:
+                pred_dates.append(Gx[n][Gx_node][self.temporal_attribute_name])
+        else:  # MultiGraph G[u][v] returns a dictionary of key -> data dictionary.
+            for n in pred:
+                for edge in Gx[n][
+                    Gx_node
+                ].values():  # Iterates all edge data between node pair.
+                    pred_dates.append(edge[self.temporal_attribute_name])
+        return pred_dates
+
+    def get_succ_dates(self, Gx, Gx_node, core_x, succ):
+        """
+        Get the dates of edges to successors.
+        """
+        succ_dates = []
+        if isinstance(Gx, nx.DiGraph):  # Graph G[u][v] returns the data dictionary.
+            for n in succ:
+                succ_dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
+        else:  # MultiGraph G[u][v] returns a dictionary of key -> data dictionary.
+            for n in succ:
+                for edge in Gx[Gx_node][
+                    n
+                ].values():  # Iterates all edge data between node pair.
+                    succ_dates.append(edge[self.temporal_attribute_name])
+        return succ_dates
+
+    def one_hop(self, Gx, Gx_node, core_x, pred, succ):
+        """
+        Check that the edges incident to the ego node are time-respecting
+        and respect the directed-flow ordering.
+        """
+        pred_dates = self.get_pred_dates(Gx, Gx_node, core_x, pred)
+        succ_dates = self.get_succ_dates(Gx, Gx_node, core_x, succ)
+        return self.test_one(pred_dates, succ_dates) and self.test_two(
+            pred_dates, succ_dates
+        )
+
+    def two_hop_pred(self, Gx, Gx_node, core_x, pred):
+        """
+        Apply the one-hop check to each predecessor of the ego node.
+        """
+        return all(
+            self.one_hop(
+                Gx,
+                p,
+                core_x,
+                self.preds(Gx, core_x, p),
+                self.succs(Gx, core_x, p, Gx_node),
+            )
+            for p in pred
+        )
+
+    def two_hop_succ(self, Gx, Gx_node, core_x, succ):
+        """
+        Apply the one-hop check to each successor of the ego node.
+        """
+        return all(
+            self.one_hop(
+                Gx,
+                s,
+                core_x,
+                self.preds(Gx, core_x, s, Gx_node),
+                self.succs(Gx, core_x, s),
+            )
+            for s in succ
+        )
+
+    def preds(self, Gx, core_x, v, Gx_node=None):
+        pred = [n for n in Gx.predecessors(v) if n in core_x]
+        if Gx_node:
+            pred.append(Gx_node)
+        return pred
+
+    def succs(self, Gx, core_x, v, Gx_node=None):
+        succ = [n for n in Gx.successors(v) if n in core_x]
+        if Gx_node:
+            succ.append(Gx_node)
+        return succ
+
+    def test_one(self, pred_dates, succ_dates):
+        """
+        Edges one hop out from Gx_node in the mapping should be
+        time-respecting with respect to each other, regardless of
+        direction.
+        """
+        time_respecting = True
+        dates = pred_dates + succ_dates
+
+        if any(x is None for x in dates):
+            raise ValueError("Date or datetime not supplied for at least one edge.")
+
+        dates.sort()  # Small to large.
+        if 0 < len(dates) and not (dates[-1] - dates[0] <= self.delta):
+            time_respecting = False
+        return time_respecting
+
+    def test_two(self, pred_dates, succ_dates):
+        """
+        Edges from a dual Gx_node in the mapping should be ordered in
+        a time-respecting manner.
+        """
+        time_respecting = True
+        pred_dates.sort()
+        succ_dates.sort()
+        # If the earliest outgoing edge precedes the latest incoming edge,
+        # the directed-flow (time-respecting) condition is violated.
+        if (
+            0 < len(succ_dates)
+            and 0 < len(pred_dates)
+            and succ_dates[0] < pred_dates[-1]
+        ):
+            time_respecting = False
+        return time_respecting
+
+    def semantic_feasibility(self, G1_node, G2_node):
+        """Returns True if adding (G1_node, G2_node) is semantically
+        feasible.
+
+        Any subclass which redefines semantic_feasibility() must
+        maintain the self.test attribute if needed, to keep the match()
+        method functional. Implementations should consider multigraphs.
+        """
+        pred, succ = (
+            [n for n in self.G1.predecessors(G1_node) if n in self.core_1],
+            [n for n in self.G1.successors(G1_node) if n in self.core_1],
+        )
+        if not self.one_hop(
+            self.G1, G1_node, self.core_1, pred, succ
+        ):  # Fail fast on first node.
+            return False
+        if not self.two_hop_pred(self.G1, G1_node, self.core_1, pred):
+            return False
+        if not self.two_hop_succ(self.G1, G1_node, self.core_1, succ):
+            return False
+        # Otherwise, this node is semantically feasible!
+        return True
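+
+
+# Illustrative sketch of the directed-flow constraint enforced by the class
+# above, not part of the upstream module.  The graphs, the dates and the
+# helper name `_directed_flow_example` are assumptions for illustration only.
+def _directed_flow_example():
+    from datetime import date, timedelta
+
+    pattern = nx.DiGraph([(0, 1), (1, 2)])  # a directed two-edge path
+
+    flow_ok = nx.DiGraph()
+    flow_ok.add_edge("a", "b", date=date(2015, 1, 1))
+    flow_ok.add_edge("b", "c", date=date(2015, 1, 2))  # in precedes out at "b"
+
+    flow_bad = nx.DiGraph()
+    flow_bad.add_edge("a", "b", date=date(2015, 1, 2))
+    flow_bad.add_edge("b", "c", date=date(2015, 1, 1))  # out precedes in at "b"
+
+    delta = timedelta(days=1)
+    assert TimeRespectingDiGraphMatcher(
+        flow_ok, pattern, "date", delta
+    ).subgraph_is_isomorphic()
+    assert not TimeRespectingDiGraphMatcher(
+        flow_bad, pattern, "date", delta
+    ).subgraph_is_isomorphic()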
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99 b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99
new file mode 100644
index 00000000..dac54f00
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99
Binary files differdiff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99 b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99
new file mode 100644
index 00000000..6c6af680
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99
Binary files differdiff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99 b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99
new file mode 100644
index 00000000..60c3a3ce
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99
Binary files differdiff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99 b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99
new file mode 100644
index 00000000..02368720
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99
Binary files differdiff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py
new file mode 100644
index 00000000..bc4070ac
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py
@@ -0,0 +1,327 @@
+"""
+Tests for ISMAGS isomorphism algorithm.
+"""
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms import isomorphism as iso
+
+
+def _matches_to_sets(matches):
+    """
+    Helper function to facilitate comparing collections of dictionaries in
+    which order does not matter.
+    """
+    return {frozenset(m.items()) for m in matches}
+
+
+class TestSelfIsomorphism:
+    data = [
+        (
+            [
+                (0, {"name": "a"}),
+                (1, {"name": "a"}),
+                (2, {"name": "b"}),
+                (3, {"name": "b"}),
+                (4, {"name": "a"}),
+                (5, {"name": "a"}),
+            ],
+            [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5)],
+        ),
+        (range(1, 5), [(1, 2), (2, 4), (4, 3), (3, 1)]),
+        (
+            [],
+            [
+                (0, 1),
+                (1, 2),
+                (2, 3),
+                (3, 4),
+                (4, 5),
+                (5, 0),
+                (0, 6),
+                (6, 7),
+                (2, 8),
+                (8, 9),
+                (4, 10),
+                (10, 11),
+            ],
+        ),
+        ([], [(0, 1), (1, 2), (1, 4), (2, 3), (3, 5), (3, 6)]),
+    ]
+
+    def test_self_isomorphism(self):
+        """
+        For some small, symmetric graphs, make sure that 1) they are isomorphic
+        to themselves, and 2) that only the identity mapping is found.
+        """
+        for node_data, edge_data in self.data:
+            graph = nx.Graph()
+            graph.add_nodes_from(node_data)
+            graph.add_edges_from(edge_data)
+
+            ismags = iso.ISMAGS(
+                graph, graph, node_match=iso.categorical_node_match("name", None)
+            )
+            assert ismags.is_isomorphic()
+            assert ismags.subgraph_is_isomorphic()
+            assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [
+                {n: n for n in graph.nodes}
+            ]
+
+    def test_edgecase_self_isomorphism(self):
+        """
+        This edgecase is one of the cases in which it is hard to find all
+        symmetry elements.
+        """
+        graph = nx.Graph()
+        nx.add_path(graph, range(5))
+        graph.add_edges_from([(2, 5), (5, 6)])
+
+        ismags = iso.ISMAGS(graph, graph)
+        ismags_answer = list(ismags.find_isomorphisms(True))
+        assert ismags_answer == [{n: n for n in graph.nodes}]
+
+        graph = nx.relabel_nodes(graph, {0: 0, 1: 1, 2: 2, 3: 3, 4: 6, 5: 4, 6: 5})
+        ismags = iso.ISMAGS(graph, graph)
+        ismags_answer = list(ismags.find_isomorphisms(True))
+        assert ismags_answer == [{n: n for n in graph.nodes}]
+
+    def test_directed_self_isomorphism(self):
+        """
+        For some small, directed, symmetric graphs, make sure that 1) they are
+        isomorphic to themselves, and 2) that only the identity mapping is
+        found.
+        """
+        for node_data, edge_data in self.data:
+            graph = nx.Graph()
+            graph.add_nodes_from(node_data)
+            graph.add_edges_from(edge_data)
+
+            ismags = iso.ISMAGS(
+                graph, graph, node_match=iso.categorical_node_match("name", None)
+            )
+            assert ismags.is_isomorphic()
+            assert ismags.subgraph_is_isomorphic()
+            assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [
+                {n: n for n in graph.nodes}
+            ]
+
+
+class TestSubgraphIsomorphism:
+    def test_isomorphism(self):
+        g1 = nx.Graph()
+        nx.add_cycle(g1, range(4))
+
+        g2 = nx.Graph()
+        nx.add_cycle(g2, range(4))
+        g2.add_edges_from(list(zip(g2, range(4, 8))))
+        ismags = iso.ISMAGS(g2, g1)
+        assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [
+            {n: n for n in g1.nodes}
+        ]
+
+    def test_isomorphism2(self):
+        g1 = nx.Graph()
+        nx.add_path(g1, range(3))
+
+        g2 = g1.copy()
+        g2.add_edge(1, 3)
+
+        ismags = iso.ISMAGS(g2, g1)
+        matches = ismags.subgraph_isomorphisms_iter(symmetry=True)
+        expected_symmetric = [
+            {0: 0, 1: 1, 2: 2},
+            {0: 0, 1: 1, 3: 2},
+            {2: 0, 1: 1, 3: 2},
+        ]
+        assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric)
+
+        matches = ismags.subgraph_isomorphisms_iter(symmetry=False)
+        expected_asymmetric = [
+            {0: 2, 1: 1, 2: 0},
+            {0: 2, 1: 1, 3: 0},
+            {2: 2, 1: 1, 3: 0},
+        ]
+        assert _matches_to_sets(matches) == _matches_to_sets(
+            expected_symmetric + expected_asymmetric
+        )
+
+    def test_labeled_nodes(self):
+        g1 = nx.Graph()
+        nx.add_cycle(g1, range(3))
+        g1.nodes[1]["attr"] = True
+
+        g2 = g1.copy()
+        g2.add_edge(1, 3)
+        ismags = iso.ISMAGS(g2, g1, node_match=lambda x, y: x == y)
+        matches = ismags.subgraph_isomorphisms_iter(symmetry=True)
+        expected_symmetric = [{0: 0, 1: 1, 2: 2}]
+        assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric)
+
+        matches = ismags.subgraph_isomorphisms_iter(symmetry=False)
+        expected_asymmetric = [{0: 2, 1: 1, 2: 0}]
+        assert _matches_to_sets(matches) == _matches_to_sets(
+            expected_symmetric + expected_asymmetric
+        )
+
+    def test_labeled_edges(self):
+        g1 = nx.Graph()
+        nx.add_cycle(g1, range(3))
+        g1.edges[1, 2]["attr"] = True
+
+        g2 = g1.copy()
+        g2.add_edge(1, 3)
+        ismags = iso.ISMAGS(g2, g1, edge_match=lambda x, y: x == y)
+        matches = ismags.subgraph_isomorphisms_iter(symmetry=True)
+        expected_symmetric = [{0: 0, 1: 1, 2: 2}]
+        assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric)
+
+        matches = ismags.subgraph_isomorphisms_iter(symmetry=False)
+        expected_asymmetric = [{1: 2, 0: 0, 2: 1}]
+        assert _matches_to_sets(matches) == _matches_to_sets(
+            expected_symmetric + expected_asymmetric
+        )
+
+
+class TestWikipediaExample:
+    # Nodes 'a', 'b', 'c' and 'd' form a column.
+    # Nodes 'g', 'h', 'i' and 'j' form a column.
+    g1edges = [
+        ["a", "g"],
+        ["a", "h"],
+        ["a", "i"],
+        ["b", "g"],
+        ["b", "h"],
+        ["b", "j"],
+        ["c", "g"],
+        ["c", "i"],
+        ["c", "j"],
+        ["d", "h"],
+        ["d", "i"],
+        ["d", "j"],
+    ]
+
+    # Nodes 1,2,3,4 form the clockwise corners of a large square.
+    # Nodes 5,6,7,8 form the clockwise corners of a small square
+    g2edges = [
+        [1, 2],
+        [2, 3],
+        [3, 4],
+        [4, 1],
+        [5, 6],
+        [6, 7],
+        [7, 8],
+        [8, 5],
+        [1, 5],
+        [2, 6],
+        [3, 7],
+        [4, 8],
+    ]
+
+    def test_graph(self):
+        g1 = nx.Graph()
+        g2 = nx.Graph()
+        g1.add_edges_from(self.g1edges)
+        g2.add_edges_from(self.g2edges)
+        gm = iso.ISMAGS(g1, g2)
+        assert gm.is_isomorphic()
+
+
+class TestLargestCommonSubgraph:
+    def test_mcis(self):
+        # Example graphs from DOI: 10.1002/spe.588
+        graph1 = nx.Graph()
+        graph1.add_edges_from([(1, 2), (2, 3), (2, 4), (3, 4), (4, 5)])
+        graph1.nodes[1]["color"] = 0
+
+        graph2 = nx.Graph()
+        graph2.add_edges_from(
+            [(1, 2), (2, 3), (2, 4), (3, 4), (3, 5), (5, 6), (5, 7), (6, 7)]
+        )
+        graph2.nodes[1]["color"] = 1
+        graph2.nodes[6]["color"] = 2
+        graph2.nodes[7]["color"] = 2
+
+        ismags = iso.ISMAGS(
+            graph1, graph2, node_match=iso.categorical_node_match("color", None)
+        )
+        assert list(ismags.subgraph_isomorphisms_iter(True)) == []
+        assert list(ismags.subgraph_isomorphisms_iter(False)) == []
+        found_mcis = _matches_to_sets(ismags.largest_common_subgraph())
+        expected = _matches_to_sets(
+            [{2: 2, 3: 4, 4: 3, 5: 5}, {2: 4, 3: 2, 4: 3, 5: 5}]
+        )
+        assert expected == found_mcis
+
+        ismags = iso.ISMAGS(
+            graph2, graph1, node_match=iso.categorical_node_match("color", None)
+        )
+        assert list(ismags.subgraph_isomorphisms_iter(True)) == []
+        assert list(ismags.subgraph_isomorphisms_iter(False)) == []
+        found_mcis = _matches_to_sets(ismags.largest_common_subgraph())
+        # Same answer, but reversed.
+        expected = _matches_to_sets(
+            [{2: 2, 3: 4, 4: 3, 5: 5}, {4: 2, 2: 3, 3: 4, 5: 5}]
+        )
+        assert expected == found_mcis
+
+    def test_symmetry_mcis(self):
+        graph1 = nx.Graph()
+        nx.add_path(graph1, range(4))
+
+        graph2 = nx.Graph()
+        nx.add_path(graph2, range(3))
+        graph2.add_edge(1, 3)
+
+        # Only the symmetry of graph2 is taken into account here.
+        ismags1 = iso.ISMAGS(
+            graph1, graph2, node_match=iso.categorical_node_match("color", None)
+        )
+        assert list(ismags1.subgraph_isomorphisms_iter(True)) == []
+        found_mcis = _matches_to_sets(ismags1.largest_common_subgraph())
+        expected = _matches_to_sets([{0: 0, 1: 1, 2: 2}, {1: 0, 3: 2, 2: 1}])
+        assert expected == found_mcis
+
+        # Only the symmetry of graph1 is taken into account here.
+        ismags2 = iso.ISMAGS(
+            graph2, graph1, node_match=iso.categorical_node_match("color", None)
+        )
+        assert list(ismags2.subgraph_isomorphisms_iter(True)) == []
+        found_mcis = _matches_to_sets(ismags2.largest_common_subgraph())
+        expected = _matches_to_sets(
+            [
+                {3: 2, 0: 0, 1: 1},
+                {2: 0, 0: 2, 1: 1},
+                {3: 0, 0: 2, 1: 1},
+                {3: 0, 1: 1, 2: 2},
+                {0: 0, 1: 1, 2: 2},
+                {2: 0, 3: 2, 1: 1},
+            ]
+        )
+
+        assert expected == found_mcis
+
+        found_mcis1 = _matches_to_sets(ismags1.largest_common_subgraph(False))
+        found_mcis2 = ismags2.largest_common_subgraph(False)
+        found_mcis2 = [{v: k for k, v in d.items()} for d in found_mcis2]
+        found_mcis2 = _matches_to_sets(found_mcis2)
+
+        expected = _matches_to_sets(
+            [
+                {3: 2, 1: 3, 2: 1},
+                {2: 0, 0: 2, 1: 1},
+                {1: 2, 3: 3, 2: 1},
+                {3: 0, 1: 3, 2: 1},
+                {0: 2, 2: 3, 1: 1},
+                {3: 0, 1: 2, 2: 1},
+                {2: 0, 0: 3, 1: 1},
+                {0: 0, 2: 3, 1: 1},
+                {1: 0, 3: 3, 2: 1},
+                {1: 0, 3: 2, 2: 1},
+                {0: 3, 1: 1, 2: 2},
+                {0: 0, 1: 1, 2: 2},
+            ]
+        )
+        assert expected == found_mcis1
+        assert expected == found_mcis2
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py
new file mode 100644
index 00000000..548af808
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py
@@ -0,0 +1,48 @@
+import pytest
+
+import networkx as nx
+from networkx.algorithms import isomorphism as iso
+
+
+class TestIsomorph:
+    @classmethod
+    def setup_class(cls):
+        cls.G1 = nx.Graph()
+        cls.G2 = nx.Graph()
+        cls.G3 = nx.Graph()
+        cls.G4 = nx.Graph()
+        cls.G5 = nx.Graph()
+        cls.G6 = nx.Graph()
+        cls.G1.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 3]])
+        cls.G2.add_edges_from([[10, 20], [20, 30], [10, 30], [10, 50]])
+        cls.G3.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 5]])
+        cls.G4.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 4]])
+        cls.G5.add_edges_from([[1, 2], [1, 3]])
+        cls.G6.add_edges_from([[10, 20], [20, 30], [10, 30], [10, 50], [20, 50]])
+
+    def test_could_be_isomorphic(self):
+        assert iso.could_be_isomorphic(self.G1, self.G2)
+        assert iso.could_be_isomorphic(self.G1, self.G3)
+        assert not iso.could_be_isomorphic(self.G1, self.G4)
+        assert iso.could_be_isomorphic(self.G3, self.G2)
+        assert not iso.could_be_isomorphic(self.G1, self.G6)
+
+    def test_fast_could_be_isomorphic(self):
+        assert iso.fast_could_be_isomorphic(self.G3, self.G2)
+        assert not iso.fast_could_be_isomorphic(self.G3, self.G5)
+        assert not iso.fast_could_be_isomorphic(self.G1, self.G6)
+
+    def test_faster_could_be_isomorphic(self):
+        assert iso.faster_could_be_isomorphic(self.G3, self.G2)
+        assert not iso.faster_could_be_isomorphic(self.G3, self.G5)
+        assert not iso.faster_could_be_isomorphic(self.G1, self.G6)
+
+    def test_is_isomorphic(self):
+        assert iso.is_isomorphic(self.G1, self.G2)
+        assert not iso.is_isomorphic(self.G1, self.G4)
+        assert iso.is_isomorphic(self.G1.to_directed(), self.G2.to_directed())
+        assert not iso.is_isomorphic(self.G1.to_directed(), self.G4.to_directed())
+        with pytest.raises(
+            nx.NetworkXError, match="Graphs G1 and G2 are not of the same type."
+        ):
+            iso.is_isomorphic(self.G1.to_directed(), self.G1)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py
new file mode 100644
index 00000000..413dfaf3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py
@@ -0,0 +1,410 @@
+"""
+Tests for VF2 isomorphism algorithm.
+"""
+
+import importlib.resources
+import random
+import struct
+
+import networkx as nx
+from networkx.algorithms import isomorphism as iso
+
+
+class TestWikipediaExample:
+    # Source: https://en.wikipedia.org/wiki/Graph_isomorphism
+
+    # Nodes 'a', 'b', 'c' and 'd' form a column.
+    # Nodes 'g', 'h', 'i' and 'j' form a column.
+    g1edges = [
+        ["a", "g"],
+        ["a", "h"],
+        ["a", "i"],
+        ["b", "g"],
+        ["b", "h"],
+        ["b", "j"],
+        ["c", "g"],
+        ["c", "i"],
+        ["c", "j"],
+        ["d", "h"],
+        ["d", "i"],
+        ["d", "j"],
+    ]
+
+    # Nodes 1,2,3,4 form the clockwise corners of a large square.
+    # Nodes 5,6,7,8 form the clockwise corners of a small square
+    g2edges = [
+        [1, 2],
+        [2, 3],
+        [3, 4],
+        [4, 1],
+        [5, 6],
+        [6, 7],
+        [7, 8],
+        [8, 5],
+        [1, 5],
+        [2, 6],
+        [3, 7],
+        [4, 8],
+    ]
+
+    def test_graph(self):
+        g1 = nx.Graph()
+        g2 = nx.Graph()
+        g1.add_edges_from(self.g1edges)
+        g2.add_edges_from(self.g2edges)
+        gm = iso.GraphMatcher(g1, g2)
+        assert gm.is_isomorphic()
+        # Just testing some cases
+        assert gm.subgraph_is_monomorphic()
+
+        mapping = sorted(gm.mapping.items())
+
+    # this mapping is only one of the possibilities
+    # so this test needs to be reconsidered
+    #        isomap = [('a', 1), ('b', 6), ('c', 3), ('d', 8),
+    #                  ('g', 2), ('h', 5), ('i', 4), ('j', 7)]
+    #        assert_equal(mapping, isomap)
+
+    def test_subgraph(self):
+        g1 = nx.Graph()
+        g2 = nx.Graph()
+        g1.add_edges_from(self.g1edges)
+        g2.add_edges_from(self.g2edges)
+        g3 = g2.subgraph([1, 2, 3, 4])
+        gm = iso.GraphMatcher(g1, g3)
+        assert gm.subgraph_is_isomorphic()
+
+    def test_subgraph_mono(self):
+        g1 = nx.Graph()
+        g2 = nx.Graph()
+        g1.add_edges_from(self.g1edges)
+        g2.add_edges_from([[1, 2], [2, 3], [3, 4]])
+        gm = iso.GraphMatcher(g1, g2)
+        assert gm.subgraph_is_monomorphic()
+
+
+class TestVF2GraphDB:
+    # https://web.archive.org/web/20090303210205/http://amalfi.dis.unina.it/graph/db/
+
+    @staticmethod
+    def create_graph(filename):
+        """Creates a Graph instance from the filename."""
+
+        # The file is assumed to be in the format from the VF2 graph database.
+        # Each file is composed of 16-bit numbers (unsigned short int).
+        # So we will want to read 2 bytes at a time.
+
+        # We can read the number as follows:
+        #   number = struct.unpack('<H', file.read(2))
+        # This says, expect the data in little-endian encoding
+        # as an unsigned short int and unpack 2 bytes from the file.
+
+        fh = open(filename, mode="rb")
+
+        # Grab the number of nodes.
+        # Node numeration is 0-based, so the first node has index 0.
+        nodes = struct.unpack("<H", fh.read(2))[0]
+
+        graph = nx.Graph()
+        for from_node in range(nodes):
+            # Get the number of edges.
+            edges = struct.unpack("<H", fh.read(2))[0]
+            for edge in range(edges):
+                # Get the terminal node.
+                to_node = struct.unpack("<H", fh.read(2))[0]
+                graph.add_edge(from_node, to_node)
+
+        fh.close()
+        return graph
+
+    def test_graph(self):
+        head = importlib.resources.files("networkx.algorithms.isomorphism.tests")
+        g1 = self.create_graph(head / "iso_r01_s80.A99")
+        g2 = self.create_graph(head / "iso_r01_s80.B99")
+        gm = iso.GraphMatcher(g1, g2)
+        assert gm.is_isomorphic()
+
+    def test_subgraph(self):
+        # A is the subgraph
+        # B is the full graph
+        head = importlib.resources.files("networkx.algorithms.isomorphism.tests")
+        subgraph = self.create_graph(head / "si2_b06_m200.A99")
+        graph = self.create_graph(head / "si2_b06_m200.B99")
+        gm = iso.GraphMatcher(graph, subgraph)
+        assert gm.subgraph_is_isomorphic()
+        # Just testing some cases
+        assert gm.subgraph_is_monomorphic()
+
+    # There isn't a similar test implemented for subgraph monomorphism,
+    # feel free to create one.
+
+
+class TestAtlas:
+    @classmethod
+    def setup_class(cls):
+        global atlas
+        from networkx.generators import atlas
+
+        cls.GAG = atlas.graph_atlas_g()
+
+    def test_graph_atlas(self):
+        # Atlas = nx.graph_atlas_g()[0:208] # 208, 6 nodes or less
+        Atlas = self.GAG[0:100]
+        alphabet = list(range(26))
+        for graph in Atlas:
+            nlist = list(graph)
+            labels = alphabet[: len(nlist)]
+            for s in range(10):
+                random.shuffle(labels)
+                d = dict(zip(nlist, labels))
+                relabel = nx.relabel_nodes(graph, d)
+                gm = iso.GraphMatcher(graph, relabel)
+                assert gm.is_isomorphic()
+
+
+def test_multiedge():
+    # Simple test for multigraphs
+    # Need something much more rigorous
+    edges = [
+        (0, 1),
+        (1, 2),
+        (2, 3),
+        (3, 4),
+        (4, 5),
+        (5, 6),
+        (6, 7),
+        (7, 8),
+        (8, 9),
+        (9, 10),
+        (10, 11),
+        (10, 11),
+        (11, 12),
+        (11, 12),
+        (12, 13),
+        (12, 13),
+        (13, 14),
+        (13, 14),
+        (14, 15),
+        (14, 15),
+        (15, 16),
+        (15, 16),
+        (16, 17),
+        (16, 17),
+        (17, 18),
+        (17, 18),
+        (18, 19),
+        (18, 19),
+        (19, 0),
+        (19, 0),
+    ]
+    nodes = list(range(20))
+
+    for g1 in [nx.MultiGraph(), nx.MultiDiGraph()]:
+        g1.add_edges_from(edges)
+        for _ in range(10):
+            new_nodes = list(nodes)
+            random.shuffle(new_nodes)
+            d = dict(zip(nodes, new_nodes))
+            g2 = nx.relabel_nodes(g1, d)
+            if not g1.is_directed():
+                gm = iso.GraphMatcher(g1, g2)
+            else:
+                gm = iso.DiGraphMatcher(g1, g2)
+            assert gm.is_isomorphic()
+            # Testing if monomorphism works in multigraphs
+            assert gm.subgraph_is_monomorphic()
+
+
+def test_selfloop():
+    # Simple test for graphs with selfloops
+    edges = [
+        (0, 1),
+        (0, 2),
+        (1, 2),
+        (1, 3),
+        (2, 2),
+        (2, 4),
+        (3, 1),
+        (3, 2),
+        (4, 2),
+        (4, 5),
+        (5, 4),
+    ]
+    nodes = list(range(6))
+
+    for g1 in [nx.Graph(), nx.DiGraph()]:
+        g1.add_edges_from(edges)
+        for _ in range(100):
+            new_nodes = list(nodes)
+            random.shuffle(new_nodes)
+            d = dict(zip(nodes, new_nodes))
+            g2 = nx.relabel_nodes(g1, d)
+            if not g1.is_directed():
+                gm = iso.GraphMatcher(g1, g2)
+            else:
+                gm = iso.DiGraphMatcher(g1, g2)
+            assert gm.is_isomorphic()
+
+
+def test_selfloop_mono():
+    # Simple test for graphs with selfloops
+    edges0 = [
+        (0, 1),
+        (0, 2),
+        (1, 2),
+        (1, 3),
+        (2, 4),
+        (3, 1),
+        (3, 2),
+        (4, 2),
+        (4, 5),
+        (5, 4),
+    ]
+    edges = edges0 + [(2, 2)]
+    nodes = list(range(6))
+
+    for g1 in [nx.Graph(), nx.DiGraph()]:
+        g1.add_edges_from(edges)
+        for _ in range(100):
+            new_nodes = list(nodes)
+            random.shuffle(new_nodes)
+            d = dict(zip(nodes, new_nodes))
+            g2 = nx.relabel_nodes(g1, d)
+            g2.remove_edges_from(nx.selfloop_edges(g2))
+            if not g1.is_directed():
+                gm = iso.GraphMatcher(g2, g1)
+            else:
+                gm = iso.DiGraphMatcher(g2, g1)
+            assert not gm.subgraph_is_monomorphic()
+
+
+def test_isomorphism_iter1():
+    # As described in:
+    # http://groups.google.com/group/networkx-discuss/browse_thread/thread/2ff65c67f5e3b99f/d674544ebea359bb?fwc=1
+    g1 = nx.DiGraph()
+    g2 = nx.DiGraph()
+    g3 = nx.DiGraph()
+    g1.add_edge("A", "B")
+    g1.add_edge("B", "C")
+    g2.add_edge("Y", "Z")
+    g3.add_edge("Z", "Y")
+    gm12 = iso.DiGraphMatcher(g1, g2)
+    gm13 = iso.DiGraphMatcher(g1, g3)
+    x = list(gm12.subgraph_isomorphisms_iter())
+    y = list(gm13.subgraph_isomorphisms_iter())
+    assert {"A": "Y", "B": "Z"} in x
+    assert {"B": "Y", "C": "Z"} in x
+    assert {"A": "Z", "B": "Y"} in y
+    assert {"B": "Z", "C": "Y"} in y
+    assert len(x) == len(y)
+    assert len(x) == 2
+
+
+def test_monomorphism_iter1():
+    g1 = nx.DiGraph()
+    g2 = nx.DiGraph()
+    g1.add_edge("A", "B")
+    g1.add_edge("B", "C")
+    g1.add_edge("C", "A")
+    g2.add_edge("X", "Y")
+    g2.add_edge("Y", "Z")
+    gm12 = iso.DiGraphMatcher(g1, g2)
+    x = list(gm12.subgraph_monomorphisms_iter())
+    assert {"A": "X", "B": "Y", "C": "Z"} in x
+    assert {"A": "Y", "B": "Z", "C": "X"} in x
+    assert {"A": "Z", "B": "X", "C": "Y"} in x
+    assert len(x) == 3
+    gm21 = iso.DiGraphMatcher(g2, g1)
+    # Check if StopIteration exception returns False
+    assert not gm21.subgraph_is_monomorphic()
+
+
+def test_isomorphism_iter2():
+    # Path
+    for L in range(2, 10):
+        g1 = nx.path_graph(L)
+        gm = iso.GraphMatcher(g1, g1)
+        s = len(list(gm.isomorphisms_iter()))
+        assert s == 2
+    # Cycle
+    for L in range(3, 10):
+        g1 = nx.cycle_graph(L)
+        gm = iso.GraphMatcher(g1, g1)
+        s = len(list(gm.isomorphisms_iter()))
+        assert s == 2 * L
+
+
+def test_multiple():
+    # Verify that we can use the graph matcher multiple times
+    edges = [("A", "B"), ("B", "A"), ("B", "C")]
+    for g1, g2 in [(nx.Graph(), nx.Graph()), (nx.DiGraph(), nx.DiGraph())]:
+        g1.add_edges_from(edges)
+        g2.add_edges_from(edges)
+        g3 = nx.subgraph(g2, ["A", "B"])
+        if not g1.is_directed():
+            gmA = iso.GraphMatcher(g1, g2)
+            gmB = iso.GraphMatcher(g1, g3)
+        else:
+            gmA = iso.DiGraphMatcher(g1, g2)
+            gmB = iso.DiGraphMatcher(g1, g3)
+        assert gmA.is_isomorphic()
+        g2.remove_node("C")
+        if not g1.is_directed():
+            gmA = iso.GraphMatcher(g1, g2)
+        else:
+            gmA = iso.DiGraphMatcher(g1, g2)
+        assert gmA.subgraph_is_isomorphic()
+        assert gmB.subgraph_is_isomorphic()
+        assert gmA.subgraph_is_monomorphic()
+        assert gmB.subgraph_is_monomorphic()
+
+
+#        for m in [gmB.mapping, gmB.mapping]:
+#            assert_true(m['A'] == 'A')
+#            assert_true(m['B'] == 'B')
+#            assert_true('C' not in m)
+
+
+def test_noncomparable_nodes():
+    node1 = object()
+    node2 = object()
+    node3 = object()
+
+    # Graph
+    G = nx.path_graph([node1, node2, node3])
+    gm = iso.GraphMatcher(G, G)
+    assert gm.is_isomorphic()
+    # Just testing some cases
+    assert gm.subgraph_is_monomorphic()
+
+    # DiGraph
+    G = nx.path_graph([node1, node2, node3], create_using=nx.DiGraph)
+    H = nx.path_graph([node3, node2, node1], create_using=nx.DiGraph)
+    dgm = iso.DiGraphMatcher(G, H)
+    assert dgm.is_isomorphic()
+    # Just testing some cases
+    assert dgm.subgraph_is_monomorphic()
+
+
+def test_monomorphism_edge_match():
+    G = nx.DiGraph()
+    G.add_node(1)
+    G.add_node(2)
+    G.add_edge(1, 2, label="A")
+    G.add_edge(2, 1, label="B")
+    G.add_edge(2, 2, label="C")
+
+    SG = nx.DiGraph()
+    SG.add_node(5)
+    SG.add_node(6)
+    SG.add_edge(5, 6, label="A")
+
+    gm = iso.DiGraphMatcher(G, SG, edge_match=iso.categorical_edge_match("label", None))
+    assert gm.subgraph_is_monomorphic()
+
+
+def test_isomorphvf2pp_multidigraphs():
+    g = nx.MultiDiGraph({0: [1, 1, 2, 2, 3], 1: [2, 3, 3], 2: [3]})
+    h = nx.MultiDiGraph({0: [1, 1, 2, 2, 3], 1: [2, 3, 3], 3: [2]})
+    assert not nx.vf2pp_is_isomorphic(g, h)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_match_helpers.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_match_helpers.py
new file mode 100644
index 00000000..4d70347f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_match_helpers.py
@@ -0,0 +1,64 @@
+from operator import eq
+
+import networkx as nx
+from networkx.algorithms import isomorphism as iso
+
+
+def test_categorical_node_match():
+    nm = iso.categorical_node_match(["x", "y", "z"], [None] * 3)
+    assert nm({"x": 1, "y": 2, "z": 3}, {"x": 1, "y": 2, "z": 3})
+    assert not nm({"x": 1, "y": 2, "z": 2}, {"x": 1, "y": 2, "z": 1})
+
+
+class TestGenericMultiEdgeMatch:
+    def setup_method(self):
+        self.G1 = nx.MultiDiGraph()
+        self.G2 = nx.MultiDiGraph()
+        self.G3 = nx.MultiDiGraph()
+        self.G4 = nx.MultiDiGraph()
+        attr_dict1 = {"id": "edge1", "minFlow": 0, "maxFlow": 10}
+        attr_dict2 = {"id": "edge2", "minFlow": -3, "maxFlow": 7}
+        attr_dict3 = {"id": "edge3", "minFlow": 13, "maxFlow": 117}
+        attr_dict4 = {"id": "edge4", "minFlow": 13, "maxFlow": 117}
+        attr_dict5 = {"id": "edge5", "minFlow": 8, "maxFlow": 12}
+        attr_dict6 = {"id": "edge6", "minFlow": 8, "maxFlow": 12}
+        for attr_dict in [
+            attr_dict1,
+            attr_dict2,
+            attr_dict3,
+            attr_dict4,
+            attr_dict5,
+            attr_dict6,
+        ]:
+            self.G1.add_edge(1, 2, **attr_dict)
+        for attr_dict in [
+            attr_dict5,
+            attr_dict3,
+            attr_dict6,
+            attr_dict1,
+            attr_dict4,
+            attr_dict2,
+        ]:
+            self.G2.add_edge(2, 3, **attr_dict)
+        for attr_dict in [attr_dict3, attr_dict5]:
+            self.G3.add_edge(3, 4, **attr_dict)
+        for attr_dict in [attr_dict6, attr_dict4]:
+            self.G4.add_edge(4, 5, **attr_dict)
+
+    def test_generic_multiedge_match(self):
+        full_match = iso.generic_multiedge_match(
+            ["id", "flowMin", "flowMax"], [None] * 3, [eq] * 3
+        )
+        flow_match = iso.generic_multiedge_match(
+            ["flowMin", "flowMax"], [None] * 2, [eq] * 2
+        )
+        min_flow_match = iso.generic_multiedge_match("flowMin", None, eq)
+        id_match = iso.generic_multiedge_match("id", None, eq)
+        assert flow_match(self.G1[1][2], self.G2[2][3])
+        assert min_flow_match(self.G1[1][2], self.G2[2][3])
+        assert id_match(self.G1[1][2], self.G2[2][3])
+        assert full_match(self.G1[1][2], self.G2[2][3])
+        assert flow_match(self.G3[3][4], self.G4[4][5])
+        assert min_flow_match(self.G3[3][4], self.G4[4][5])
+        assert not id_match(self.G3[3][4], self.G4[4][5])
+        assert not full_match(self.G3[3][4], self.G4[4][5])
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py
new file mode 100644
index 00000000..1fe70a42
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py
@@ -0,0 +1,212 @@
+"""
+Tests for the temporal aspect of the Temporal VF2 isomorphism algorithm.
+"""
+
+from datetime import date, datetime, timedelta
+
+import networkx as nx
+from networkx.algorithms import isomorphism as iso
+
+
+def provide_g1_edgelist():
+    return [(0, 1), (0, 2), (1, 2), (2, 4), (1, 3), (3, 4), (4, 5)]
+
+
+def put_same_time(G, att_name):
+    for e in G.edges(data=True):
+        e[2][att_name] = date(2015, 1, 1)
+    return G
+
+
+def put_same_datetime(G, att_name):
+    for e in G.edges(data=True):
+        e[2][att_name] = datetime(2015, 1, 1)
+    return G
+
+
+def put_sequence_time(G, att_name):
+    current_date = date(2015, 1, 1)
+    for e in G.edges(data=True):
+        current_date += timedelta(days=1)
+        e[2][att_name] = current_date
+    return G
+
+
+def put_time_config_0(G, att_name):
+    G[0][1][att_name] = date(2015, 1, 2)
+    G[0][2][att_name] = date(2015, 1, 2)
+    G[1][2][att_name] = date(2015, 1, 3)
+    G[1][3][att_name] = date(2015, 1, 1)
+    G[2][4][att_name] = date(2015, 1, 1)
+    G[3][4][att_name] = date(2015, 1, 3)
+    G[4][5][att_name] = date(2015, 1, 3)
+    return G
+
+
+def put_time_config_1(G, att_name):
+    G[0][1][att_name] = date(2015, 1, 2)
+    G[0][2][att_name] = date(2015, 1, 1)
+    G[1][2][att_name] = date(2015, 1, 3)
+    G[1][3][att_name] = date(2015, 1, 1)
+    G[2][4][att_name] = date(2015, 1, 2)
+    G[3][4][att_name] = date(2015, 1, 4)
+    G[4][5][att_name] = date(2015, 1, 3)
+    return G
+
+
+def put_time_config_2(G, att_name):
+    G[0][1][att_name] = date(2015, 1, 1)
+    G[0][2][att_name] = date(2015, 1, 1)
+    G[1][2][att_name] = date(2015, 1, 3)
+    G[1][3][att_name] = date(2015, 1, 2)
+    G[2][4][att_name] = date(2015, 1, 2)
+    G[3][4][att_name] = date(2015, 1, 3)
+    G[4][5][att_name] = date(2015, 1, 2)
+    return G
+
+
+class TestTimeRespectingGraphMatcher:
+    """
+    A test class for the undirected temporal graph matcher.
+    """
+
+    def provide_g1_topology(self):
+        G1 = nx.Graph()
+        G1.add_edges_from(provide_g1_edgelist())
+        return G1
+
+    def provide_g2_path_3edges(self):
+        G2 = nx.Graph()
+        G2.add_edges_from([(0, 1), (1, 2), (2, 3)])
+        return G2
+
+    def test_timdelta_zero_timeRespecting_returnsTrue(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_same_time(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta()
+        gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
+        assert gm.subgraph_is_isomorphic()
+
+    def test_timdelta_zero_datetime_timeRespecting_returnsTrue(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_same_datetime(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta()
+        gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
+        assert gm.subgraph_is_isomorphic()
+
+    def test_attNameStrange_timdelta_zero_timeRespecting_returnsTrue(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "strange_name"
+        G1 = put_same_time(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta()
+        gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
+        assert gm.subgraph_is_isomorphic()
+
+    def test_notTimeRespecting_returnsFalse(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_sequence_time(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta()
+        gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
+        assert not gm.subgraph_is_isomorphic()
+
+    def test_timdelta_one_config0_returns_no_embeddings(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_time_config_0(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta(days=1)
+        gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
+        count_match = len(list(gm.subgraph_isomorphisms_iter()))
+        assert count_match == 0
+
+    def test_timdelta_one_config1_returns_four_embedding(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_time_config_1(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta(days=1)
+        gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
+        count_match = len(list(gm.subgraph_isomorphisms_iter()))
+        assert count_match == 4
+
+    def test_timdelta_one_config2_returns_ten_embeddings(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_time_config_2(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta(days=1)
+        gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
+        count_match = len(list(gm.subgraph_isomorphisms_iter()))
+        assert count_match == 10
+
+
+class TestDiTimeRespectingGraphMatcher:
+    """
+    A test class for the directed time-respecting graph matcher.
+    """
+
+    def provide_g1_topology(self):
+        G1 = nx.DiGraph()
+        G1.add_edges_from(provide_g1_edgelist())
+        return G1
+
+    def provide_g2_path_3edges(self):
+        G2 = nx.DiGraph()
+        G2.add_edges_from([(0, 1), (1, 2), (2, 3)])
+        return G2
+
+    def test_timdelta_zero_same_dates_returns_true(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_same_time(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta()
+        gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
+        assert gm.subgraph_is_isomorphic()
+
+    def test_attNameStrange_timdelta_zero_same_dates_returns_true(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "strange"
+        G1 = put_same_time(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta()
+        gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
+        assert gm.subgraph_is_isomorphic()
+
+    def test_timdelta_one_config0_returns_no_embeddings(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_time_config_0(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta(days=1)
+        gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
+        count_match = len(list(gm.subgraph_isomorphisms_iter()))
+        assert count_match == 0
+
+    def test_timdelta_one_config1_returns_one_embedding(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_time_config_1(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta(days=1)
+        gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
+        count_match = len(list(gm.subgraph_isomorphisms_iter()))
+        assert count_match == 1
+
+    def test_timdelta_one_config2_returns_two_embeddings(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_time_config_2(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta(days=1)
+        gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
+        count_match = len(list(gm.subgraph_isomorphisms_iter()))
+        assert count_match == 2
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py
new file mode 100644
index 00000000..fa1ab9bb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py
@@ -0,0 +1,292 @@
+import random
+import time
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms.isomorphism.tree_isomorphism import (
+    rooted_tree_isomorphism,
+    tree_isomorphism,
+)
+from networkx.classes.function import is_directed
+
+
+@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph))
+def test_tree_isomorphism_raises_on_directed_and_multigraphs(graph_constructor):
+    t1 = graph_constructor([(0, 1)])
+    t2 = graph_constructor([(1, 2)])
+    with pytest.raises(nx.NetworkXNotImplemented):
+        nx.isomorphism.tree_isomorphism(t1, t2)
+
+
+# Given two trees (directed or undirected) and an isomorphism between them
+# (as a list of node pairs), transform t2 according to the isomorphism and
+# confirm that its edge set is identical to t1's.  Callers construct the
+# trees with the edge order randomized.
+def check_isomorphism(t1, t2, isomorphism):
+    # get the name of t1, given the name in t2
+    mapping = {v2: v1 for (v1, v2) in isomorphism}
+
+    # these should be the same
+    d1 = is_directed(t1)
+    d2 = is_directed(t2)
+    assert d1 == d2
+
+    edges_1 = []
+    for u, v in t1.edges():
+        if d1:
+            edges_1.append((u, v))
+        else:
+            # if not directed, then need to
+            # put the edge in a consistent direction
+            if u < v:
+                edges_1.append((u, v))
+            else:
+                edges_1.append((v, u))
+
+    edges_2 = []
+    for u, v in t2.edges():
+        # translate to names for t1
+        u = mapping[u]
+        v = mapping[v]
+        if d2:
+            edges_2.append((u, v))
+        else:
+            if u < v:
+                edges_2.append((u, v))
+            else:
+                edges_2.append((v, u))
+
+    return sorted(edges_1) == sorted(edges_2)
+
+
+def test_hardcoded():
+    print("hardcoded test")
+
+    # define a test problem
+    edges_1 = [
+        ("a", "b"),
+        ("a", "c"),
+        ("a", "d"),
+        ("b", "e"),
+        ("b", "f"),
+        ("e", "j"),
+        ("e", "k"),
+        ("c", "g"),
+        ("c", "h"),
+        ("g", "m"),
+        ("d", "i"),
+        ("f", "l"),
+    ]
+
+    edges_2 = [
+        ("v", "y"),
+        ("v", "z"),
+        ("u", "x"),
+        ("q", "u"),
+        ("q", "v"),
+        ("p", "t"),
+        ("n", "p"),
+        ("n", "q"),
+        ("n", "o"),
+        ("o", "r"),
+        ("o", "s"),
+        ("s", "w"),
+    ]
+
+    # there are two possible correct isomorphisms
+    # it currently returns isomorphism1
+    # but the second is also correct
+    isomorphism1 = [
+        ("a", "n"),
+        ("b", "q"),
+        ("c", "o"),
+        ("d", "p"),
+        ("e", "v"),
+        ("f", "u"),
+        ("g", "s"),
+        ("h", "r"),
+        ("i", "t"),
+        ("j", "y"),
+        ("k", "z"),
+        ("l", "x"),
+        ("m", "w"),
+    ]
+
+    # could swap y and z
+    isomorphism2 = [
+        ("a", "n"),
+        ("b", "q"),
+        ("c", "o"),
+        ("d", "p"),
+        ("e", "v"),
+        ("f", "u"),
+        ("g", "s"),
+        ("h", "r"),
+        ("i", "t"),
+        ("j", "z"),
+        ("k", "y"),
+        ("l", "x"),
+        ("m", "w"),
+    ]
+
+    t1 = nx.Graph()
+    t1.add_edges_from(edges_1)
+    root1 = "a"
+
+    t2 = nx.Graph()
+    t2.add_edges_from(edges_2)
+    root2 = "n"
+
+    isomorphism = sorted(rooted_tree_isomorphism(t1, root1, t2, root2))
+
+    # is correct by hand
+    assert isomorphism in (isomorphism1, isomorphism2)
+
+    # check algorithmically
+    assert check_isomorphism(t1, t2, isomorphism)
+
+    # try again as digraph
+    t1 = nx.DiGraph()
+    t1.add_edges_from(edges_1)
+    root1 = "a"
+
+    t2 = nx.DiGraph()
+    t2.add_edges_from(edges_2)
+    root2 = "n"
+
+    isomorphism = sorted(rooted_tree_isomorphism(t1, root1, t2, root2))
+
+    # is correct by hand
+    assert isomorphism in (isomorphism1, isomorphism2)
+
+    # check algorithmically
+    assert check_isomorphism(t1, t2, isomorphism)
+
+
+# randomly swap a tuple (a,b)
+def random_swap(t):
+    (a, b) = t
+    if random.randint(0, 1) == 1:
+        return (a, b)
+    else:
+        return (b, a)
+
+
+# given a tree t1, create a new tree t2
+# that is isomorphic to t1, with a known isomorphism
+# and test that our algorithm found the right one
+def positive_single_tree(t1):
+    assert nx.is_tree(t1)
+
+    nodes1 = list(t1.nodes())
+    # get a random permutation of this
+    nodes2 = nodes1.copy()
+    random.shuffle(nodes2)
+
+    # this is one isomorphism; however, there may be multiple,
+    # so we don't necessarily get this one back
+    someisomorphism = list(zip(nodes1, nodes2))
+
+    # map from old to new
+    map1to2 = dict(someisomorphism)
+
+    # get the edges with the transformed names
+    edges2 = [random_swap((map1to2[u], map1to2[v])) for (u, v) in t1.edges()]
+    # randomly permute, to ensure we're not relying on edge order somehow
+    random.shuffle(edges2)
+
+    # so t2 is isomorphic to t1
+    t2 = nx.Graph()
+    t2.add_edges_from(edges2)
+
+    # let's call our code to see if t1 and t2 are isomorphic
+    isomorphism = tree_isomorphism(t1, t2)
+
+    # make sure we got a correct solution
+    # although not necessarily someisomorphism
+    assert len(isomorphism) > 0
+    assert check_isomorphism(t1, t2, isomorphism)
+
+
+# run positive_single_tree over all the
+# non-isomorphic trees for k from 2 to maxk
+# k = 4 is the first level that has more than 1 non-isomorphic tree
+# k = 13 takes about 2.86 seconds to run on my laptop
+# larger values slow down significantly
+# as the number of trees grows rapidly
+def test_positive(maxk=14):
+    print("positive test")
+
+    for k in range(2, maxk + 1):
+        start_time = time.time()
+        trial = 0
+        for t in nx.nonisomorphic_trees(k):
+            positive_single_tree(t)
+            trial += 1
+        print(k, trial, time.time() - start_time)
+
+
+# test the trivial case of a single node in each tree
+# note that nonisomorphic_trees doesn't work for k = 1
+def test_trivial():
+    print("trivial test")
+
+    # back to an undirected graph
+    t1 = nx.Graph()
+    t1.add_node("a")
+    root1 = "a"
+
+    t2 = nx.Graph()
+    t2.add_node("n")
+    root2 = "n"
+
+    isomorphism = rooted_tree_isomorphism(t1, root1, t2, root2)
+
+    assert isomorphism == [("a", "n")]
+
+    assert check_isomorphism(t1, t2, isomorphism)
+
+
+# test another trivial case where the two graphs have
+# different numbers of nodes
+def test_trivial_2():
+    print("trivial test 2")
+
+    edges_1 = [("a", "b"), ("a", "c")]
+
+    edges_2 = [("v", "y")]
+
+    t1 = nx.Graph()
+    t1.add_edges_from(edges_1)
+
+    t2 = nx.Graph()
+    t2.add_edges_from(edges_2)
+
+    isomorphism = tree_isomorphism(t1, t2)
+
+    # they cannot be isomorphic,
+    # since they have different numbers of nodes
+    assert isomorphism == []
+
+
+# the function nonisomorphic_trees generates all the non-isomorphic
+# trees of a given size.  Take each pair of these and verify that
+# they are not isomorphic
+# k = 4 is the first level that has more than 1 non-isomorphic tree
+# k = 11 takes about 4.76 seconds to run on my laptop
+# larger values slow down significantly
+# as the number of trees grows rapidly
+def test_negative(maxk=11):
+    print("negative test")
+
+    for k in range(4, maxk + 1):
+        test_trees = list(nx.nonisomorphic_trees(k))
+        start_time = time.time()
+        trial = 0
+        for i in range(len(test_trees) - 1):
+            for j in range(i + 1, len(test_trees)):
+                trial += 1
+                assert tree_isomorphism(test_trees[i], test_trees[j]) == []
+        print(k, trial, time.time() - start_time)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp.py
new file mode 100644
index 00000000..5f3fb901
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp.py
@@ -0,0 +1,1608 @@
+import itertools as it
+
+import pytest
+
+import networkx as nx
+from networkx import vf2pp_is_isomorphic, vf2pp_isomorphism
+
+labels_same = ["blue"]
+
+labels_many = [
+    "white",
+    "red",
+    "blue",
+    "green",
+    "orange",
+    "black",
+    "purple",
+    "yellow",
+    "brown",
+    "cyan",
+    "solarized",
+    "pink",
+    "none",
+]
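+
+
+# A brief reminder of the two entry points exercised below (a sketch, not a
+# substitute for the networkx docs): vf2pp_isomorphism returns a node-mapping
+# dict or None, vf2pp_is_isomorphic returns a bool, and node_label names the
+# node attribute used for label matching.
+#
+#     G = nx.path_graph(3)
+#     H = nx.relabel_nodes(G, {0: "a", 1: "b", 2: "c"})
+#     assert vf2pp_is_isomorphic(G, H)
+#     assert vf2pp_isomorphism(G, H) in (
+#         {0: "a", 1: "b", 2: "c"},
+#         {0: "c", 1: "b", 2: "a"},
+#     )
+#     assert vf2pp_isomorphism(G, nx.path_graph(4)) is None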
+
+
+class TestPreCheck:
+    def test_first_graph_empty(self):
+        G1 = nx.Graph()
+        G2 = nx.Graph([(0, 1), (1, 2)])
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+    def test_second_graph_empty(self):
+        G1 = nx.Graph([(0, 1), (1, 2)])
+        G2 = nx.Graph()
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+    def test_different_order1(self):
+        G1 = nx.path_graph(5)
+        G2 = nx.path_graph(6)
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+    def test_different_order2(self):
+        G1 = nx.barbell_graph(100, 20)
+        G2 = nx.barbell_graph(101, 20)
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+    def test_different_order3(self):
+        G1 = nx.complete_graph(7)
+        G2 = nx.complete_graph(8)
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+    def test_different_degree_sequences1(self):
+        G1 = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (0, 4)])
+        G2 = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (0, 4), (2, 5)])
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+        G2.remove_node(3)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(["a"]))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle("a"))), "label")
+
+        assert vf2pp_is_isomorphic(G1, G2)
+
+    def test_different_degree_sequences2(self):
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (0, 2),
+                (2, 3),
+                (3, 4),
+                (4, 5),
+                (5, 6),
+                (6, 3),
+                (4, 7),
+                (7, 8),
+                (8, 3),
+            ]
+        )
+        G2 = G1.copy()
+        G2.add_edge(8, 0)
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+        G1.add_edge(6, 1)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(["a"]))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle("a"))), "label")
+
+        assert vf2pp_is_isomorphic(G1, G2)
+
+    def test_different_degree_sequences3(self):
+        G1 = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)])
+        G2 = nx.Graph(
+            [(0, 1), (0, 6), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)]
+        )
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+        G1.add_edge(3, 5)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(["a"]))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle("a"))), "label")
+
+        assert vf2pp_is_isomorphic(G1, G2)
+
+    def test_label_distribution(self):
+        G1 = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)])
+        G2 = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)])
+
+        colors1 = ["blue", "blue", "blue", "yellow", "black", "purple", "purple"]
+        colors2 = ["blue", "blue", "yellow", "yellow", "black", "purple", "purple"]
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(colors1[::-1]))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(colors2[::-1]))), "label")
+
+        assert not vf2pp_is_isomorphic(G1, G2, node_label="label")
+        G2.nodes[3]["label"] = "blue"
+        assert vf2pp_is_isomorphic(G1, G2, node_label="label")
+
+
+class TestAllGraphTypesEdgeCases:
+    @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph, nx.DiGraph))
+    def test_both_graphs_empty(self, graph_type):
+        G = graph_type()
+        H = graph_type()
+        assert vf2pp_isomorphism(G, H) is None
+
+        G.add_node(0)
+
+        assert vf2pp_isomorphism(G, H) is None
+        assert vf2pp_isomorphism(H, G) is None
+
+        H.add_node(0)
+        assert vf2pp_isomorphism(G, H) == {0: 0}
+
+    @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph, nx.DiGraph))
+    def test_first_graph_empty(self, graph_type):
+        G = graph_type()
+        H = graph_type([(0, 1)])
+        assert vf2pp_isomorphism(G, H) is None
+
+    @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph, nx.DiGraph))
+    def test_second_graph_empty(self, graph_type):
+        G = graph_type([(0, 1)])
+        H = graph_type()
+        assert vf2pp_isomorphism(G, H) is None
+
+
+class TestGraphISOVF2pp:
+    def test_custom_graph1_same_labels(self):
+        G1 = nx.Graph()
+
+        mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
+        edges1 = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 6), (3, 4), (5, 1), (5, 2)]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Add edge making G1 symmetrical
+        G1.add_edge(3, 7)
+        G1.nodes[7]["label"] = "blue"
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Make G2 isomorphic to G1
+        G2.add_edges_from([(mapped[3], "X"), (mapped[6], mapped[5])])
+        G1.add_edge(4, 7)
+        G2.nodes["X"]["label"] = "blue"
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Re-structure maintaining isomorphism
+        G1.remove_edges_from([(1, 4), (1, 3)])
+        G2.remove_edges_from([(mapped[1], mapped[5]), (mapped[1], mapped[2])])
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+    def test_custom_graph1_different_labels(self):
+        G1 = nx.Graph()
+
+        mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
+        edges1 = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 6), (3, 4), (5, 1), (5, 2)]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+    def test_custom_graph2_same_labels(self):
+        G1 = nx.Graph()
+
+        mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
+        edges1 = [(1, 2), (1, 5), (5, 6), (2, 3), (2, 4), (3, 4), (4, 5), (2, 7)]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Obtain two isomorphic subgraphs from the graph
+        G2.remove_edge(mapped[1], mapped[2])
+        G2.add_edge(mapped[1], mapped[4])
+        H1 = nx.Graph(G1.subgraph([2, 3, 4, 7]))
+        H2 = nx.Graph(G2.subgraph([mapped[1], mapped[4], mapped[5], mapped[6]]))
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+        # Add edges maintaining isomorphism
+        H1.add_edges_from([(3, 7), (4, 7)])
+        H2.add_edges_from([(mapped[1], mapped[6]), (mapped[4], mapped[6])])
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+    def test_custom_graph2_different_labels(self):
+        G1 = nx.Graph()
+
+        mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
+        edges1 = [(1, 2), (1, 5), (5, 6), (2, 3), (2, 4), (3, 4), (4, 5), (2, 7)]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+
+        # Adding new nodes
+        G1.add_node(0)
+        G2.add_node("Z")
+        G1.nodes[0]["label"] = G1.nodes[1]["label"]
+        G2.nodes["Z"]["label"] = G1.nodes[1]["label"]
+        mapped.update({0: "Z"})
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+        # Change the color of one of the nodes
+        G2.nodes["Z"]["label"] = G1.nodes[2]["label"]
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Add an extra edge
+        G1.nodes[0]["label"] = "blue"
+        G2.nodes["Z"]["label"] = "blue"
+        G1.add_edge(0, 1)
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Add the matching extra edge to G2 as well
+        G2.add_edge("Z", "A")
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+    def test_custom_graph3_same_labels(self):
+        G1 = nx.Graph()
+
+        mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
+        edges1 = [
+            (1, 2),
+            (1, 3),
+            (2, 3),
+            (3, 4),
+            (4, 5),
+            (4, 7),
+            (4, 9),
+            (5, 8),
+            (8, 9),
+            (5, 6),
+            (6, 7),
+            (5, 2),
+        ]
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Connect nodes maintaining symmetry
+        G1.add_edges_from([(6, 9), (7, 8)])
+        G2.add_edges_from([(mapped[6], mapped[8]), (mapped[7], mapped[9])])
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Make isomorphic
+        G1.add_edges_from([(6, 8), (7, 9)])
+        G2.add_edges_from([(mapped[6], mapped[9]), (mapped[7], mapped[8])])
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Connect more nodes
+        G1.add_edges_from([(2, 7), (3, 6)])
+        G2.add_edges_from([(mapped[2], mapped[7]), (mapped[3], mapped[6])])
+        G1.add_node(10)
+        G2.add_node("Z")
+        G1.nodes[10]["label"] = "blue"
+        G2.nodes["Z"]["label"] = "blue"
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Connect the newly added node to opposite sides of the graph
+        G1.add_edges_from([(10, 1), (10, 5), (10, 8)])
+        G2.add_edges_from([("Z", mapped[1]), ("Z", mapped[4]), ("Z", mapped[9])])
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Get two subgraphs that are not isomorphic but are easy to make isomorphic
+        H1 = nx.Graph(G1.subgraph([2, 3, 4, 5, 6, 7, 10]))
+        H2 = nx.Graph(
+            G2.subgraph(
+                [mapped[4], mapped[5], mapped[6], mapped[7], mapped[8], mapped[9], "Z"]
+            )
+        )
+        assert vf2pp_isomorphism(H1, H2, node_label="label") is None
+
+        # Restructure both to make them isomorphic
+        H1.add_edges_from([(10, 2), (10, 6), (3, 6), (2, 7), (2, 6), (3, 7)])
+        H2.add_edges_from(
+            [("Z", mapped[7]), (mapped[6], mapped[9]), (mapped[7], mapped[8])]
+        )
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+        # Add edges with opposite direction in each Graph
+        H1.add_edge(3, 5)
+        H2.add_edge(mapped[5], mapped[7])
+        assert vf2pp_isomorphism(H1, H2, node_label="label") is None
+
+    def test_custom_graph3_different_labels(self):
+        G1 = nx.Graph()
+
+        mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
+        edges1 = [
+            (1, 2),
+            (1, 3),
+            (2, 3),
+            (3, 4),
+            (4, 5),
+            (4, 7),
+            (4, 9),
+            (5, 8),
+            (8, 9),
+            (5, 6),
+            (6, 7),
+            (5, 2),
+        ]
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+        # Add extra edge to G1
+        G1.add_edge(1, 7)
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Compensate in G2
+        G2.add_edge(9, 1)
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+        # Add extra node
+        G1.add_node("A")
+        G2.add_node("K")
+        G1.nodes["A"]["label"] = "green"
+        G2.nodes["K"]["label"] = "green"
+        mapped.update({"A": "K"})
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+        # Connect A to one side of G1 and K to the opposite
+        G1.add_edge("A", 6)
+        G2.add_edge("K", 5)
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Make the graphs symmetrical
+        G1.add_edge(1, 5)
+        G1.add_edge(2, 9)
+        G2.add_edge(9, 3)
+        G2.add_edge(8, 4)
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Assign same colors so the two opposite sides are identical
+        for node in G1.nodes():
+            color = "red"
+            G1.nodes[node]["label"] = color
+            G2.nodes[mapped[node]]["label"] = color
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+    def test_custom_graph4_different_labels(self):
+        G1 = nx.Graph()
+        edges1 = [
+            (1, 2),
+            (2, 3),
+            (3, 8),
+            (3, 4),
+            (4, 5),
+            (4, 6),
+            (3, 6),
+            (8, 7),
+            (8, 9),
+            (5, 9),
+            (10, 11),
+            (11, 12),
+            (12, 13),
+            (11, 13),
+        ]
+
+        mapped = {
+            1: "n",
+            2: "m",
+            3: "l",
+            4: "j",
+            5: "k",
+            6: "i",
+            7: "g",
+            8: "h",
+            9: "f",
+            10: "b",
+            11: "a",
+            12: "d",
+            13: "e",
+        }
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+    def test_custom_graph4_same_labels(self):
+        G1 = nx.Graph()
+        edges1 = [
+            (1, 2),
+            (2, 3),
+            (3, 8),
+            (3, 4),
+            (4, 5),
+            (4, 6),
+            (3, 6),
+            (8, 7),
+            (8, 9),
+            (5, 9),
+            (10, 11),
+            (11, 12),
+            (12, 13),
+            (11, 13),
+        ]
+
+        mapped = {
+            1: "n",
+            2: "m",
+            3: "l",
+            4: "j",
+            5: "k",
+            6: "i",
+            7: "g",
+            8: "h",
+            9: "f",
+            10: "b",
+            11: "a",
+            12: "d",
+            13: "e",
+        }
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Add nodes with different labels
+        G1.add_node(0)
+        G2.add_node("z")
+        G1.nodes[0]["label"] = "green"
+        G2.nodes["z"]["label"] = "blue"
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Make the labels identical
+        G2.nodes["z"]["label"] = "green"
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Change the structure of the graphs, keeping them isomorphic
+        G1.add_edge(2, 5)
+        G2.remove_edge("i", "l")
+        G2.add_edge("g", "l")
+        G2.add_edge("m", "f")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Change the structure of the disconnected sub-graph, keeping it isomorphic
+        G1.remove_node(13)
+        G2.remove_node("d")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Connect the newly added node to the disconnected graph, which now is just a path of size 3
+        G1.add_edge(0, 10)
+        G2.add_edge("e", "z")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Connect the two disconnected sub-graphs, forming a single graph
+        G1.add_edge(11, 3)
+        G1.add_edge(0, 8)
+        G2.add_edge("a", "l")
+        G2.add_edge("z", "j")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+    def test_custom_graph5_same_labels(self):
+        G1 = nx.Graph()
+        edges1 = [
+            (1, 5),
+            (1, 2),
+            (1, 4),
+            (2, 3),
+            (2, 6),
+            (3, 4),
+            (3, 7),
+            (4, 8),
+            (5, 8),
+            (5, 6),
+            (6, 7),
+            (7, 8),
+        ]
+        mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Add different edges in each graph, maintaining symmetry
+        G1.add_edges_from([(3, 6), (2, 7), (2, 5), (1, 3), (4, 7), (6, 8)])
+        G2.add_edges_from(
+            [
+                (mapped[6], mapped[3]),
+                (mapped[2], mapped[7]),
+                (mapped[1], mapped[6]),
+                (mapped[5], mapped[7]),
+                (mapped[3], mapped[8]),
+                (mapped[2], mapped[4]),
+            ]
+        )
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Obtain two different but isomorphic subgraphs from G1 and G2
+        H1 = nx.Graph(G1.subgraph([1, 5, 8, 6, 7, 3]))
+        H2 = nx.Graph(
+            G2.subgraph(
+                [mapped[1], mapped[4], mapped[8], mapped[7], mapped[3], mapped[5]]
+            )
+        )
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+        # Delete corresponding node from the two graphs
+        H1.remove_node(8)
+        H2.remove_node(mapped[7])
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+        # Re-orient, maintaining isomorphism
+        H1.add_edge(1, 6)
+        H1.remove_edge(3, 6)
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+    def test_custom_graph5_different_labels(self):
+        G1 = nx.Graph()
+        edges1 = [
+            (1, 5),
+            (1, 2),
+            (1, 4),
+            (2, 3),
+            (2, 6),
+            (3, 4),
+            (3, 7),
+            (4, 8),
+            (5, 8),
+            (5, 6),
+            (6, 7),
+            (7, 8),
+        ]
+        mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        colors = ["red", "blue", "grey", "none", "brown", "solarized", "yellow", "pink"]
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+        # Assign different colors to matching nodes
+        c = 0
+        for node in G1.nodes():
+            color1 = colors[c]
+            color2 = colors[(c + 3) % len(colors)]
+            G1.nodes[node]["label"] = color1
+            G2.nodes[mapped[node]]["label"] = color2
+            c += 1
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Get symmetrical sub-graphs of G1,G2 and compare them
+        H1 = G1.subgraph([1, 5])
+        H2 = G2.subgraph(["i", "c"])
+        c = 0
+        for node1, node2 in zip(H1.nodes(), H2.nodes()):
+            H1.nodes[node1]["label"] = "red"
+            H2.nodes[node2]["label"] = "red"
+            c += 1
+
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+    def test_disconnected_graph_all_same_labels(self):
+        G1 = nx.Graph()
+        G1.add_nodes_from(list(range(10)))
+
+        mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+    def test_disconnected_graph_all_different_labels(self):
+        G1 = nx.Graph()
+        G1.add_nodes_from(list(range(10)))
+
+        mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+    def test_disconnected_graph_some_same_labels(self):
+        G1 = nx.Graph()
+        G1.add_nodes_from(list(range(10)))
+
+        mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        colors = [
+            "white",
+            "white",
+            "white",
+            "purple",
+            "purple",
+            "red",
+            "red",
+            "pink",
+            "pink",
+            "pink",
+        ]
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(colors))), "label")
+        nx.set_node_attributes(
+            G2, dict(zip([mapped[n] for n in G1], it.cycle(colors))), "label"
+        )
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+
+class TestMultiGraphISOVF2pp:
+    def test_custom_multigraph1_same_labels(self):
+        G1 = nx.MultiGraph()
+
+        mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
+        edges1 = [
+            (1, 2),
+            (1, 3),
+            (1, 4),
+            (1, 4),
+            (1, 4),
+            (2, 3),
+            (2, 6),
+            (2, 6),
+            (3, 4),
+            (3, 4),
+            (5, 1),
+            (5, 1),
+            (5, 2),
+            (5, 2),
+        ]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Transfer the 2-clique to the right side of G1
+        G1.remove_edges_from([(2, 6), (2, 6)])
+        G1.add_edges_from([(3, 6), (3, 6)])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Delete an edge, making them symmetrical, so the position of the 2-clique doesn't matter
+        G2.remove_edge(mapped[1], mapped[4])
+        G1.remove_edge(1, 4)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Add self-loops
+        G1.add_edges_from([(5, 5), (5, 5), (1, 1)])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Compensate in G2
+        G2.add_edges_from(
+            [(mapped[1], mapped[1]), (mapped[4], mapped[4]), (mapped[4], mapped[4])]
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+    def test_custom_multigraph1_different_labels(self):
+        G1 = nx.MultiGraph()
+
+        mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
+        edges1 = [
+            (1, 2),
+            (1, 3),
+            (1, 4),
+            (1, 4),
+            (1, 4),
+            (2, 3),
+            (2, 6),
+            (2, 6),
+            (3, 4),
+            (3, 4),
+            (5, 1),
+            (5, 1),
+            (5, 2),
+            (5, 2),
+        ]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Re-structure G1, maintaining the degree sequence
+        G1.remove_edge(1, 4)
+        G1.add_edge(1, 5)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Restructure G2, making it isomorphic to G1
+        G2.remove_edge("A", "D")
+        G2.add_edge("A", "Z")
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Add edge from node to itself
+        G1.add_edges_from([(6, 6), (6, 6), (6, 6)])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Same for G2
+        G2.add_edges_from([("E", "E"), ("E", "E"), ("E", "E")])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+    def test_custom_multigraph2_same_labels(self):
+        G1 = nx.MultiGraph()
+
+        mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
+        edges1 = [
+            (1, 2),
+            (1, 2),
+            (1, 5),
+            (1, 5),
+            (1, 5),
+            (5, 6),
+            (2, 3),
+            (2, 3),
+            (2, 4),
+            (3, 4),
+            (3, 4),
+            (4, 5),
+            (4, 5),
+            (4, 5),
+            (2, 7),
+            (2, 7),
+            (2, 7),
+        ]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Obtain two non-isomorphic subgraphs from the graph
+        G2.remove_edges_from([(mapped[1], mapped[2]), (mapped[1], mapped[2])])
+        G2.add_edge(mapped[1], mapped[4])
+        H1 = nx.MultiGraph(G1.subgraph([2, 3, 4, 7]))
+        H2 = nx.MultiGraph(G2.subgraph([mapped[1], mapped[4], mapped[5], mapped[6]]))
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+        # Make them isomorphic
+        H1.remove_edge(3, 4)
+        H1.add_edges_from([(2, 3), (2, 4), (2, 4)])
+        H2.add_edges_from([(mapped[5], mapped[6]), (mapped[5], mapped[6])])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Remove triangle edge
+        H1.remove_edges_from([(2, 3), (2, 3), (2, 3)])
+        H2.remove_edges_from([(mapped[5], mapped[4])] * 3)
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Change the edge orientation such that H1 is a rotated version of H2
+        H1.remove_edges_from([(2, 7), (2, 7)])
+        H1.add_edges_from([(3, 4), (3, 4)])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Add extra edges maintaining degree sequence, but in a non-symmetrical manner
+        H2.add_edge(mapped[5], mapped[1])
+        H1.add_edge(3, 4)
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+    def test_custom_multigraph2_different_labels(self):
+        G1 = nx.MultiGraph()
+
+        mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
+        edges1 = [
+            (1, 2),
+            (1, 2),
+            (1, 5),
+            (1, 5),
+            (1, 5),
+            (5, 6),
+            (2, 3),
+            (2, 3),
+            (2, 4),
+            (3, 4),
+            (3, 4),
+            (4, 5),
+            (4, 5),
+            (4, 5),
+            (2, 7),
+            (2, 7),
+            (2, 7),
+        ]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Re-structure G1
+        G1.remove_edge(2, 7)
+        G1.add_edge(5, 6)
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Same for G2
+        G2.remove_edge("B", "C")
+        G2.add_edge("G", "F")
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Delete node from G1 and G2, keeping them isomorphic
+        G1.remove_node(3)
+        G2.remove_node("D")
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Change G1 edges
+        G1.remove_edge(1, 2)
+        G1.remove_edge(2, 7)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Make G2 identical to G1, but with different edge orientation and different labels
+        G2.add_edges_from([("A", "C"), ("C", "E"), ("C", "E")])
+        G2.remove_edges_from(
+            [("A", "G"), ("A", "G"), ("F", "G"), ("E", "G"), ("E", "G")]
+        )
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Make all labels the same, so G1 and G2 are also isomorphic
+        for n1, n2 in zip(G1.nodes(), G2.nodes()):
+            G1.nodes[n1]["label"] = "blue"
+            G2.nodes[n2]["label"] = "blue"
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+    def test_custom_multigraph3_same_labels(self):
+        G1 = nx.MultiGraph()
+
+        mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
+        edges1 = [
+            (1, 2),
+            (1, 3),
+            (1, 3),
+            (2, 3),
+            (2, 3),
+            (3, 4),
+            (4, 5),
+            (4, 7),
+            (4, 9),
+            (4, 9),
+            (4, 9),
+            (5, 8),
+            (5, 8),
+            (8, 9),
+            (8, 9),
+            (5, 6),
+            (6, 7),
+            (6, 7),
+            (6, 7),
+            (5, 2),
+        ]
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Connect nodes maintaining symmetry
+        G1.add_edges_from([(6, 9), (7, 8), (5, 8), (4, 9), (4, 9)])
+        G2.add_edges_from(
+            [
+                (mapped[6], mapped[8]),
+                (mapped[7], mapped[9]),
+                (mapped[5], mapped[8]),
+                (mapped[4], mapped[9]),
+                (mapped[4], mapped[9]),
+            ]
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Make isomorphic
+        G1.add_edges_from([(6, 8), (6, 8), (7, 9), (7, 9), (7, 9)])
+        G2.add_edges_from(
+            [
+                (mapped[6], mapped[8]),
+                (mapped[6], mapped[9]),
+                (mapped[7], mapped[8]),
+                (mapped[7], mapped[9]),
+                (mapped[7], mapped[9]),
+            ]
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Connect more nodes
+        G1.add_edges_from([(2, 7), (2, 7), (3, 6), (3, 6)])
+        G2.add_edges_from(
+            [
+                (mapped[2], mapped[7]),
+                (mapped[2], mapped[7]),
+                (mapped[3], mapped[6]),
+                (mapped[3], mapped[6]),
+            ]
+        )
+        G1.add_node(10)
+        G2.add_node("Z")
+        G1.nodes[10]["label"] = "blue"
+        G2.nodes["Z"]["label"] = "blue"
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Connect the newly added node to opposite sides of the graph
+        G1.add_edges_from([(10, 1), (10, 5), (10, 8), (10, 10), (10, 10)])
+        G2.add_edges_from(
+            [
+                ("Z", mapped[1]),
+                ("Z", mapped[4]),
+                ("Z", mapped[9]),
+                ("Z", "Z"),
+                ("Z", "Z"),
+            ]
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # We connected the new node to opposite sides, so G1 must be made symmetrical to G2. Re-structure them so that it is
+        G1.remove_edges_from([(1, 3), (4, 9), (4, 9), (7, 9)])
+        G2.remove_edges_from(
+            [
+                (mapped[1], mapped[3]),
+                (mapped[4], mapped[9]),
+                (mapped[4], mapped[9]),
+                (mapped[7], mapped[9]),
+            ]
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Get two subgraphs that are not isomorphic but are easy to make isomorphic
+        H1 = nx.Graph(G1.subgraph([2, 3, 4, 5, 6, 7, 10]))
+        H2 = nx.Graph(
+            G2.subgraph(
+                [mapped[4], mapped[5], mapped[6], mapped[7], mapped[8], mapped[9], "Z"]
+            )
+        )
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+        # Restructure both to make them isomorphic
+        H1.add_edges_from([(10, 2), (10, 6), (3, 6), (2, 7), (2, 6), (3, 7)])
+        H2.add_edges_from(
+            [("Z", mapped[7]), (mapped[6], mapped[9]), (mapped[7], mapped[8])]
+        )
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Remove one self-loop in H2
+        H2.remove_edge("Z", "Z")
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+        # Compensate in H1
+        H1.remove_edge(10, 10)
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+    def test_custom_multigraph3_different_labels(self):
+        G1 = nx.MultiGraph()
+
+        mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
+        edges1 = [
+            (1, 2),
+            (1, 3),
+            (1, 3),
+            (2, 3),
+            (2, 3),
+            (3, 4),
+            (4, 5),
+            (4, 7),
+            (4, 9),
+            (4, 9),
+            (4, 9),
+            (5, 8),
+            (5, 8),
+            (8, 9),
+            (8, 9),
+            (5, 6),
+            (6, 7),
+            (6, 7),
+            (6, 7),
+            (5, 2),
+        ]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Delete edge maintaining isomorphism
+        G1.remove_edge(4, 9)
+        G2.remove_edge(4, 6)
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Change edge orientation such that G1 mirrors G2
+        G1.add_edges_from([(4, 9), (1, 2), (1, 2)])
+        G1.remove_edges_from([(1, 3), (1, 3)])
+        G2.add_edges_from([(3, 5), (7, 9)])
+        G2.remove_edge(8, 9)
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Make all labels the same, so G1 and G2 are also isomorphic
+        for n1, n2 in zip(G1.nodes(), G2.nodes()):
+            G1.nodes[n1]["label"] = "blue"
+            G2.nodes[n2]["label"] = "blue"
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        G1.add_node(10)
+        G2.add_node("Z")
+        G1.nodes[10]["label"] = "green"
+        G2.nodes["Z"]["label"] = "green"
+
+        # Add a different number of self-loop edges to the new node in each graph
+        G1.add_edges_from([(10, 10), (10, 10)])
+        G2.add_edges_from([("Z", "Z")])
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Make the number of self-edges equal
+        G1.remove_edge(10, 10)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Connect the new node to the graph
+        G1.add_edges_from([(10, 3), (10, 4)])
+        G2.add_edges_from([("Z", 8), ("Z", 3)])
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Remove central node
+        G1.remove_node(4)
+        G2.remove_node(3)
+        G1.add_edges_from([(5, 6), (5, 6), (5, 7)])
+        G2.add_edges_from([(1, 6), (1, 6), (6, 2)])
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+    def test_custom_multigraph4_same_labels(self):
+        G1 = nx.MultiGraph()
+        edges1 = [
+            (1, 2),
+            (1, 2),
+            (2, 2),
+            (2, 3),
+            (3, 8),
+            (3, 8),
+            (3, 4),
+            (4, 5),
+            (4, 5),
+            (4, 5),
+            (4, 6),
+            (3, 6),
+            (3, 6),
+            (6, 6),
+            (8, 7),
+            (7, 7),
+            (8, 9),
+            (9, 9),
+            (8, 9),
+            (8, 9),
+            (5, 9),
+            (10, 11),
+            (11, 12),
+            (12, 13),
+            (11, 13),
+            (10, 10),
+            (10, 11),
+            (11, 13),
+        ]
+
+        mapped = {
+            1: "n",
+            2: "m",
+            3: "l",
+            4: "j",
+            5: "k",
+            6: "i",
+            7: "g",
+            8: "h",
+            9: "f",
+            10: "b",
+            11: "a",
+            12: "d",
+            13: "e",
+        }
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Add extra but corresponding edges to both graphs
+        G1.add_edges_from([(2, 2), (2, 3), (2, 8), (3, 4)])
+        G2.add_edges_from([("m", "m"), ("m", "l"), ("m", "h"), ("l", "j")])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Obtain subgraphs
+        H1 = nx.MultiGraph(G1.subgraph([2, 3, 4, 6, 10, 11, 12, 13]))
+        H2 = nx.MultiGraph(
+            G2.subgraph(
+                [
+                    mapped[2],
+                    mapped[3],
+                    mapped[8],
+                    mapped[9],
+                    mapped[10],
+                    mapped[11],
+                    mapped[12],
+                    mapped[13],
+                ]
+            )
+        )
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+        # Make them isomorphic
+        H2.remove_edges_from(
+            [(mapped[3], mapped[2]), (mapped[9], mapped[8]), (mapped[2], mapped[2])]
+        )
+        H2.add_edges_from([(mapped[9], mapped[9]), (mapped[2], mapped[8])])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Re-structure the disconnected sub-graph
+        H1.remove_node(12)
+        H2.remove_node(mapped[12])
+        H1.add_edge(13, 13)
+        H2.add_edge(mapped[13], mapped[13])
+
+        # Connect the two disconnected components, forming a single graph
+        H1.add_edges_from([(3, 13), (6, 11)])
+        H2.add_edges_from([(mapped[8], mapped[10]), (mapped[2], mapped[11])])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Change orientation of self-loops in one graph, maintaining the degree sequence
+        H1.remove_edges_from([(2, 2), (3, 6)])
+        H1.add_edges_from([(6, 6), (2, 3)])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+    def test_custom_multigraph4_different_labels(self):
+        G1 = nx.MultiGraph()
+        edges1 = [
+            (1, 2),
+            (1, 2),
+            (2, 2),
+            (2, 3),
+            (3, 8),
+            (3, 8),
+            (3, 4),
+            (4, 5),
+            (4, 5),
+            (4, 5),
+            (4, 6),
+            (3, 6),
+            (3, 6),
+            (6, 6),
+            (8, 7),
+            (7, 7),
+            (8, 9),
+            (9, 9),
+            (8, 9),
+            (8, 9),
+            (5, 9),
+            (10, 11),
+            (11, 12),
+            (12, 13),
+            (11, 13),
+        ]
+
+        mapped = {
+            1: "n",
+            2: "m",
+            3: "l",
+            4: "j",
+            5: "k",
+            6: "i",
+            7: "g",
+            8: "h",
+            9: "f",
+            10: "b",
+            11: "a",
+            12: "d",
+            13: "e",
+        }
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m == mapped
+
+        # Add extra but corresponding edges to both graphs
+        G1.add_edges_from([(2, 2), (2, 3), (2, 8), (3, 4)])
+        G2.add_edges_from([("m", "m"), ("m", "l"), ("m", "h"), ("l", "j")])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m == mapped
+
+        # Obtain isomorphic subgraphs
+        H1 = nx.MultiGraph(G1.subgraph([2, 3, 4, 6]))
+        H2 = nx.MultiGraph(G2.subgraph(["m", "l", "j", "i"]))
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Delete the 3-clique, keeping only the path-graph. Also, H1 mirrors H2
+        H1.remove_node(4)
+        H2.remove_node("j")
+        H1.remove_edges_from([(2, 2), (2, 3), (6, 6)])
+        H2.remove_edges_from([("l", "i"), ("m", "m"), ("m", "m")])
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+        # Assign the same labels so that mirroring means isomorphic
+        for n1, n2 in zip(H1.nodes(), H2.nodes()):
+            H1.nodes[n1]["label"] = "red"
+            H2.nodes[n2]["label"] = "red"
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Leave only one node with self-loop
+        H1.remove_nodes_from([3, 6])
+        H2.remove_nodes_from(["m", "l"])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Remove one self-loop from H1
+        H1.remove_edge(2, 2)
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+        # Same for H2
+        H2.remove_edge("i", "i")
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Compose H1 with the disconnected sub-graph of G1. Same for H2
+        S1 = nx.compose(H1, nx.MultiGraph(G1.subgraph([10, 11, 12, 13])))
+        S2 = nx.compose(H2, nx.MultiGraph(G2.subgraph(["a", "b", "d", "e"])))
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Connect the two components
+        S1.add_edges_from([(13, 13), (13, 13), (2, 13)])
+        S2.add_edges_from([("a", "a"), ("a", "a"), ("i", "e")])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+    def test_custom_multigraph5_same_labels(self):
+        G1 = nx.MultiGraph()
+
+        edges1 = [
+            (1, 5),
+            (1, 2),
+            (1, 4),
+            (2, 3),
+            (2, 6),
+            (3, 4),
+            (3, 7),
+            (4, 8),
+            (5, 8),
+            (5, 6),
+            (6, 7),
+            (7, 8),
+        ]
+        mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Add multiple edges and self-loops, maintaining isomorphism
+        G1.add_edges_from(
+            [(1, 2), (1, 2), (3, 7), (8, 8), (8, 8), (7, 8), (2, 3), (5, 6)]
+        )
+        G2.add_edges_from(
+            [
+                ("a", "h"),
+                ("a", "h"),
+                ("d", "j"),
+                ("c", "c"),
+                ("c", "c"),
+                ("j", "c"),
+                ("d", "h"),
+                ("g", "b"),
+            ]
+        )
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Make G2 a rotated version of G1
+        G2.remove_edges_from(
+            [
+                ("a", "h"),
+                ("a", "h"),
+                ("d", "j"),
+                ("c", "c"),
+                ("c", "c"),
+                ("j", "c"),
+                ("d", "h"),
+                ("g", "b"),
+            ]
+        )
+        G2.add_edges_from(
+            [
+                ("d", "i"),
+                ("a", "h"),
+                ("g", "b"),
+                ("g", "b"),
+                ("i", "i"),
+                ("i", "i"),
+                ("b", "j"),
+                ("d", "j"),
+            ]
+        )
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+    def test_disconnected_multigraph_all_same_labels(self):
+        G1 = nx.MultiGraph()
+        G1.add_nodes_from(list(range(10)))
+        G1.add_edges_from([(i, i) for i in range(10)])
+
+        mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Add extra self-loops to some G1 nodes only; the graphs won't match
+        # until G2 is compensated (since the graph is disconnected and all
+        # labels are the same, it doesn't matter which G2 nodes get the loops)
+        G1.add_edges_from([(i, i) for i in range(5, 8)] * 3)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Compensate in G2
+        G2.add_edges_from([(i, i) for i in range(3)] * 3)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Add one more self-loop in G2
+        G2.add_edges_from([(0, 0), (1, 1), (1, 1)])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Compensate in G1
+        G1.add_edges_from([(5, 5), (7, 7), (7, 7)])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+    def test_disconnected_multigraph_all_different_labels(self):
+        G1 = nx.MultiGraph()
+        G1.add_nodes_from(list(range(10)))
+        G1.add_edges_from([(i, i) for i in range(10)])
+
+        mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Add self-loops to non-mapped nodes. Now it is not the same, as there are different labels
+        G1.add_edges_from([(i, i) for i in range(5, 8)] * 3)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Add self-loops to non-mapped nodes in G2 as well
+        G2.add_edges_from([(mapped[i], mapped[i]) for i in range(3)] * 7)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Add self-loops to mapped nodes in G2
+        G2.add_edges_from([(mapped[i], mapped[i]) for i in range(5, 8)] * 3)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Add self-loops to G1 so that the self-loop counts match in both graphs
+        G1.add_edges_from([(i, i) for i in range(3)] * 7)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+
+class TestDiGraphISOVF2pp:
+    def test_wikipedia_graph(self):
+        edges1 = [
+            (1, 5),
+            (1, 2),
+            (1, 4),
+            (3, 2),
+            (6, 2),
+            (3, 4),
+            (7, 3),
+            (4, 8),
+            (5, 8),
+            (6, 5),
+            (6, 7),
+            (7, 8),
+        ]
+        mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}
+
+        G1 = nx.DiGraph(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        assert vf2pp_isomorphism(G1, G2) == mapped
+
+        # Change the direction of an edge
+        G1.remove_edge(1, 5)
+        G1.add_edge(5, 1)
+        assert vf2pp_isomorphism(G1, G2) is None
+
+    def test_non_isomorphic_same_degree_sequence(self):
+        r"""
+                G1                           G2
+        x--------------x              x--------------x
+        | \            |              | \            |
+        |  x-------x   |              |  x-------x   |
+        |  |       |   |              |  |       |   |
+        |  x-------x   |              |  x-------x   |
+        | /            |              |            \ |
+        x--------------x              x--------------x
+        """
+        edges1 = [
+            (1, 5),
+            (1, 2),
+            (4, 1),
+            (3, 2),
+            (3, 4),
+            (4, 8),
+            (5, 8),
+            (6, 5),
+            (6, 7),
+            (7, 8),
+        ]
+        edges2 = [
+            (1, 5),
+            (1, 2),
+            (4, 1),
+            (3, 2),
+            (4, 3),
+            (5, 8),
+            (6, 5),
+            (6, 7),
+            (3, 7),
+            (8, 7),
+        ]
+
+        G1 = nx.DiGraph(edges1)
+        G2 = nx.DiGraph(edges2)
+        assert vf2pp_isomorphism(G1, G2) is None
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py
new file mode 100644
index 00000000..0e29b1be
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py
@@ -0,0 +1,3106 @@
+import itertools as it
+
+import pytest
+
+import networkx as nx
+from networkx import vf2pp_is_isomorphic, vf2pp_isomorphism
+from networkx.algorithms.isomorphism.vf2pp import (
+    _consistent_PT,
+    _cut_PT,
+    _feasibility,
+    _find_candidates,
+    _find_candidates_Di,
+    _GraphParameters,
+    _initialize_parameters,
+    _matching_order,
+    _restore_Tinout,
+    _restore_Tinout_Di,
+    _StateParameters,
+    _update_Tinout,
+)
+
+labels_same = ["blue"]
+
+labels_many = [
+    "white",
+    "red",
+    "blue",
+    "green",
+    "orange",
+    "black",
+    "purple",
+    "yellow",
+    "brown",
+    "cyan",
+    "solarized",
+    "pink",
+    "none",
+]
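+
+
+# The helpers imported above are private (underscore-prefixed), so the sketch
+# below only mirrors how the tests in this file construct them and is not an
+# API reference: _GraphParameters bundles the two graphs, their label dicts,
+# the labels grouped by value and the G2 nodes grouped by degree.
+#
+#     l1 = nx.get_node_attributes(G1, "label")
+#     l2 = nx.get_node_attributes(G2, "label")
+#     gparams = _GraphParameters(
+#         G1, G2, l1, l2,
+#         nx.utils.groups(l1), nx.utils.groups(l2),
+#         nx.utils.groups(dict(G2.degree())),
+#     )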
+
+
+class TestNodeOrdering:
+    def test_empty_graph(self):
+        G1 = nx.Graph()
+        G2 = nx.Graph()
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        assert len(set(_matching_order(gparams))) == 0
+
+    def test_single_node(self):
+        G1 = nx.Graph()
+        G2 = nx.Graph()
+        G1.add_node(1)
+        G2.add_node(1)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip(G2, it.cycle(labels_many))),
+            "label",
+        )
+        l1, l2 = (
+            nx.get_node_attributes(G1, "label"),
+            nx.get_node_attributes(G2, "label"),
+        )
+
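+        # _GraphParameters bundles the two graphs, their label dicts, the
+        # label-to-nodes groupings, and the degree-to-nodes grouping of G2.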
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+        m = _matching_order(gparams)
+        assert m == [1]
+
+    def test_matching_order(self):
+        labels = [
+            "blue",
+            "blue",
+            "red",
+            "red",
+            "red",
+            "red",
+            "green",
+            "green",
+            "green",
+            "yellow",
+            "purple",
+            "purple",
+            "blue",
+            "blue",
+        ]
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (0, 2),
+                (1, 2),
+                (2, 5),
+                (2, 4),
+                (1, 3),
+                (1, 4),
+                (3, 6),
+                (4, 6),
+                (6, 7),
+                (7, 8),
+                (9, 10),
+                (9, 11),
+                (11, 12),
+                (11, 13),
+                (12, 13),
+                (10, 13),
+            ]
+        )
+        G2 = G1.copy()
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip(G2, it.cycle(labels))),
+            "label",
+        )
+        l1, l2 = (
+            nx.get_node_attributes(G1, "label"),
+            nx.get_node_attributes(G2, "label"),
+        )
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        expected = [9, 11, 10, 13, 12, 1, 2, 4, 0, 3, 6, 5, 7, 8]
+        assert _matching_order(gparams) == expected
+
+    def test_matching_order_all_branches(self):
+        G1 = nx.Graph(
+            [(0, 1), (0, 2), (0, 3), (0, 4), (1, 2), (1, 3), (1, 4), (2, 4), (3, 4)]
+        )
+        G1.add_node(5)
+        G2 = G1.copy()
+
+        G1.nodes[0]["label"] = "black"
+        G1.nodes[1]["label"] = "blue"
+        G1.nodes[2]["label"] = "blue"
+        G1.nodes[3]["label"] = "red"
+        G1.nodes[4]["label"] = "red"
+        G1.nodes[5]["label"] = "blue"
+
+        G2.nodes[0]["label"] = "black"
+        G2.nodes[1]["label"] = "blue"
+        G2.nodes[2]["label"] = "blue"
+        G2.nodes[3]["label"] = "red"
+        G2.nodes[4]["label"] = "red"
+        G2.nodes[5]["label"] = "blue"
+
+        l1, l2 = (
+            nx.get_node_attributes(G1, "label"),
+            nx.get_node_attributes(G2, "label"),
+        )
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        expected = [0, 4, 1, 3, 2, 5]
+        assert _matching_order(gparams) == expected
+
+
+class TestGraphCandidateSelection:
+    G1_edges = [
+        (1, 2),
+        (1, 4),
+        (1, 5),
+        (2, 3),
+        (2, 4),
+        (3, 4),
+        (4, 5),
+        (1, 6),
+        (6, 7),
+        (6, 8),
+        (8, 9),
+        (7, 9),
+    ]
+    mapped = {
+        0: "x",
+        1: "a",
+        2: "b",
+        3: "c",
+        4: "d",
+        5: "e",
+        6: "f",
+        7: "g",
+        8: "h",
+        9: "i",
+    }
+
+    def test_no_covered_neighbors_no_labels(self):
+        G1 = nx.Graph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = dict(G1.degree)
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
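+        # nx.utils.groups inverts a dict into {value: {keys with that value}},
+        # giving fast label- and degree-based lookups.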
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1 = {7, 8, 2, 4, 5}
+        T1_tilde = {0, 3, 6}
+        T2 = {"g", "h", "b", "d", "e"}
+        T2_tilde = {"x", "c", "f"}
+
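+        # _StateParameters holds the partial mapping, its reverse, and the T1/T2
+        # frontier sets; the *_in slots (None here) are only used for directed graphs.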
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        u = 3
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 0
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        m.pop(9)
+        m_rev.pop(self.mapped[9])
+
+        T1 = {2, 4, 5, 6}
+        T1_tilde = {0, 3, 7, 8, 9}
+        T2 = {"g", "h", "b", "d", "e", "f"}
+        T2_tilde = {"x", "c", "g", "h", "i"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        u = 7
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {
+            self.mapped[u],
+            self.mapped[8],
+            self.mapped[3],
+            self.mapped[9],
+        }
+
+    def test_no_covered_neighbors_with_labels(self):
+        G1 = nx.Graph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = dict(G1.degree)
+        nx.set_node_attributes(
+            G1,
+            dict(zip(G1, it.cycle(labels_many))),
+            "label",
+        )
+        nx.set_node_attributes(
+            G2,
+            dict(
+                zip(
+                    [self.mapped[n] for n in G1],
+                    it.cycle(labels_many),
+                )
+            ),
+            "label",
+        )
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1 = {7, 8, 2, 4, 5, 6}
+        T1_tilde = {0, 3}
+        T2 = {"g", "h", "b", "d", "e", "f"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        u = 3
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 0
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        # Change label of disconnected node
+        G1.nodes[u]["label"] = "blue"
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        # No candidate
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == set()
+
+        m.pop(9)
+        m_rev.pop(self.mapped[9])
+
+        T1 = {2, 4, 5, 6}
+        T1_tilde = {0, 3, 7, 8, 9}
+        T2 = {"b", "d", "e", "f"}
+        T2_tilde = {"x", "c", "g", "h", "i"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        u = 7
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        G1.nodes[8]["label"] = G1.nodes[7]["label"]
+        G2.nodes[self.mapped[8]]["label"] = G1.nodes[7]["label"]
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u], self.mapped[8]}
+
+    def test_covered_neighbors_no_labels(self):
+        G1 = nx.Graph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = dict(G1.degree)
+        l1 = dict(G1.nodes(data=None, default=-1))
+        l2 = dict(G2.nodes(data=None, default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1 = {7, 8, 2, 4, 5, 6}
+        T1_tilde = {0, 3}
+        T2 = {"g", "h", "b", "d", "e", "f"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        u = 5
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 6
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u], self.mapped[2]}
+
+    def test_covered_neighbors_with_labels(self):
+        G1 = nx.Graph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = dict(G1.degree)
+        nx.set_node_attributes(
+            G1,
+            dict(zip(G1, it.cycle(labels_many))),
+            "label",
+        )
+        nx.set_node_attributes(
+            G2,
+            dict(
+                zip(
+                    [self.mapped[n] for n in G1],
+                    it.cycle(labels_many),
+                )
+            ),
+            "label",
+        )
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1 = {7, 8, 2, 4, 5, 6}
+        T1_tilde = {0, 3}
+        T2 = {"g", "h", "b", "d", "e", "f"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        u = 5
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 6
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        # Assign to node 2 the same label as node 6
+        G1.nodes[2]["label"] = G1.nodes[u]["label"]
+        G2.nodes[self.mapped[2]]["label"] = G1.nodes[u]["label"]
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u], self.mapped[2]}
+
+
+class TestDiGraphCandidateSelection:
+    G1_edges = [
+        (1, 2),
+        (1, 4),
+        (5, 1),
+        (2, 3),
+        (4, 2),
+        (3, 4),
+        (4, 5),
+        (1, 6),
+        (6, 7),
+        (6, 8),
+        (8, 9),
+        (7, 9),
+    ]
+    mapped = {
+        0: "x",
+        1: "a",
+        2: "b",
+        3: "c",
+        4: "d",
+        5: "e",
+        6: "f",
+        7: "g",
+        8: "h",
+        9: "i",
+    }
+
+    def test_no_covered_neighbors_no_labels(self):
+        G1 = nx.DiGraph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
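+        # For directed graphs, per-node degrees are kept as (in_degree, out_degree) pairs.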
+        G1_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
+        }
+
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1_out = {2, 4, 6}
+        T1_in = {5, 7, 8}
+        T1_tilde = {0, 3}
+        T2_out = {"b", "d", "f"}
+        T2_in = {"e", "g", "h"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 3
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 0
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        m.pop(9)
+        m_rev.pop(self.mapped[9])
+
+        T1_out = {2, 4, 6}
+        T1_in = {5}
+        T1_tilde = {0, 3, 7, 8, 9}
+        T2_out = {"b", "d", "f"}
+        T2_in = {"e"}
+        T2_tilde = {"x", "c", "g", "h", "i"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 7
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u], self.mapped[8], self.mapped[3]}
+
+    def test_no_covered_neighbors_with_labels(self):
+        G1 = nx.DiGraph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
+        }
+        nx.set_node_attributes(
+            G1,
+            dict(zip(G1, it.cycle(labels_many))),
+            "label",
+        )
+        nx.set_node_attributes(
+            G2,
+            dict(
+                zip(
+                    [self.mapped[n] for n in G1],
+                    it.cycle(labels_many),
+                )
+            ),
+            "label",
+        )
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1_out = {2, 4, 6}
+        T1_in = {5, 7, 8}
+        T1_tilde = {0, 3}
+        T2_out = {"b", "d", "f"}
+        T2_in = {"e", "g", "h"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 3
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 0
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        # Change label of disconnected node
+        G1.nodes[u]["label"] = "blue"
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        # No candidate
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == set()
+
+        m.pop(9)
+        m_rev.pop(self.mapped[9])
+
+        T1_out = {2, 4, 6}
+        T1_in = {5}
+        T1_tilde = {0, 3, 7, 8, 9}
+        T2_out = {"b", "d", "f"}
+        T2_in = {"e"}
+        T2_tilde = {"x", "c", "g", "h", "i"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 7
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        G1.nodes[8]["label"] = G1.nodes[7]["label"]
+        G2.nodes[self.mapped[8]]["label"] = G1.nodes[7]["label"]
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u], self.mapped[8]}
+
+    def test_covered_neighbors_no_labels(self):
+        G1 = nx.DiGraph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
+        }
+
+        l1 = dict(G1.nodes(data=None, default=-1))
+        l2 = dict(G2.nodes(data=None, default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1_out = {2, 4, 6}
+        T1_in = {5, 7, 8}
+        T1_tilde = {0, 3}
+        T2_out = {"b", "d", "f"}
+        T2_in = {"e", "g", "h"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 5
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 6
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        # Change the direction of an edge so that the degree orientation matches that of the first candidate of u.
+        G1.remove_edge(4, 2)
+        G1.add_edge(2, 4)
+        G2.remove_edge("d", "b")
+        G2.add_edge("b", "d")
+
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u], self.mapped[2]}
+
+    def test_covered_neighbors_with_labels(self):
+        G1 = nx.DiGraph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1.remove_edge(4, 2)
+        G1.add_edge(2, 4)
+        G2.remove_edge("d", "b")
+        G2.add_edge("b", "d")
+
+        G1_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
+        }
+
+        nx.set_node_attributes(
+            G1,
+            dict(zip(G1, it.cycle(labels_many))),
+            "label",
+        )
+        nx.set_node_attributes(
+            G2,
+            dict(
+                zip(
+                    [self.mapped[n] for n in G1],
+                    it.cycle(labels_many),
+                )
+            ),
+            "label",
+        )
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1_out = {2, 4, 6}
+        T1_in = {5, 7, 8}
+        T1_tilde = {0, 3}
+        T2_out = {"b", "d", "f"}
+        T2_in = {"e", "g", "h"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 5
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 6
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        # Assign to node 2 the same label as node 6
+        G1.nodes[2]["label"] = G1.nodes[u]["label"]
+        G2.nodes[self.mapped[2]]["label"] = G1.nodes[u]["label"]
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u], self.mapped[2]}
+
+        # Change the direction of an edge so that the degree orientation matches that of the first candidate of u.
+        G1.remove_edge(2, 4)
+        G1.add_edge(4, 2)
+        G2.remove_edge("b", "d")
+        G2.add_edge("d", "b")
+
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+    def test_same_in_out_degrees_no_candidate(self):
+        g1 = nx.DiGraph([(4, 1), (4, 2), (3, 4), (5, 4), (6, 4)])
+        g2 = nx.DiGraph([(1, 4), (2, 4), (3, 4), (4, 5), (4, 6)])
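+        # Node 4 has the same (in, out) degree pair in both graphs, but its edges to
+        # the already-mapped nodes 1 and 2 are oriented in opposite directions.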
+
+        l1 = dict(g1.nodes(data=None, default=-1))
+        l2 = dict(g2.nodes(data=None, default=-1))
+        gparams = _GraphParameters(
+            g1,
+            g2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        g2.in_degree(), g2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        g1_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(g1.in_degree, g1.out_degree)
+        }
+
+        m = {1: 1, 2: 2, 3: 3}
+        m_rev = m.copy()
+
+        T1_out = {4}
+        T1_in = {4}
+        T1_tilde = {5, 6}
+        T2_out = {4}
+        T2_in = {4}
+        T2_tilde = {5, 6}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 4
+        # despite the same in and out degree, there's no candidate for u=4
+        candidates = _find_candidates_Di(u, gparams, sparams, g1_degree)
+        assert candidates == set()
+        # Notice how the regular (undirected) candidate selection method returns a wrong result.
+        assert _find_candidates(u, gparams, sparams, g1_degree) == {4}
+
+
+class TestGraphISOFeasibility:
+    def test_const_covered_neighbors(self):
+        G1 = nx.Graph([(0, 1), (1, 2), (3, 0), (3, 2)])
+        G2 = nx.Graph([("a", "b"), ("b", "c"), ("k", "a"), ("k", "c")])
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_no_covered_neighbors(self):
+        G1 = nx.Graph([(0, 1), (1, 2), (3, 4), (3, 5)])
+        G2 = nx.Graph([("a", "b"), ("b", "c"), ("k", "w"), ("k", "z")])
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_mixed_covered_uncovered_neighbors(self):
+        G1 = nx.Graph([(0, 1), (1, 2), (3, 0), (3, 2), (3, 4), (3, 5)])
+        G2 = nx.Graph(
+            [("a", "b"), ("b", "c"), ("k", "a"), ("k", "c"), ("k", "w"), ("k", "z")]
+        )
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_fail_cases(self):
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (10, 5),
+                (10, 6),
+                (4, 1),
+                (5, 3),
+            ]
+        )
+        G2 = nx.Graph(
+            [
+                ("a", "b"),
+                ("b", "c"),
+                ("k", "a"),
+                ("k", "d"),
+                ("k", "e"),
+                ("k", "f"),
+                ("k", "g"),
+                ("e", "b"),
+                ("f", "d"),
+            ]
+        )
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 10, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Delete one uncovered neighbor of u. Notice how it still passes the test.
+        # Two reasons for this:
+        #   1. If u, v had different degrees from the beginning, they wouldn't
+        #      be selected as candidates in the first place.
+        #   2. Even if they are selected, consistency is basically 1-look-ahead,
+        #      meaning that we take into consideration the relation of the
+        #      candidates with their mapped neighbors. The node we deleted is
+        #      not a covered neighbor.
+        #      Such nodes will be checked by the cut_PT function, which is
+        #      basically the 2-look-ahead, checking the relation of the
+        #      candidates with T1, T2 (in which belongs the node we just deleted).
+        G1.remove_node(6)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Add one more covered neighbor of u in G1
+        G1.add_edge(u, 2)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.add_edge(v, "c")
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Add one more covered neighbor of v in G2
+        G2.add_edge(v, "x")
+        G1.add_node(7)
+        sparams.mapping.update({7: "x"})
+        sparams.reverse_mapping.update({"x": 7})
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.add_edge(u, 7)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    @pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph))
+    def test_cut_inconsistent_labels(self, graph_type):
+        G1 = graph_type(
+            [
+                (0, 1),
+                (1, 2),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (10, 5),
+                (10, 6),
+                (4, 1),
+                (5, 3),
+            ]
+        )
+        G2 = graph_type(
+            [
+                ("a", "b"),
+                ("b", "c"),
+                ("k", "a"),
+                ("k", "d"),
+                ("k", "e"),
+                ("k", "f"),
+                ("k", "g"),
+                ("e", "b"),
+                ("f", "d"),
+            ]
+        )
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        l1.update({6: "green"})  # Change the label of one neighbor of u
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+
+        u, v = 10, "k"
+        assert _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_consistent_labels(self):
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (10, 5),
+                (10, 6),
+                (4, 1),
+                (5, 3),
+            ]
+        )
+        G2 = nx.Graph(
+            [
+                ("a", "b"),
+                ("b", "c"),
+                ("k", "a"),
+                ("k", "d"),
+                ("k", "e"),
+                ("k", "f"),
+                ("k", "g"),
+                ("e", "b"),
+                ("f", "d"),
+            ]
+        )
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5},
+            None,
+            {6},
+            None,
+            {"e", "f"},
+            None,
+            {"g"},
+            None,
+        )
+
+        u, v = 10, "k"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_same_labels(self):
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (10, 5),
+                (10, 6),
+                (4, 1),
+                (5, 3),
+            ]
+        )
+        mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"}
+        G2 = nx.relabel_nodes(G1, mapped)
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5},
+            None,
+            {6},
+            None,
+            {"e", "f"},
+            None,
+            {"g"},
+            None,
+        )
+
+        u, v = 10, "k"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change intersection between G1[u] and T1, so it's not the same as the one between G2[v] and T2
+        G1.remove_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.remove_edge(v, mapped[4])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change intersection between G2[v] and T2_tilde, so it's not the same as the one between G1[u] and T1_tilde
+        G2.remove_edge(v, mapped[6])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.remove_edge(u, 6)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add disconnected nodes, which will form the new T1_tilde/T2_tilde
+        G1.add_nodes_from([6, 7, 8])
+        G2.add_nodes_from(["g", "y", "z"])
+        sparams.T1_tilde.update({6, 7, 8})
+        sparams.T2_tilde.update({"g", "y", "z"})
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add some new nodes to the mapping
+        sparams.mapping.update({6: "g", 7: "y"})
+        sparams.reverse_mapping.update({"g": 6, "y": 7})
+
+        # Add more nodes to T1, T2.
+        G1.add_edges_from([(6, 20), (7, 20), (6, 21)])
+        G2.add_edges_from([("g", "i"), ("g", "j"), ("y", "j")])
+
+        sparams.mapping.update({20: "j", 21: "i"})
+        sparams.reverse_mapping.update({"j": 20, "i": 21})
+        sparams.T1.update({20, 21})
+        sparams.T2.update({"i", "j"})
+        sparams.T1_tilde.difference_update({6, 7})
+        sparams.T2_tilde.difference_update({"g", "y"})
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add nodes from the new T1 and T2, as neighbors of u and v respectively
+        G1.add_edges_from([(u, 20), (u, 21)])
+        G2.add_edges_from([(v, "i"), (v, "j")])
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the edges, maintaining the G1[u]-T1 intersection
+        G1.remove_edge(u, 20)
+        G1.add_edge(u, 4)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Connect u to 8 which is still in T1_tilde
+        G1.add_edge(u, 8)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for v and z, so that the intersection of G2[v] with T2_tilde matches that of G1[u] with T1_tilde
+        G2.add_edge(v, "z")
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_different_labels(self):
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (1, 14),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (4, 10),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 3),
+                (20, 5),
+                (20, 0),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        l1 = {n: "none" for n in G1.nodes()}
+        l2 = {}
+
+        l1.update(
+            {
+                9: "blue",
+                15: "blue",
+                12: "blue",
+                11: "green",
+                3: "green",
+                8: "red",
+                0: "red",
+                5: "yellow",
+            }
+        )
+        l2.update({mapped[n]: l for n, l in l1.items()})
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 14},
+            None,
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "h", "o"},
+            None,
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the orientation of the labels on neighbors of u compared to neighbors of v. Leave the structure intact
+        l1.update({9: "red"})
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # compensate in G2
+        l2.update({mapped[9]: "red"})
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the intersection of G1[u] and T1
+        G1.add_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G2[v] and T2
+        G2.add_edge(v, mapped[4])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the intersection of G2[v] and T2_tilde
+        G2.remove_edge(v, mapped[8])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G1[u] and T1_tilde
+        G1.remove_edge(u, 8)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Place 8 and mapped[8] in T1 and T2 respectively, by connecting it to covered nodes
+        G1.add_edge(8, 3)
+        G2.add_edge(mapped[8], mapped[3])
+        sparams.T1.add(8)
+        sparams.T2.add(mapped[8])
+        sparams.T1_tilde.remove(8)
+        sparams.T2_tilde.remove(mapped[8])
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Remove neighbor of u from T1
+        G1.remove_node(5)
+        l1.pop(5)
+        sparams.T1.remove(5)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same in G2
+        G2.remove_node(mapped[5])
+        l2.pop(mapped[5])
+        sparams.T2.remove(mapped[5])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_feasibility_same_labels(self):
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (1, 14),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (4, 10),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 2),
+                (20, 5),
+                (20, 0),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {mapped[n]: "blue" for n in G1.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 14},
+            None,
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "h", "o"},
+            None,
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the structure in G2 so that ONLY consistency is harmed
+        G2.remove_edge(mapped[20], mapped[2])
+        G2.add_edge(mapped[20], mapped[3])
+
+        # Consistency check fails, while the cutting rules are satisfied!
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1 and make it consistent
+        G1.remove_edge(20, 2)
+        G1.add_edge(20, 3)
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # ONLY fail the cutting check
+        G2.add_edge(v, mapped[10])
+        assert _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_feasibility_different_labels(self):
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (1, 14),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (4, 10),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 2),
+                (20, 5),
+                (20, 0),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        l1 = {n: "none" for n in G1.nodes()}
+        l2 = {}
+
+        l1.update(
+            {
+                9: "blue",
+                15: "blue",
+                12: "blue",
+                11: "green",
+                2: "green",
+                8: "red",
+                0: "red",
+                5: "yellow",
+            }
+        )
+        l2.update({mapped[n]: l for n, l in l1.items()})
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 14},
+            None,
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "h", "o"},
+            None,
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the structure in G2 so that ONLY consistency is harmed
+        G2.remove_edge(mapped[20], mapped[2])
+        G2.add_edge(mapped[20], mapped[3])
+        l2.update({mapped[3]: "green"})
+
+        # Consistency check fails, while the cutting rules are satisfied!
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1 and make it consistent
+        G1.remove_edge(20, 2)
+        G1.add_edge(20, 3)
+        l1.update({3: "green"})
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # ONLY fail the cutting check
+        l1.update({5: "red"})
+        assert _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+
+class TestMultiGraphISOFeasibility:
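+    # These tests exercise how parallel-edge multiplicities affect _consistent_PT
+    # and _cut_PT on multigraphs.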
+    def test_const_covered_neighbors(self):
+        G1 = nx.MultiGraph(
+            [(0, 1), (0, 1), (1, 2), (3, 0), (3, 0), (3, 0), (3, 2), (3, 2)]
+        )
+        G2 = nx.MultiGraph(
+            [
+                ("a", "b"),
+                ("a", "b"),
+                ("b", "c"),
+                ("k", "a"),
+                ("k", "a"),
+                ("k", "a"),
+                ("k", "c"),
+                ("k", "c"),
+            ]
+        )
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_no_covered_neighbors(self):
+        G1 = nx.MultiGraph([(0, 1), (0, 1), (1, 2), (3, 4), (3, 4), (3, 5)])
+        G2 = nx.MultiGraph([("a", "b"), ("b", "c"), ("k", "w"), ("k", "w"), ("k", "z")])
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_mixed_covered_uncovered_neighbors(self):
+        G1 = nx.MultiGraph(
+            [(0, 1), (1, 2), (3, 0), (3, 0), (3, 0), (3, 2), (3, 2), (3, 4), (3, 5)]
+        )
+        G2 = nx.MultiGraph(
+            [
+                ("a", "b"),
+                ("b", "c"),
+                ("k", "a"),
+                ("k", "a"),
+                ("k", "a"),
+                ("k", "c"),
+                ("k", "c"),
+                ("k", "w"),
+                ("k", "z"),
+            ]
+        )
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_fail_cases(self):
+        G1 = nx.MultiGraph(
+            [
+                (0, 1),
+                (1, 2),
+                (10, 0),
+                (10, 0),
+                (10, 0),
+                (10, 3),
+                (10, 3),
+                (10, 4),
+                (10, 5),
+                (10, 6),
+                (10, 6),
+                (4, 1),
+                (5, 3),
+            ]
+        )
+        mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"}
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 10, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Delete one uncovered neighbor of u. Notice how it still passes the test. Two reasons for this:
+        # 1. If u, v had different degrees from the beginning, they wouldn't be selected as candidates in the first
+        #    place.
+        # 2. Even if they are selected, consistency is basically 1-look-ahead, meaning that we take into consideration
+        #    the relation of the candidates with their mapped neighbors. The node we deleted is not a covered neighbor.
+        #    Such nodes will be checked by the cut_PT function, which is basically the 2-look-ahead, checking the
+        #    relation of the candidates with T1, T2 (in which belongs the node we just deleted).
+        G1.remove_node(6)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Add one more covered neighbor of u in G1
+        G1.add_edge(u, 2)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.add_edge(v, "c")
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Add one more covered neighbor of v in G2
+        G2.add_edge(v, "x")
+        G1.add_node(7)
+        sparams.mapping.update({7: "x"})
+        sparams.reverse_mapping.update({"x": 7})
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.add_edge(u, 7)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Delete an edge between u and a covered neighbor
+        G1.remove_edges_from([(u, 0), (u, 0)])
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.remove_edges_from([(v, mapped[0]), (v, mapped[0])])
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Remove an edge between v and a covered neighbor
+        G2.remove_edge(v, mapped[3])
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.remove_edge(u, 3)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_cut_same_labels(self):
+        G1 = nx.MultiGraph(
+            [
+                (0, 1),
+                (1, 2),
+                (10, 0),
+                (10, 0),
+                (10, 0),
+                (10, 3),
+                (10, 3),
+                (10, 4),
+                (10, 4),
+                (10, 5),
+                (10, 5),
+                (10, 5),
+                (10, 5),
+                (10, 6),
+                (10, 6),
+                (4, 1),
+                (5, 3),
+            ]
+        )
+        mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"}
+        G2 = nx.relabel_nodes(G1, mapped)
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5},
+            None,
+            {6},
+            None,
+            {"e", "f"},
+            None,
+            {"g"},
+            None,
+        )
+
+        u, v = 10, "k"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Remove one of the multiple edges between u and a neighbor
+        G1.remove_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G1.remove_edge(u, 4)
+        G2.remove_edges_from([(v, mapped[4]), (v, mapped[4])])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change intersection between G2[v] and T2_tilde, so it's not the same as the one between G1[u] and T1_tilde
+        G2.remove_edge(v, mapped[6])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.remove_edge(u, 6)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add more edges between u and a neighbor which belongs to T1
+        G1.add_edges_from([(u, 5), (u, 5), (u, 5)])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.add_edges_from([(v, mapped[5]), (v, mapped[5]), (v, mapped[5])])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add disconnected nodes, which will form the new T1_tilde/T2_tilde
+        G1.add_nodes_from([6, 7, 8])
+        G2.add_nodes_from(["g", "y", "z"])
+        G1.add_edges_from([(u, 6), (u, 6), (u, 6), (u, 8)])
+        G2.add_edges_from([(v, "g"), (v, "g"), (v, "g"), (v, "z")])
+
+        sparams.T1_tilde.update({6, 7, 8})
+        sparams.T2_tilde.update({"g", "y", "z"})
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add some new nodes to the mapping
+        sparams.mapping.update({6: "g", 7: "y"})
+        sparams.reverse_mapping.update({"g": 6, "y": 7})
+
+        # Add more nodes to T1, T2.
+        G1.add_edges_from([(6, 20), (7, 20), (6, 21)])
+        G2.add_edges_from([("g", "i"), ("g", "j"), ("y", "j")])
+
+        sparams.T1.update({20, 21})
+        sparams.T2.update({"i", "j"})
+        sparams.T1_tilde.difference_update({6, 7})
+        sparams.T2_tilde.difference_update({"g", "y"})
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Remove some edges
+        G2.remove_edge(v, "g")
+        assert _cut_PT(u, v, gparams, sparams)
+
+        G1.remove_edge(u, 6)
+        G1.add_edge(u, 8)
+        G2.add_edge(v, "z")
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add nodes from the new T1 and T2, as neighbors of u and v respectively
+        G1.add_edges_from([(u, 20), (u, 20), (u, 20), (u, 21)])
+        G2.add_edges_from([(v, "i"), (v, "i"), (v, "i"), (v, "j")])
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the edges
+        G1.remove_edge(u, 20)
+        G1.add_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        G2.remove_edge(v, "i")
+        G2.add_edge(v, mapped[4])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_different_labels(self):
+        G1 = nx.MultiGraph(
+            [
+                (0, 1),
+                (0, 1),
+                (1, 2),
+                (1, 2),
+                (1, 14),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (4, 10),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 9),
+                (20, 9),
+                (20, 15),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (20, 11),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 8),
+                (20, 3),
+                (20, 3),
+                (20, 5),
+                (20, 5),
+                (20, 5),
+                (20, 0),
+                (20, 0),
+                (20, 0),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        l1 = {n: "none" for n in G1.nodes()}
+        l2 = {}
+
+        l1.update(
+            {
+                9: "blue",
+                15: "blue",
+                12: "blue",
+                11: "green",
+                3: "green",
+                8: "red",
+                0: "red",
+                5: "yellow",
+            }
+        )
+        l2.update({mapped[n]: l for n, l in l1.items()})
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 14},
+            None,
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "h", "o"},
+            None,
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the orientation of the labels on neighbors of u compared to neighbors of v. Leave the structure intact
+        l1.update({9: "red"})
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # compensate in G2
+        l2.update({mapped[9]: "red"})
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the intersection of G1[u] and T1
+        G1.add_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G2[v] and T2
+        G2.add_edge(v, mapped[4])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Delete one of the multiple edges
+        G2.remove_edge(v, mapped[8])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G1[u] and T1_tilde
+        G1.remove_edge(u, 8)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Place 8 and mapped[8] in T1 and T2 respectively, by connecting them to covered nodes
+        G1.add_edges_from([(8, 3), (8, 3), (8, u)])
+        G2.add_edges_from([(mapped[8], mapped[3]), (mapped[8], mapped[3])])
+        sparams.T1.add(8)
+        sparams.T2.add(mapped[8])
+        sparams.T1_tilde.remove(8)
+        sparams.T2_tilde.remove(mapped[8])
+
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Fix uneven edges
+        G1.remove_edge(8, u)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Remove neighbor of u from T1
+        G1.remove_node(5)
+        l1.pop(5)
+        sparams.T1.remove(5)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same in G2
+        G2.remove_node(mapped[5])
+        l2.pop(mapped[5])
+        sparams.T2.remove(mapped[5])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_feasibility_same_labels(self):
+        G1 = nx.MultiGraph(
+            [
+                (0, 1),
+                (0, 1),
+                (1, 2),
+                (1, 2),
+                (1, 14),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (4, 10),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 9),
+                (20, 9),
+                (20, 15),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (20, 11),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 8),
+                (20, 3),
+                (20, 3),
+                (20, 5),
+                (20, 5),
+                (20, 5),
+                (20, 0),
+                (20, 0),
+                (20, 0),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {mapped[n]: "blue" for n in G1.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 14},
+            None,
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "h", "o"},
+            None,
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change structure in G2 such that ONLY consistency is harmed
+        G2.remove_edges_from([(mapped[20], mapped[3]), (mapped[20], mapped[3])])
+        G2.add_edges_from([(mapped[20], mapped[2]), (mapped[20], mapped[2])])
+
+        # Consistency check fails, while the cutting rules are satisfied!
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1 and make it consistent
+        G1.remove_edges_from([(20, 3), (20, 3)])
+        G1.add_edges_from([(20, 2), (20, 2)])
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # ONLY fail the cutting check
+        G2.add_edges_from([(v, mapped[10])] * 5)
+        assert _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Pass all tests
+        G1.add_edges_from([(u, 10)] * 5)
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_feasibility_different_labels(self):
+        G1 = nx.MultiGraph(
+            [
+                (0, 1),
+                (0, 1),
+                (1, 2),
+                (1, 2),
+                (1, 14),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (4, 10),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 9),
+                (20, 9),
+                (20, 15),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (20, 11),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 8),
+                (20, 2),
+                (20, 2),
+                (20, 5),
+                (20, 5),
+                (20, 5),
+                (20, 0),
+                (20, 0),
+                (20, 0),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+        l1 = {n: "none" for n in G1.nodes()}
+        l2 = {}
+
+        l1.update(
+            {
+                9: "blue",
+                15: "blue",
+                12: "blue",
+                11: "green",
+                2: "green",
+                8: "red",
+                0: "red",
+                5: "yellow",
+            }
+        )
+        l2.update({mapped[n]: l for n, l in l1.items()})
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 14},
+            None,
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "h", "o"},
+            None,
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change structure in G2 such that ONLY consistency is harmed
+        G2.remove_edges_from([(mapped[20], mapped[2]), (mapped[20], mapped[2])])
+        G2.add_edges_from([(mapped[20], mapped[3]), (mapped[20], mapped[3])])
+        l2.update({mapped[3]: "green"})
+
+        # Consistency check fails, while the cutting rules are satisfied!
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1 and make it consistent
+        G1.remove_edges_from([(20, 2), (20, 2)])
+        G1.add_edges_from([(20, 3), (20, 3)])
+        l1.update({3: "green"})
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # ONLY fail the cutting check
+        l1.update({5: "red"})
+        assert _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+
+class TestDiGraphISOFeasibility:
+    def test_const_covered_neighbors(self):
+        G1 = nx.DiGraph([(0, 1), (1, 2), (0, 3), (2, 3)])
+        G2 = nx.DiGraph([("a", "b"), ("b", "c"), ("a", "k"), ("c", "k")])
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_no_covered_neighbors(self):
+        G1 = nx.DiGraph([(0, 1), (1, 2), (3, 4), (3, 5)])
+        G2 = nx.DiGraph([("a", "b"), ("b", "c"), ("k", "w"), ("k", "z")])
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_mixed_covered_uncovered_neighbors(self):
+        G1 = nx.DiGraph([(0, 1), (1, 2), (3, 0), (3, 2), (3, 4), (3, 5)])
+        G2 = nx.DiGraph(
+            [("a", "b"), ("b", "c"), ("k", "a"), ("k", "c"), ("k", "w"), ("k", "z")]
+        )
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_fail_cases(self):
+        G1 = nx.DiGraph(
+            [
+                (0, 1),
+                (2, 1),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (5, 10),
+                (10, 6),
+                (1, 4),
+                (5, 3),
+            ]
+        )
+        G2 = nx.DiGraph(
+            [
+                ("a", "b"),
+                ("c", "b"),
+                ("k", "a"),
+                ("k", "d"),
+                ("k", "e"),
+                ("f", "k"),
+                ("k", "g"),
+                ("b", "e"),
+                ("f", "d"),
+            ]
+        )
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 10, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Delete one uncovered neighbor of u. Notice how it still passes the
+        # test. Two reasons for this:
+        #   1. If u, v had different degrees from the beginning, they wouldn't
+        #      be selected as candidates in the first place.
+        #   2. Even if they are selected, consistency is basically a
+        #      1-look-ahead check: it only considers the relation of the
+        #      candidates with their mapped (covered) neighbors, and the node
+        #      we deleted is not a covered neighbor. Such nodes are checked
+        #      by the cut_PT function, which is essentially the 2-look-ahead,
+        #      checking the relation of the candidates with T1 and T2 (to
+        #      which the node we just deleted belongs).
+        G1.remove_node(6)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Add one more covered neighbor of u in G1
+        G1.add_edge(u, 2)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.add_edge(v, "c")
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Add one more covered neighbor of v in G2
+        G2.add_edge(v, "x")
+        G1.add_node(7)
+        sparams.mapping.update({7: "x"})
+        sparams.reverse_mapping.update({"x": 7})
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.add_edge(u, 7)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_cut_inconsistent_labels(self):
+        G1 = nx.DiGraph(
+            [
+                (0, 1),
+                (2, 1),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (5, 10),
+                (10, 6),
+                (1, 4),
+                (5, 3),
+            ]
+        )
+        G2 = nx.DiGraph(
+            [
+                ("a", "b"),
+                ("c", "b"),
+                ("k", "a"),
+                ("k", "d"),
+                ("k", "e"),
+                ("f", "k"),
+                ("k", "g"),
+                ("b", "e"),
+                ("f", "d"),
+            ]
+        )
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        l1.update({5: "green"})  # Change the label of one neighbor of u
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+
+        u, v = 10, "k"
+        assert _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_consistent_labels(self):
+        G1 = nx.DiGraph(
+            [
+                (0, 1),
+                (2, 1),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (5, 10),
+                (10, 6),
+                (1, 4),
+                (5, 3),
+            ]
+        )
+        G2 = nx.DiGraph(
+            [
+                ("a", "b"),
+                ("c", "b"),
+                ("k", "a"),
+                ("k", "d"),
+                ("k", "e"),
+                ("f", "k"),
+                ("k", "g"),
+                ("b", "e"),
+                ("f", "d"),
+            ]
+        )
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4},
+            {5, 10},
+            {6},
+            None,
+            {"e"},
+            {"f", "k"},
+            {"g"},
+            None,
+        )
+
+        u, v = 10, "k"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_same_labels(self):
+        G1 = nx.DiGraph(
+            [
+                (0, 1),
+                (2, 1),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (5, 10),
+                (10, 6),
+                (1, 4),
+                (5, 3),
+            ]
+        )
+        mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"}
+        G2 = nx.relabel_nodes(G1, mapped)
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4},
+            {5, 10},
+            {6},
+            None,
+            {"e"},
+            {"f", "k"},
+            {"g"},
+            None,
+        )
+
+        u, v = 10, "k"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change intersection between G1[u] and T1_out, so it's not the same as the one between G2[v] and T2_out
+        G1.remove_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.remove_edge(v, mapped[4])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change intersection between G1[u] and T1_in, so it's not the same as the one between G2[v] and T2_in
+        G1.remove_edge(5, u)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.remove_edge(mapped[5], v)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change intersection between G2[v] and T2_tilde, so it's not the same as the one between G1[u] and T1_tilde
+        G2.remove_edge(v, mapped[6])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.remove_edge(u, 6)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add disconnected nodes, which will form the new Ti_tilde
+        G1.add_nodes_from([6, 7, 8])
+        G2.add_nodes_from(["g", "y", "z"])
+        sparams.T1_tilde.update({6, 7, 8})
+        sparams.T2_tilde.update({"g", "y", "z"})
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_different_labels(self):
+        G1 = nx.DiGraph(
+            [
+                (0, 1),
+                (1, 2),
+                (14, 1),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (10, 4),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 3),
+                (20, 5),
+                (0, 20),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        l1 = {n: "none" for n in G1.nodes()}
+        l2 = {}
+
+        l1.update(
+            {
+                9: "blue",
+                15: "blue",
+                12: "blue",
+                11: "green",
+                3: "green",
+                8: "red",
+                0: "red",
+                5: "yellow",
+            }
+        )
+        l2.update({mapped[n]: l for n, l in l1.items()})
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 20},
+            {14, 20},
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "x"},
+            {"o", "x"},
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the orientation of the labels on neighbors of u compared to neighbors of v. Leave the structure intact
+        l1.update({9: "red"})
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # compensate in G2
+        l2.update({mapped[9]: "red"})
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the intersection of G1[u] and T1_out
+        G1.add_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G2[v] and T2_out
+        G2.add_edge(v, mapped[4])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the intersection of G1[u] and T1_in
+        G1.add_edge(u, 14)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G2[v] and T2_in
+        G2.add_edge(v, mapped[14])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the intersection of G2[v] and T2_tilde
+        G2.remove_edge(v, mapped[8])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G1[u] and T1_tilde
+        G1.remove_edge(u, 8)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Place 8 and mapped[8] in T1 and T2 respectively, by connecting them to covered nodes
+        G1.add_edge(8, 3)
+        G2.add_edge(mapped[8], mapped[3])
+        sparams.T1.add(8)
+        sparams.T2.add(mapped[8])
+        sparams.T1_tilde.remove(8)
+        sparams.T2_tilde.remove(mapped[8])
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Remove neighbor of u from T1
+        G1.remove_node(5)
+        l1.pop(5)
+        sparams.T1.remove(5)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same in G2
+        G2.remove_node(mapped[5])
+        l2.pop(mapped[5])
+        sparams.T2.remove(mapped[5])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_predecessor_T1_in_fail(self):
+        G1 = nx.DiGraph(
+            [(0, 1), (0, 3), (4, 0), (1, 5), (5, 2), (3, 6), (4, 6), (6, 5)]
+        )
+        mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g"}
+        G2 = nx.relabel_nodes(G1, mapped)
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            {3, 5},
+            {4, 5},
+            {6},
+            None,
+            {"d", "f"},
+            {"f"},  # mapped[4] is missing from T2_in
+            {"g"},
+            None,
+        )
+
+        u, v = 6, "g"
+        assert _cut_PT(u, v, gparams, sparams)
+
+        sparams.T2_in.add("e")
+        assert not _cut_PT(u, v, gparams, sparams)
+
+
+class TestGraphTinoutUpdating:
+    edges = [
+        (1, 3),
+        (2, 3),
+        (3, 4),
+        (4, 9),
+        (4, 5),
+        (3, 9),
+        (5, 8),
+        (5, 7),
+        (8, 7),
+        (6, 7),
+    ]
+    mapped = {
+        0: "x",
+        1: "a",
+        2: "b",
+        3: "c",
+        4: "d",
+        5: "e",
+        6: "f",
+        7: "g",
+        8: "h",
+        9: "i",
+    }
+    G1 = nx.Graph()
+    G1.add_edges_from(edges)
+    G1.add_node(0)
+    G2 = nx.relabel_nodes(G1, mapping=mapped)
+
+    def test_updating(self):
+        G2_degree = dict(self.G2.degree)
+        gparams, sparams = _initialize_parameters(self.G1, self.G2, G2_degree)
+        m, m_rev, T1, _, T1_tilde, _, T2, _, T2_tilde, _ = sparams
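+
+        # Each _update_Tinout call below should add the uncovered neighbors of
+        # the newly mapped pair to T1/T2, remove the mapped nodes themselves
+        # from those sets, and shrink T1_tilde/T2_tilde accordingly; the
+        # assertions trace this after every step.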
+
+        # Add node to the mapping
+        m[4] = self.mapped[4]
+        m_rev[self.mapped[4]] = 4
+        _update_Tinout(4, self.mapped[4], gparams, sparams)
+
+        assert T1 == {3, 5, 9}
+        assert T2 == {"c", "i", "e"}
+        assert T1_tilde == {0, 1, 2, 6, 7, 8}
+        assert T2_tilde == {"x", "a", "b", "f", "g", "h"}
+
+        # Add node to the mapping
+        m[5] = self.mapped[5]
+        m_rev.update({self.mapped[5]: 5})
+        _update_Tinout(5, self.mapped[5], gparams, sparams)
+
+        assert T1 == {3, 9, 8, 7}
+        assert T2 == {"c", "i", "h", "g"}
+        assert T1_tilde == {0, 1, 2, 6}
+        assert T2_tilde == {"x", "a", "b", "f"}
+
+        # Add node to the mapping
+        m[6] = self.mapped[6]
+        m_rev.update({self.mapped[6]: 6})
+        _update_Tinout(6, self.mapped[6], gparams, sparams)
+
+        assert T1 == {3, 9, 8, 7}
+        assert T2 == {"c", "i", "h", "g"}
+        assert T1_tilde == {0, 1, 2}
+        assert T2_tilde == {"x", "a", "b"}
+
+        # Add node to the mapping
+        m[3] = self.mapped[3]
+        m_rev.update({self.mapped[3]: 3})
+        _update_Tinout(3, self.mapped[3], gparams, sparams)
+
+        assert T1 == {1, 2, 9, 8, 7}
+        assert T2 == {"a", "b", "i", "h", "g"}
+        assert T1_tilde == {0}
+        assert T2_tilde == {"x"}
+
+        # Add node to the mapping
+        m[0] = self.mapped[0]
+        m_rev.update({self.mapped[0]: 0})
+        _update_Tinout(0, self.mapped[0], gparams, sparams)
+
+        assert T1 == {1, 2, 9, 8, 7}
+        assert T2 == {"a", "b", "i", "h", "g"}
+        assert T1_tilde == set()
+        assert T2_tilde == set()
+
+    def test_restoring(self):
+        m = {0: "x", 3: "c", 4: "d", 5: "e", 6: "f"}
+        m_rev = {"x": 0, "c": 3, "d": 4, "e": 5, "f": 6}
+
+        T1 = {1, 2, 7, 9, 8}
+        T2 = {"a", "b", "g", "i", "h"}
+        T1_tilde = set()
+        T2_tilde = set()
+
+        gparams = _GraphParameters(self.G1, self.G2, {}, {}, {}, {}, {})
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        # Remove a node from the mapping
+        m.pop(0)
+        m_rev.pop("x")
+        _restore_Tinout(0, self.mapped[0], gparams, sparams)
+
+        assert T1 == {1, 2, 7, 9, 8}
+        assert T2 == {"a", "b", "g", "i", "h"}
+        assert T1_tilde == {0}
+        assert T2_tilde == {"x"}
+
+        # Remove a node from the mapping
+        m.pop(6)
+        m_rev.pop("f")
+        _restore_Tinout(6, self.mapped[6], gparams, sparams)
+
+        assert T1 == {1, 2, 7, 9, 8}
+        assert T2 == {"a", "b", "g", "i", "h"}
+        assert T1_tilde == {0, 6}
+        assert T2_tilde == {"x", "f"}
+
+        # Remove a node from the mapping
+        m.pop(3)
+        m_rev.pop("c")
+        _restore_Tinout(3, self.mapped[3], gparams, sparams)
+
+        assert T1 == {7, 9, 8, 3}
+        assert T2 == {"g", "i", "h", "c"}
+        assert T1_tilde == {0, 6, 1, 2}
+        assert T2_tilde == {"x", "f", "a", "b"}
+
+        # Remove a node from the mapping
+        m.pop(5)
+        m_rev.pop("e")
+        _restore_Tinout(5, self.mapped[5], gparams, sparams)
+
+        assert T1 == {9, 3, 5}
+        assert T2 == {"i", "c", "e"}
+        assert T1_tilde == {0, 6, 1, 2, 7, 8}
+        assert T2_tilde == {"x", "f", "a", "b", "g", "h"}
+
+        # Remove a node from the mapping
+        m.pop(4)
+        m_rev.pop("d")
+        _restore_Tinout(4, self.mapped[4], gparams, sparams)
+
+        assert T1 == set()
+        assert T2 == set()
+        assert T1_tilde == set(self.G1.nodes())
+        assert T2_tilde == set(self.G2.nodes())
+
+
+class TestDiGraphTinoutUpdating:
+    edges = [
+        (1, 3),
+        (3, 2),
+        (3, 4),
+        (4, 9),
+        (4, 5),
+        (3, 9),
+        (5, 8),
+        (5, 7),
+        (8, 7),
+        (7, 6),
+    ]
+    mapped = {
+        0: "x",
+        1: "a",
+        2: "b",
+        3: "c",
+        4: "d",
+        5: "e",
+        6: "f",
+        7: "g",
+        8: "h",
+        9: "i",
+    }
+    G1 = nx.DiGraph(edges)
+    G1.add_node(0)
+    G2 = nx.relabel_nodes(G1, mapping=mapped)
+
+    def test_updating(self):
+        G2_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(
+                self.G2.in_degree, self.G2.out_degree
+            )
+        }
+        gparams, sparams = _initialize_parameters(self.G1, self.G2, G2_degree)
+        m, m_rev, T1_out, T1_in, T1_tilde, _, T2_out, T2_in, T2_tilde, _ = sparams
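+
+        # In the directed case the frontier splits into T*_out (uncovered
+        # successors of covered nodes) and T*_in (uncovered predecessors).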
+
+        # Add node to the mapping
+        m[4] = self.mapped[4]
+        m_rev[self.mapped[4]] = 4
+        _update_Tinout(4, self.mapped[4], gparams, sparams)
+
+        assert T1_out == {5, 9}
+        assert T1_in == {3}
+        assert T2_out == {"i", "e"}
+        assert T2_in == {"c"}
+        assert T1_tilde == {0, 1, 2, 6, 7, 8}
+        assert T2_tilde == {"x", "a", "b", "f", "g", "h"}
+
+        # Add node to the mapping
+        m[5] = self.mapped[5]
+        m_rev[self.mapped[5]] = 5
+        _update_Tinout(5, self.mapped[5], gparams, sparams)
+
+        assert T1_out == {9, 8, 7}
+        assert T1_in == {3}
+        assert T2_out == {"i", "g", "h"}
+        assert T2_in == {"c"}
+        assert T1_tilde == {0, 1, 2, 6}
+        assert T2_tilde == {"x", "a", "b", "f"}
+
+        # Add node to the mapping
+        m[6] = self.mapped[6]
+        m_rev[self.mapped[6]] = 6
+        _update_Tinout(6, self.mapped[6], gparams, sparams)
+
+        assert T1_out == {9, 8, 7}
+        assert T1_in == {3, 7}
+        assert T2_out == {"i", "g", "h"}
+        assert T2_in == {"c", "g"}
+        assert T1_tilde == {0, 1, 2}
+        assert T2_tilde == {"x", "a", "b"}
+
+        # Add node to the mapping
+        m[3] = self.mapped[3]
+        m_rev[self.mapped[3]] = 3
+        _update_Tinout(3, self.mapped[3], gparams, sparams)
+
+        assert T1_out == {9, 8, 7, 2}
+        assert T1_in == {7, 1}
+        assert T2_out == {"i", "g", "h", "b"}
+        assert T2_in == {"g", "a"}
+        assert T1_tilde == {0}
+        assert T2_tilde == {"x"}
+
+        # Add node to the mapping
+        m[0] = self.mapped[0]
+        m_rev[self.mapped[0]] = 0
+        _update_Tinout(0, self.mapped[0], gparams, sparams)
+
+        assert T1_out == {9, 8, 7, 2}
+        assert T1_in == {7, 1}
+        assert T2_out == {"i", "g", "h", "b"}
+        assert T2_in == {"g", "a"}
+        assert T1_tilde == set()
+        assert T2_tilde == set()
+
+    def test_restoring(self):
+        m = {0: "x", 3: "c", 4: "d", 5: "e", 6: "f"}
+        m_rev = {"x": 0, "c": 3, "d": 4, "e": 5, "f": 6}
+
+        T1_out = {2, 7, 9, 8}
+        T1_in = {1, 7}
+        T2_out = {"b", "g", "i", "h"}
+        T2_in = {"a", "g"}
+        T1_tilde = set()
+        T2_tilde = set()
+
+        gparams = _GraphParameters(self.G1, self.G2, {}, {}, {}, {}, {})
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        # Remove a node from the mapping
+        m.pop(0)
+        m_rev.pop("x")
+        _restore_Tinout_Di(0, self.mapped[0], gparams, sparams)
+
+        assert T1_out == {2, 7, 9, 8}
+        assert T1_in == {1, 7}
+        assert T2_out == {"b", "g", "i", "h"}
+        assert T2_in == {"a", "g"}
+        assert T1_tilde == {0}
+        assert T2_tilde == {"x"}
+
+        # Remove a node from the mapping
+        m.pop(6)
+        m_rev.pop("f")
+        _restore_Tinout_Di(6, self.mapped[6], gparams, sparams)
+
+        assert T1_out == {2, 9, 8, 7}
+        assert T1_in == {1}
+        assert T2_out == {"b", "i", "h", "g"}
+        assert T2_in == {"a"}
+        assert T1_tilde == {0, 6}
+        assert T2_tilde == {"x", "f"}
+
+        # Remove a node from the mapping
+        m.pop(3)
+        m_rev.pop("c")
+        _restore_Tinout_Di(3, self.mapped[3], gparams, sparams)
+
+        assert T1_out == {9, 8, 7}
+        assert T1_in == {3}
+        assert T2_out == {"i", "h", "g"}
+        assert T2_in == {"c"}
+        assert T1_tilde == {0, 6, 1, 2}
+        assert T2_tilde == {"x", "f", "a", "b"}
+
+        # Remove a node from the mapping
+        m.pop(5)
+        m_rev.pop("e")
+        _restore_Tinout_Di(5, self.mapped[5], gparams, sparams)
+
+        assert T1_out == {9, 5}
+        assert T1_in == {3}
+        assert T2_out == {"i", "e"}
+        assert T2_in == {"c"}
+        assert T1_tilde == {0, 6, 1, 2, 8, 7}
+        assert T2_tilde == {"x", "f", "a", "b", "h", "g"}
+
+        # Remove a node from the mapping
+        m.pop(4)
+        m_rev.pop("d")
+        _restore_Tinout_Di(4, self.mapped[4], gparams, sparams)
+
+        assert T1_out == set()
+        assert T1_in == set()
+        assert T2_out == set()
+        assert T2_in == set()
+        assert T1_tilde == set(self.G1.nodes())
+        assert T2_tilde == set(self.G2.nodes())
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py
new file mode 100644
index 00000000..b44f4588
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py
@@ -0,0 +1,200 @@
+"""
+Tests for VF2 isomorphism algorithm for weighted graphs.
+"""
+
+import math
+from operator import eq
+
+import networkx as nx
+import networkx.algorithms.isomorphism as iso
+
+
+def test_simple():
+    # 16 simple tests
+    w = "weight"
+    edges = [(0, 0, 1), (0, 0, 1.5), (0, 1, 2), (1, 0, 3)]
+    for g1 in [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()]:
+        g1.add_weighted_edges_from(edges)
+        g2 = g1.subgraph(g1.nodes())
+        if g1.is_multigraph():
+            em = iso.numerical_multiedge_match("weight", 1)
+        else:
+            em = iso.numerical_edge_match("weight", 1)
+        assert nx.is_isomorphic(g1, g2, edge_match=em)
+
+        for mod1, mod2 in [(False, True), (True, False), (True, True)]:
+            # mod1 tests a regular edge
+            # mod2 tests a selfloop
+            if g2.is_multigraph():
+                if mod1:
+                    data1 = {0: {"weight": 10}}
+                if mod2:
+                    data2 = {0: {"weight": 1}, 1: {"weight": 2.5}}
+            else:
+                if mod1:
+                    data1 = {"weight": 10}
+                if mod2:
+                    data2 = {"weight": 2.5}
+
+            g2 = g1.subgraph(g1.nodes()).copy()
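+            # Overwrite the edge data in g2 directly through the private
+            # adjacency dicts (g1 keeps its original data), so the edge_match
+            # below should report a mismatch.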
+            if mod1:
+                if not g1.is_directed():
+                    g2._adj[1][0] = data1
+                    g2._adj[0][1] = data1
+                else:
+                    g2._succ[1][0] = data1
+                    g2._pred[0][1] = data1
+            if mod2:
+                if not g1.is_directed():
+                    g2._adj[0][0] = data2
+                else:
+                    g2._succ[0][0] = data2
+                    g2._pred[0][0] = data2
+
+            assert not nx.is_isomorphic(g1, g2, edge_match=em)
+
+
+def test_weightkey():
+    g1 = nx.DiGraph()
+    g2 = nx.DiGraph()
+
+    g1.add_edge("A", "B", weight=1)
+    g2.add_edge("C", "D", weight=0)
+
+    assert nx.is_isomorphic(g1, g2)
+    em = iso.numerical_edge_match("nonexistent attribute", 1)
+    assert nx.is_isomorphic(g1, g2, edge_match=em)
+    em = iso.numerical_edge_match("weight", 1)
+    assert not nx.is_isomorphic(g1, g2, edge_match=em)
+
+    g2 = nx.DiGraph()
+    g2.add_edge("C", "D")
+    assert nx.is_isomorphic(g1, g2, edge_match=em)
+
+
+class TestNodeMatch_Graph:
+    def setup_method(self):
+        self.g1 = nx.Graph()
+        self.g2 = nx.Graph()
+        self.build()
+
+    def build(self):
+        self.nm = iso.categorical_node_match("color", "")
+        self.em = iso.numerical_edge_match("weight", 1)
+
+        self.g1.add_node("A", color="red")
+        self.g2.add_node("C", color="blue")
+
+        self.g1.add_edge("A", "B", weight=1)
+        self.g2.add_edge("C", "D", weight=1)
+
+    def test_noweight_nocolor(self):
+        assert nx.is_isomorphic(self.g1, self.g2)
+
+    def test_color1(self):
+        assert not nx.is_isomorphic(self.g1, self.g2, node_match=self.nm)
+
+    def test_color2(self):
+        self.g1.nodes["A"]["color"] = "blue"
+        assert nx.is_isomorphic(self.g1, self.g2, node_match=self.nm)
+
+    def test_weight1(self):
+        assert nx.is_isomorphic(self.g1, self.g2, edge_match=self.em)
+
+    def test_weight2(self):
+        self.g1.add_edge("A", "B", weight=2)
+        assert not nx.is_isomorphic(self.g1, self.g2, edge_match=self.em)
+
+    def test_colorsandweights1(self):
+        iso = nx.is_isomorphic(self.g1, self.g2, node_match=self.nm, edge_match=self.em)
+        assert not iso
+
+    def test_colorsandweights2(self):
+        self.g1.nodes["A"]["color"] = "blue"
+        iso = nx.is_isomorphic(self.g1, self.g2, node_match=self.nm, edge_match=self.em)
+        assert iso
+
+    def test_colorsandweights3(self):
+        # make the weights disagree
+        self.g1.add_edge("A", "B", weight=2)
+        assert not nx.is_isomorphic(
+            self.g1, self.g2, node_match=self.nm, edge_match=self.em
+        )
+
+
+class TestEdgeMatch_MultiGraph:
+    def setup_method(self):
+        self.g1 = nx.MultiGraph()
+        self.g2 = nx.MultiGraph()
+        self.GM = iso.MultiGraphMatcher
+        self.build()
+
+    def build(self):
+        g1 = self.g1
+        g2 = self.g2
+
+        # We will assume integer weights only.
+        g1.add_edge("A", "B", color="green", weight=0, size=0.5)
+        g1.add_edge("A", "B", color="red", weight=1, size=0.35)
+        g1.add_edge("A", "B", color="red", weight=2, size=0.65)
+
+        g2.add_edge("C", "D", color="green", weight=1, size=0.5)
+        g2.add_edge("C", "D", color="red", weight=0, size=0.45)
+        g2.add_edge("C", "D", color="red", weight=2, size=0.65)
+
+        if g1.is_multigraph():
+            self.em = iso.numerical_multiedge_match("weight", 1)
+            self.emc = iso.categorical_multiedge_match("color", "")
+            self.emcm = iso.categorical_multiedge_match(["color", "weight"], ["", 1])
+            self.emg1 = iso.generic_multiedge_match("color", "red", eq)
+            self.emg2 = iso.generic_multiedge_match(
+                ["color", "weight", "size"],
+                ["red", 1, 0.5],
+                [eq, eq, math.isclose],
+            )
+        else:
+            self.em = iso.numerical_edge_match("weight", 1)
+            self.emc = iso.categorical_edge_match("color", "")
+            self.emcm = iso.categorical_edge_match(["color", "weight"], ["", 1])
+            self.emg1 = iso.generic_edge_match("color", "red", eq)
+            self.emg2 = iso.generic_edge_match(
+                ["color", "weight", "size"],
+                ["red", 1, 0.5],
+                [eq, eq, math.isclose],
+            )
+
+    def test_weights_only(self):
+        assert nx.is_isomorphic(self.g1, self.g2, edge_match=self.em)
+
+    def test_colors_only(self):
+        gm = self.GM(self.g1, self.g2, edge_match=self.emc)
+        assert gm.is_isomorphic()
+
+    def test_colorsandweights(self):
+        gm = self.GM(self.g1, self.g2, edge_match=self.emcm)
+        assert not gm.is_isomorphic()
+
+    def test_generic1(self):
+        gm = self.GM(self.g1, self.g2, edge_match=self.emg1)
+        assert gm.is_isomorphic()
+
+    def test_generic2(self):
+        gm = self.GM(self.g1, self.g2, edge_match=self.emg2)
+        assert not gm.is_isomorphic()
+
+
+class TestEdgeMatch_DiGraph(TestNodeMatch_Graph):
+    def setup_method(self):
+        TestNodeMatch_Graph.setup_method(self)
+        self.g1 = nx.DiGraph()
+        self.g2 = nx.DiGraph()
+        self.build()
+
+
+class TestEdgeMatch_MultiDiGraph(TestEdgeMatch_MultiGraph):
+    def setup_method(self):
+        TestEdgeMatch_MultiGraph.setup_method(self)
+        self.g1 = nx.MultiDiGraph()
+        self.g2 = nx.MultiDiGraph()
+        self.GM = iso.MultiDiGraphMatcher
+        self.build()
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tree_isomorphism.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tree_isomorphism.py
new file mode 100644
index 00000000..e409d515
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tree_isomorphism.py
@@ -0,0 +1,284 @@
+"""
+An algorithm for finding if two undirected trees are isomorphic,
+and if so returns an isomorphism between the two sets of nodes.
+
+This algorithm uses a routine to tell if two rooted trees (trees with a
+specified root node) are isomorphic, which may be independently useful.
+
+This implements an algorithm from:
+The Design and Analysis of Computer Algorithms
+by Aho, Hopcroft, and Ullman
+Addison-Wesley Publishing 1974
+Example 3.2 pp. 84-86.
+
+A more understandable version of this algorithm is described in:
+Homework Assignment 5
+McGill University SOCS 308-250B, Winter 2002
+by Matthew Suderman
+http://crypto.cs.mcgill.ca/~crepeau/CS250/2004/HW5+.pdf
+"""
+
+import networkx as nx
+from networkx.utils.decorators import not_implemented_for
+
+__all__ = ["rooted_tree_isomorphism", "tree_isomorphism"]
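+
+# A minimal usage sketch (illustrative only; any pair of trees behaves the same):
+#
+#     t1 = nx.path_graph(4)                      # a path is a tree
+#     t2 = nx.relabel_nodes(t1, {0: "a", 1: "b", 2: "c", 3: "d"})
+#     pairs = tree_isomorphism(t1, t2)           # [] if the trees are not isomorphic
+#     rooted_pairs = rooted_tree_isomorphism(t1, 0, t2, "a")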
+
+
+@nx._dispatchable(graphs={"t1": 0, "t2": 2}, returns_graph=True)
+def root_trees(t1, root1, t2, root2):
+    """Create a single digraph dT of free trees t1 and t2
+    with roots root1 and root2 respectively.
+
+    The nodes are renamed with consecutive integers so that every node
+    gets a unique name across both trees:
+
+    - our new "fake" root node is 0
+    - t1 is numbered from 1 to n
+    - t2 is numbered from n+1 to 2n
+    """
+
+    dT = nx.DiGraph()
+
+    newroot1 = 1  # left root will be 1
+    newroot2 = nx.number_of_nodes(t1) + 1  # right will be n+1
+
+    # there may be overlap in node names, so we need separate maps
+    # that give the new name for each old name
+    namemap1 = {root1: newroot1}
+    namemap2 = {root2: newroot2}
+
+    # add an edge from our new root to root1 and root2
+    dT.add_edge(0, namemap1[root1])
+    dT.add_edge(0, namemap2[root2])
+
+    for i, (v1, v2) in enumerate(nx.bfs_edges(t1, root1)):
+        namemap1[v2] = i + namemap1[root1] + 1
+        dT.add_edge(namemap1[v1], namemap1[v2])
+
+    for i, (v1, v2) in enumerate(nx.bfs_edges(t2, root2)):
+        namemap2[v2] = i + namemap2[root2] + 1
+        dT.add_edge(namemap2[v1], namemap2[v2])
+
+    # now we really want the inverse of namemap1 and namemap2
+    # giving the old name given the new
+    # since the values of namemap1 and namemap2 are unique
+    # there won't be collisions
+    namemap = {}
+    for old, new in namemap1.items():
+        namemap[new] = old
+    for old, new in namemap2.items():
+        namemap[new] = old
+
+    return (dT, namemap, newroot1, newroot2)
+
+
+# figure out the level of each node, with 0 at root
+@nx._dispatchable
+def assign_levels(G, root):
+    level = {}
+    level[root] = 0
+    for v1, v2 in nx.bfs_edges(G, root):
+        level[v2] = level[v1] + 1
+
+    return level
+
+
+# now group the nodes at each level
+def group_by_levels(levels):
+    L = {}
+    for n, lev in levels.items():
+        if lev not in L:
+            L[lev] = []
+        L[lev].append(n)
+
+    return L
+
+
+# now let's get the isomorphism by walking the ordered_children
+def generate_isomorphism(v, w, M, ordered_children):
+    # make sure the node from t1 comes first
+    # (t1 nodes are numbered 1..n, t2 nodes n+1..2n)
+    assert v < w
+    M.append((v, w))
+    for x, y in zip(ordered_children[v], ordered_children[w]):
+        generate_isomorphism(x, y, M, ordered_children)
+
+
+@nx._dispatchable(graphs={"t1": 0, "t2": 2})
+def rooted_tree_isomorphism(t1, root1, t2, root2):
+    """
+    Given two rooted trees `t1` and `t2`,
+    with roots `root1` and `root2` respectively,
+    this routine will determine if they are isomorphic.
+
+    These trees may be either directed or undirected,
+    but if they are directed, all edges should flow from the root.
+
+    It returns the isomorphism, a mapping of the nodes of `t1` onto the nodes
+    of `t2`, such that the two trees are then identical.
+
+    Note that two trees may have more than one isomorphism, and this
+    routine just returns one valid mapping.
+
+    Parameters
+    ----------
+    t1 : NetworkX graph
+        One of the trees being compared
+
+    root1 : a node of `t1` which is the root of the tree
+
+    t2 : NetworkX graph
+        The other tree being compared
+
+    root2 : a node of `t2` which is the root of the tree
+
+    This is a subroutine used to implement `tree_isomorphism`, but it will
+    be somewhat faster if you already have rooted trees.
+
+    Returns
+    -------
+    isomorphism : list
+        A list of pairs in which the left element is a node in `t1`
+        and the right element is a node in `t2`.  The pairs are in
+        arbitrary order.  If the nodes in one tree are mapped to the names in
+        the other, then the trees will be identical. Note that an isomorphism
+        will not necessarily be unique.
+
+        If `t1` and `t2` are not isomorphic, then it returns the empty list.
+    """
+
+    assert nx.is_tree(t1)
+    assert nx.is_tree(t2)
+
+    # get the rooted tree formed by combining them
+    # with unique names
+    (dT, namemap, newroot1, newroot2) = root_trees(t1, root1, t2, root2)
+
+    # compute the distance from the root, with 0 for our fake root
+    levels = assign_levels(dT, 0)
+
+    # height
+    h = max(levels.values())
+
+    # collect nodes into a dict by level
+    L = group_by_levels(levels)
+
+    # each node has a label, initially set to 0
+    label = {v: 0 for v in dT}
+    # and also ordered_labels and ordered_children
+    # which will store ordered tuples
+    ordered_labels = {v: () for v in dT}
+    ordered_children = {v: () for v in dT}
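+
+    # Bottom-up canonical labeling in the style of Aho, Hopcroft & Ullman:
+    # at each level the sorted tuple of children labels is re-encoded as a
+    # small integer, so two nodes on the same level receive equal labels
+    # exactly when their rooted subtrees are isomorphic.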
+
+    # nothing to do on last level so start on h-1
+    # also nothing to do for our fake level 0, so skip that
+    for i in range(h - 1, 0, -1):
+        # update the ordered_labels and ordered_children
+        # for any children
+        for v in L[i]:
+            # nothing to do if no children
+            if dT.out_degree(v) > 0:
+                # get all the pairs of labels and nodes of children
+                # and sort by labels
+                s = sorted((label[u], u) for u in dT.successors(v))
+
+                # invert to give a list of two tuples
+                # the sorted labels, and the corresponding children
+                ordered_labels[v], ordered_children[v] = list(zip(*s))
+
+        # now collect and sort the sorted ordered_labels
+        # for all nodes in L[i], carrying along the node
+        forlabel = sorted((ordered_labels[v], v) for v in L[i])
+
+        # now assign labels to these nodes, according to the sorted order
+        # starting from 0, where identical ordered_labels get the same label
+        current = 0
+        for j, (ol, v) in enumerate(forlabel):
+            # advance to next label if not 0, and different from previous
+            if (j != 0) and (ol != forlabel[j - 1][0]):
+                current += 1
+            label[v] = current
+
+    # they are isomorphic if the labels of newroot1 and newroot2 are 0
+    isomorphism = []
+    if label[newroot1] == 0 and label[newroot2] == 0:
+        generate_isomorphism(newroot1, newroot2, isomorphism, ordered_children)
+
+        # get the mapping back in terms of the old names
+        # return in sorted order for neatness
+        isomorphism = [(namemap[u], namemap[v]) for (u, v) in isomorphism]
+
+    return isomorphism
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable(graphs={"t1": 0, "t2": 1})
+def tree_isomorphism(t1, t2):
+    """
+    Given two undirected (or free) trees `t1` and `t2`,
+    this routine will determine if they are isomorphic.
+    It returns the isomorphism, a mapping of the nodes of `t1` onto the nodes
+    of `t2`, such that the two trees are then identical.
+
+    Note that two trees may have more than one isomorphism, and this
+    routine just returns one valid mapping.
+
+    Parameters
+    ----------
+    t1 : undirected NetworkX graph
+        One of the trees being compared
+
+    t2 : undirected NetworkX graph
+        The other tree being compared
+
+    Returns
+    -------
+    isomorphism : list
+        A list of pairs in which the left element is a node in `t1`
+        and the right element is a node in `t2`.  The pairs are in
+        arbitrary order.  If the nodes in one tree are mapped to the names in
+        the other, then the trees will be identical. Note that an isomorphism
+        will not necessarily be unique.
+
+        If `t1` and `t2` are not isomorphic, then it returns the empty list.
+
+    Notes
+    -----
+    This runs in O(n*log(n)) time for trees with n nodes.
+    """
+
+    assert nx.is_tree(t1)
+    assert nx.is_tree(t2)
+
+    # To be isomorphic, t1 and t2 must have the same number of nodes.
+    if nx.number_of_nodes(t1) != nx.number_of_nodes(t2):
+        return []
+
+    # Another shortcut is that the sorted degree sequences need to be the same.
+    degree_sequence1 = sorted(d for (n, d) in t1.degree())
+    degree_sequence2 = sorted(d for (n, d) in t2.degree())
+
+    if degree_sequence1 != degree_sequence2:
+        return []
+
+    # A tree can have either 1 or 2 centers.
+    # If the number doesn't match then t1 and t2 are not isomorphic.
+    center1 = nx.center(t1)
+    center2 = nx.center(t2)
+
+    if len(center1) != len(center2):
+        return []
+
+    # If there is only 1 center in each, then use it.
+    if len(center1) == 1:
+        return rooted_tree_isomorphism(t1, center1[0], t2, center2[0])
+
+    # If they both have 2 centers, then try the first for t1
+    # with the first for t2.
+    attempts = rooted_tree_isomorphism(t1, center1[0], t2, center2[0])
+
+    # If that worked we're done.
+    if len(attempts) > 0:
+        return attempts
+
+    # Otherwise, try center1[0] with center2[1], and see if that works
+    return rooted_tree_isomorphism(t1, center1[0], t2, center2[1])
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2pp.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2pp.py
new file mode 100644
index 00000000..3093d9c9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2pp.py
@@ -0,0 +1,1075 @@
+"""
+***************
+VF2++ Algorithm
+***************
+
+An implementation of the VF2++ algorithm [1]_ for Graph Isomorphism testing.
+
+The simplest interface to use this module is to call:
+
+`vf2pp_is_isomorphic`: to check whether two graphs are isomorphic.
+`vf2pp_isomorphism`: to obtain the node mapping between two graphs,
+in case they are isomorphic.
+`vf2pp_all_isomorphisms`: to generate all possible mappings between two graphs,
+if isomorphic.
+
+Introduction
+------------
+The VF2++ algorithm follows a similar logic to that of VF2, while also
+introducing new easy-to-check cutting rules and determining the optimal access
+order of nodes. It is also implemented in a non-recursive manner, which saves
+both time and space when compared to its predecessor.
+
+The optimal node ordering is obtained by taking into consideration both the
+degree and the label rarity of each node.
+This way we place the nodes that are more likely to match first in the order,
+thus examining the most promising branches at the beginning.
+The cutting rules also consider node labels, making it easier to prune
+unfruitful branches early in the process.
+
+Examples
+--------
+
+Suppose G1 and G2 are isomorphic graphs. Verification is as follows:
+
+Without node labels:
+
+>>> import networkx as nx
+>>> G1 = nx.path_graph(4)
+>>> G2 = nx.path_graph(4)
+>>> nx.vf2pp_is_isomorphic(G1, G2, node_label=None)
+True
+>>> nx.vf2pp_isomorphism(G1, G2, node_label=None)
+{1: 1, 2: 2, 0: 0, 3: 3}
+
+With node labels:
+
+>>> G1 = nx.path_graph(4)
+>>> G2 = nx.path_graph(4)
+>>> mapped = {1: 1, 2: 2, 3: 3, 0: 0}
+>>> nx.set_node_attributes(
+...     G1, dict(zip(G1, ["blue", "red", "green", "yellow"])), "label"
+... )
+>>> nx.set_node_attributes(
+...     G2,
+...     dict(zip([mapped[u] for u in G1], ["blue", "red", "green", "yellow"])),
+...     "label",
+... )
+>>> nx.vf2pp_is_isomorphic(G1, G2, node_label="label")
+True
+>>> nx.vf2pp_isomorphism(G1, G2, node_label="label")
+{1: 1, 2: 2, 0: 0, 3: 3}
+
+References
+----------
+.. [1] Jüttner, Alpár & Madarasi, Péter. (2018). "VF2++—An improved subgraph
+   isomorphism algorithm". Discrete Applied Mathematics. 242.
+   https://doi.org/10.1016/j.dam.2018.02.018
+
+"""
+
+import collections
+
+import networkx as nx
+
+__all__ = ["vf2pp_isomorphism", "vf2pp_is_isomorphic", "vf2pp_all_isomorphisms"]
+
+_GraphParameters = collections.namedtuple(
+    "_GraphParameters",
+    [
+        "G1",
+        "G2",
+        "G1_labels",
+        "G2_labels",
+        "nodes_of_G1Labels",
+        "nodes_of_G2Labels",
+        "G2_nodes_of_degree",
+    ],
+)
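+# The label groupings (nodes_of_G1Labels, nodes_of_G2Labels) and the degree
+# grouping of G2 (G2_nodes_of_degree) are computed once up front and reused by
+# the candidate-selection helpers (_find_candidates / _find_candidates_Di).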
+
+_StateParameters = collections.namedtuple(
+    "_StateParameters",
+    [
+        "mapping",
+        "reverse_mapping",
+        "T1",
+        "T1_in",
+        "T1_tilde",
+        "T1_tilde_in",
+        "T2",
+        "T2_in",
+        "T2_tilde",
+        "T2_tilde_in",
+    ],
+)
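+# Rough meaning of the state slots: mapping/reverse_mapping hold the partial
+# node mapping G1 -> G2 and its inverse; T1/T2 hold the uncovered neighbors
+# (successors, for digraphs) of covered nodes; T1_in/T2_in hold the uncovered
+# predecessors in the directed case; T1_tilde/T2_tilde hold the remaining
+# nodes, i.e. those with no covered neighbor yet.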
+
+
+@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"})
+def vf2pp_isomorphism(G1, G2, node_label=None, default_label=None):
+    """Return an isomorphic mapping between `G1` and `G2` if it exists.
+
+    Parameters
+    ----------
+    G1, G2 : NetworkX Graph or MultiGraph instances.
+        The two graphs to check for isomorphism.
+
+    node_label : str, optional
+        The name of the node attribute to be used when comparing nodes.
+        The default is `None`, meaning node attributes are not considered
+        in the comparison. Any node that doesn't have the `node_label`
+        attribute uses `default_label` instead.
+
+    default_label : scalar
+        Default value to use when a node doesn't have an attribute
+        named `node_label`. Default is `None`.
+
+    Returns
+    -------
+    dict or None
+        Node mapping if the two graphs are isomorphic. None otherwise.
+    """
+    try:
+        mapping = next(vf2pp_all_isomorphisms(G1, G2, node_label, default_label))
+        return mapping
+    except StopIteration:
+        return None
+
+
+@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"})
+def vf2pp_is_isomorphic(G1, G2, node_label=None, default_label=None):
+    """Examines whether G1 and G2 are isomorphic.
+
+    Parameters
+    ----------
+    G1, G2 : NetworkX Graph or MultiGraph instances.
+        The two graphs to check for isomorphism.
+
+    node_label : str, optional
+        The name of the node attribute to be used when comparing nodes.
+        The default is `None`, meaning node attributes are not considered
+        in the comparison. Any node that doesn't have the `node_label`
+        attribute uses `default_label` instead.
+
+    default_label : scalar
+        Default value to use when a node doesn't have an attribute
+        named `node_label`. Default is `None`.
+
+    Returns
+    -------
+    bool
+        True if the two graphs are isomorphic, False otherwise.
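+
+    Examples
+    --------
+    A minimal doctest, mirroring the module-level example above:
+
+    >>> import networkx as nx
+    >>> nx.vf2pp_is_isomorphic(nx.path_graph(4), nx.path_graph(4), node_label=None)
+    True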
+    """
+    return vf2pp_isomorphism(G1, G2, node_label, default_label) is not None
+
+
+@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"})
+def vf2pp_all_isomorphisms(G1, G2, node_label=None, default_label=None):
+    """Yields all the possible mappings between G1 and G2.
+
+    Parameters
+    ----------
+    G1, G2 : NetworkX Graph or MultiGraph instances.
+        The two graphs to check for isomorphism.
+
+    node_label : str, optional
+        The name of the node attribute to be used when comparing nodes.
+        The default is `None`, meaning node attributes are not considered
+        in the comparison. Any node that doesn't have the `node_label`
+        attribute uses `default_label` instead.
+
+    default_label : scalar
+        Default value to use when a node doesn't have an attribute
+        named `node_label`. Default is `None`.
+
+    Yields
+    ------
+    dict
+        Isomorphic mapping between the nodes in `G1` and `G2`.
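+
+    Examples
+    --------
+    An illustrative doctest; the first yielded mapping mirrors the module-level
+    example, and a path on four nodes has exactly two automorphisms:
+
+    >>> import networkx as nx
+    >>> G1, G2 = nx.path_graph(4), nx.path_graph(4)
+    >>> next(nx.vf2pp_all_isomorphisms(G1, G2, node_label=None))
+    {1: 1, 2: 2, 0: 0, 3: 3}
+    >>> len(list(nx.vf2pp_all_isomorphisms(G1, G2, node_label=None)))
+    2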
+    """
+    if G1.number_of_nodes() == 0 or G2.number_of_nodes() == 0:
+        return False
+
+    # Create the degree dicts based on graph type
+    if G1.is_directed():
+        G1_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
+        }
+        G2_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(G2.in_degree, G2.out_degree)
+        }
+    else:
+        G1_degree = dict(G1.degree)
+        G2_degree = dict(G2.degree)
+
+    if not G1.is_directed():
+        find_candidates = _find_candidates
+        restore_Tinout = _restore_Tinout
+    else:
+        find_candidates = _find_candidates_Di
+        restore_Tinout = _restore_Tinout_Di
+
+    # Check that both graphs have the same number of nodes and degree sequence
+    if G1.order() != G2.order():
+        return False
+    if sorted(G1_degree.values()) != sorted(G2_degree.values()):
+        return False
+
+    # Initialize parameters and cache necessary information about degree and labels
+    graph_params, state_params = _initialize_parameters(
+        G1, G2, G2_degree, node_label, default_label
+    )
+
+    # Check that G1 and G2 have the same labels, and that the number of nodes per label is equal between the two graphs
+    if not _precheck_label_properties(graph_params):
+        return False
+
+    # Calculate the optimal node ordering
+    node_order = _matching_order(graph_params)
+
+    # Initialize the stack
+    stack = []
+    candidates = iter(
+        find_candidates(node_order[0], graph_params, state_params, G1_degree)
+    )
+    stack.append((node_order[0], candidates))
+
+    mapping = state_params.mapping
+    reverse_mapping = state_params.reverse_mapping
+
+    # Index of the node from the order, currently being examined
+    matching_node = 1
+
+    while stack:
+        current_node, candidate_nodes = stack[-1]
+
+        try:
+            candidate = next(candidate_nodes)
+        except StopIteration:
+            # If no remaining candidates, return to a previous state, and follow another branch
+            stack.pop()
+            matching_node -= 1
+            if stack:
+                # Pop the previously added u-v pair, and look for a different candidate v for u
+                popped_node1, _ = stack[-1]
+                popped_node2 = mapping[popped_node1]
+                mapping.pop(popped_node1)
+                reverse_mapping.pop(popped_node2)
+                restore_Tinout(popped_node1, popped_node2, graph_params, state_params)
+            continue
+
+        if _feasibility(current_node, candidate, graph_params, state_params):
+            # If this pair completes the mapping, yield a copy of it and keep searching for other isomorphisms
+            if len(mapping) == G2.number_of_nodes() - 1:
+                cp_mapping = mapping.copy()
+                cp_mapping[current_node] = candidate
+                yield cp_mapping
+                continue
+
+            # Feasibility rules pass, so extend the mapping and update the parameters
+            mapping[current_node] = candidate
+            reverse_mapping[candidate] = current_node
+            _update_Tinout(current_node, candidate, graph_params, state_params)
+            # Append the next node and its candidates to the stack
+            candidates = iter(
+                find_candidates(
+                    node_order[matching_node], graph_params, state_params, G1_degree
+                )
+            )
+            stack.append((node_order[matching_node], candidates))
+            matching_node += 1
+
+
+def _precheck_label_properties(graph_params):
+    G1, G2, G1_labels, G2_labels, nodes_of_G1Labels, nodes_of_G2Labels, _ = graph_params
+    if any(
+        label not in nodes_of_G1Labels or len(nodes_of_G1Labels[label]) != len(nodes)
+        for label, nodes in nodes_of_G2Labels.items()
+    ):
+        return False
+    return True
+
+
+def _initialize_parameters(G1, G2, G2_degree, node_label=None, default_label=-1):
+    """Initializes all the necessary parameters for VF2++
+
+    Parameters
+    ----------
+    G1,G2: NetworkX Graph or MultiGraph instances.
+        The two graphs to check for isomorphism or monomorphism
+
+    G2_degree: dict
+        The degree of every node in G2 (used to group the nodes of G2 by degree)
+
+    node_label: str, optional
+        The name of the node attribute used when comparing nodes
+
+    default_label: scalar
+        Default label used when a node is missing the `node_label` attribute
+
+    Returns
+    -------
+    graph_params: namedtuple
+        Contains all the Graph-related parameters:
+
+        G1,G2
+        G1_labels,G2_labels: dict
+
+    state_params: namedtuple
+        Contains all the State-related parameters:
+
+        mapping: dict
+            The mapping as extended so far. Maps nodes of G1 to nodes of G2
+
+        reverse_mapping: dict
+            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
+
+        T1, T2: set
+            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
+            neighbors of nodes that are.
+
+        T1_tilde, T2_tilde: set
+            Ti_tilde contains all the nodes from Gi that are neither in the mapping nor in Ti
+    """
+    G1_labels = dict(G1.nodes(data=node_label, default=default_label))
+    G2_labels = dict(G2.nodes(data=node_label, default=default_label))
+
+    graph_params = _GraphParameters(
+        G1,
+        G2,
+        G1_labels,
+        G2_labels,
+        nx.utils.groups(G1_labels),
+        nx.utils.groups(G2_labels),
+        nx.utils.groups(G2_degree),
+    )
+
+    T1, T1_in = set(), set()
+    T2, T2_in = set(), set()
+    if G1.is_directed():
+        T1_tilde, T1_tilde_in = (
+            set(G1.nodes()),
+            set(),
+        )  # todo: do we need Ti_tilde_in? What nodes does it have?
+        T2_tilde, T2_tilde_in = set(G2.nodes()), set()
+    else:
+        T1_tilde, T1_tilde_in = set(G1.nodes()), set()
+        T2_tilde, T2_tilde_in = set(G2.nodes()), set()
+
+    state_params = _StateParameters(
+        {},
+        {},
+        T1,
+        T1_in,
+        T1_tilde,
+        T1_tilde_in,
+        T2,
+        T2_in,
+        T2_tilde,
+        T2_tilde_in,
+    )
+
+    return graph_params, state_params
+
+
+def _matching_order(graph_params):
+    """The node ordering as introduced in VF2++.
+
+    Notes
+    -----
+    Taking into account the structure of the graph and the node labeling, the nodes are placed in an order such that
+    most of the unfruitful/infeasible branches of the search space can be pruned at high levels, significantly
+    decreasing the number of visited states. The premise is that the algorithm will be able to recognize
+    inconsistencies early and only descend deep into the search tree when it is needed.
+
+    Parameters
+    ----------
+    graph_params: namedtuple
+        Contains:
+
+            G1,G2: NetworkX Graph or MultiGraph instances.
+                The two graphs to check for isomorphism or monomorphism.
+
+            G1_labels,G2_labels: dict
+                The label of every node in G1 and G2 respectively.
+
+    Returns
+    -------
+    node_order: list
+        The ordering of the nodes.
+    """
+    G1, G2, G1_labels, _, _, nodes_of_G2Labels, _ = graph_params
+    if not G1 and not G2:
+        return []
+
+    if G1.is_directed():
+        G1 = G1.to_undirected(as_view=True)
+
+    V1_unordered = set(G1.nodes())
+    label_rarity = {label: len(nodes) for label, nodes in nodes_of_G2Labels.items()}
+    used_degrees = {node: 0 for node in G1}
+    node_order = []
+
+    while V1_unordered:
+        max_rarity = min(label_rarity[G1_labels[x]] for x in V1_unordered)
+        rarest_nodes = [
+            n for n in V1_unordered if label_rarity[G1_labels[n]] == max_rarity
+        ]
+        max_node = max(rarest_nodes, key=G1.degree)
+
+        for dlevel_nodes in nx.bfs_layers(G1, max_node):
+            nodes_to_add = dlevel_nodes.copy()
+            while nodes_to_add:
+                max_used_degree = max(used_degrees[n] for n in nodes_to_add)
+                max_used_degree_nodes = [
+                    n for n in nodes_to_add if used_degrees[n] == max_used_degree
+                ]
+                max_degree = max(G1.degree[n] for n in max_used_degree_nodes)
+                max_degree_nodes = [
+                    n for n in max_used_degree_nodes if G1.degree[n] == max_degree
+                ]
+                next_node = min(
+                    max_degree_nodes, key=lambda x: label_rarity[G1_labels[x]]
+                )
+
+                node_order.append(next_node)
+                for node in G1.neighbors(next_node):
+                    used_degrees[node] += 1
+
+                nodes_to_add.remove(next_node)
+                label_rarity[G1_labels[next_node]] -= 1
+                V1_unordered.discard(next_node)
+
+    return node_order
+
+
+def _find_candidates(
+    u, graph_params, state_params, G1_degree
+):  # todo: make the 4th argument the degree of u
+    """Given node u of G1, finds the candidates of u from G2.
+
+    Parameters
+    ----------
+    u: Graph node
+        The node from G1 for which to find the candidates from G2.
+
+    graph_params: namedtuple
+        Contains all the Graph-related parameters:
+
+        G1,G2: NetworkX Graph or MultiGraph instances.
+            The two graphs to check for isomorphism or monomorphism
+
+        G1_labels,G2_labels: dict
+            The label of every node in G1 and G2 respectively
+
+    state_params: namedtuple
+        Contains all the State-related parameters:
+
+        mapping: dict
+            The mapping as extended so far. Maps nodes of G1 to nodes of G2
+
+        reverse_mapping: dict
+            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
+
+        T1, T2: set
+            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
+            neighbors of nodes that are.
+
+        T1_tilde, T2_tilde: set
+            Ti_tilde contains all the nodes from Gi that are neither in the mapping nor in Ti
+
+    Returns
+    -------
+    candidates: set
+        The nodes from G2 which are candidates for u.
+    """
+    G1, G2, G1_labels, _, _, nodes_of_G2Labels, G2_nodes_of_degree = graph_params
+    mapping, reverse_mapping, _, _, _, _, _, _, T2_tilde, _ = state_params
+
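+    # Candidates must share u's label and degree; if u has no covered neighbors
+    # yet, they are drawn from T2_tilde, otherwise from the common G2-neighborhood
+    # of the images of u's already-mapped neighbors.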
+    covered_nbrs = [nbr for nbr in G1[u] if nbr in mapping]
+    if not covered_nbrs:
+        candidates = set(nodes_of_G2Labels[G1_labels[u]])
+        candidates.intersection_update(G2_nodes_of_degree[G1_degree[u]])
+        candidates.intersection_update(T2_tilde)
+        candidates.difference_update(reverse_mapping)
+        if G1.is_multigraph():
+            candidates.difference_update(
+                {
+                    node
+                    for node in candidates
+                    if G1.number_of_edges(u, u) != G2.number_of_edges(node, node)
+                }
+            )
+        return candidates
+
+    nbr1 = covered_nbrs[0]
+    common_nodes = set(G2[mapping[nbr1]])
+
+    for nbr1 in covered_nbrs[1:]:
+        common_nodes.intersection_update(G2[mapping[nbr1]])
+
+    common_nodes.difference_update(reverse_mapping)
+    common_nodes.intersection_update(G2_nodes_of_degree[G1_degree[u]])
+    common_nodes.intersection_update(nodes_of_G2Labels[G1_labels[u]])
+    if G1.is_multigraph():
+        common_nodes.difference_update(
+            {
+                node
+                for node in common_nodes
+                if G1.number_of_edges(u, u) != G2.number_of_edges(node, node)
+            }
+        )
+    return common_nodes
+
+
+def _find_candidates_Di(u, graph_params, state_params, G1_degree):
+    G1, G2, G1_labels, _, _, nodes_of_G2Labels, G2_nodes_of_degree = graph_params
+    mapping, reverse_mapping, _, _, _, _, _, _, T2_tilde, _ = state_params
+
+    covered_successors = [succ for succ in G1[u] if succ in mapping]
+    covered_predecessors = [pred for pred in G1.pred[u] if pred in mapping]
+
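+    # Directed analogue: a candidate must be a predecessor of the image of every
+    # mapped successor of u and a successor of the image of every mapped
+    # predecessor of u, again filtered by label, degree and self-loop multiplicity.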
+    if not (covered_successors or covered_predecessors):
+        candidates = set(nodes_of_G2Labels[G1_labels[u]])
+        candidates.intersection_update(G2_nodes_of_degree[G1_degree[u]])
+        candidates.intersection_update(T2_tilde)
+        candidates.difference_update(reverse_mapping)
+        if G1.is_multigraph():
+            candidates.difference_update(
+                {
+                    node
+                    for node in candidates
+                    if G1.number_of_edges(u, u) != G2.number_of_edges(node, node)
+                }
+            )
+        return candidates
+
+    if covered_successors:
+        succ1 = covered_successors[0]
+        common_nodes = set(G2.pred[mapping[succ1]])
+
+        for succ1 in covered_successors[1:]:
+            common_nodes.intersection_update(G2.pred[mapping[succ1]])
+    else:
+        pred1 = covered_predecessors.pop()
+        common_nodes = set(G2[mapping[pred1]])
+
+    for pred1 in covered_predecessors:
+        common_nodes.intersection_update(G2[mapping[pred1]])
+
+    common_nodes.difference_update(reverse_mapping)
+    common_nodes.intersection_update(G2_nodes_of_degree[G1_degree[u]])
+    common_nodes.intersection_update(nodes_of_G2Labels[G1_labels[u]])
+    if G1.is_multigraph():
+        common_nodes.difference_update(
+            {
+                node
+                for node in common_nodes
+                if G1.number_of_edges(u, u) != G2.number_of_edges(node, node)
+            }
+        )
+    return common_nodes
+
+
+def _feasibility(node1, node2, graph_params, state_params):
+    """Given a candidate pair of nodes u and v from G1 and G2 respectively, checks if it's feasible to extend the
+    mapping, i.e. if u and v can be matched.
+
+    Notes
+    -----
+    This function performs all the necessary checking by applying both consistency and cutting rules.
+
+    Parameters
+    ----------
+    node1, node2: Graph node
+        The candidate pair of nodes being checked for matching
+
+    graph_params: namedtuple
+        Contains all the Graph-related parameters:
+
+        G1,G2: NetworkX Graph or MultiGraph instances.
+            The two graphs to check for isomorphism or monomorphism
+
+        G1_labels,G2_labels: dict
+            The label of every node in G1 and G2 respectively
+
+    state_params: namedtuple
+        Contains all the State-related parameters:
+
+        mapping: dict
+            The mapping as extended so far. Maps nodes of G1 to nodes of G2
+
+        reverse_mapping: dict
+            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
+
+        T1, T2: set
+            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
+            neighbors of nodes that are.
+
+        T1_tilde, T2_tilde: set
+            Ti_tilde contains all the nodes from Gi that are neither in the mapping nor in Ti
+
+    Returns
+    -------
+    True if all checks are successful, False otherwise.
+    """
+    G1 = graph_params.G1
+
+    if _cut_PT(node1, node2, graph_params, state_params):
+        return False
+
+    if G1.is_multigraph():
+        if not _consistent_PT(node1, node2, graph_params, state_params):
+            return False
+
+    return True
+
+
+def _cut_PT(u, v, graph_params, state_params):
+    """Implements the cutting rules for the ISO problem.
+
+    Parameters
+    ----------
+    u, v: Graph node
+        The two candidate nodes being examined.
+
+    graph_params: namedtuple
+        Contains all the Graph-related parameters:
+
+        G1,G2: NetworkX Graph or MultiGraph instances.
+            The two graphs to check for isomorphism or monomorphism
+
+        G1_labels,G2_labels: dict
+            The label of every node in G1 and G2 respectively
+
+    state_params: namedtuple
+        Contains all the State-related parameters:
+
+        mapping: dict
+            The mapping as extended so far. Maps nodes of G1 to nodes of G2
+
+        reverse_mapping: dict
+            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
+
+        T1, T2: set
+            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
+            neighbors of nodes that are.
+
+        T1_tilde, T2_tilde: set
+            Ti_tilde contains all the nodes from Gi that are neither in the mapping nor in Ti
+
+    Returns
+    -------
+    True if we should prune this branch, i.e. the node pair failed the cutting checks. False otherwise.
+    """
+    G1, G2, G1_labels, G2_labels, _, _, _ = graph_params
+    (
+        _,
+        _,
+        T1,
+        T1_in,
+        T1_tilde,
+        _,
+        T2,
+        T2_in,
+        T2_tilde,
+        _,
+    ) = state_params
+
+    u_labels_predecessors, v_labels_predecessors = {}, {}
+    if G1.is_directed():
+        u_labels_predecessors = nx.utils.groups(
+            {n1: G1_labels[n1] for n1 in G1.pred[u]}
+        )
+        v_labels_predecessors = nx.utils.groups(
+            {n2: G2_labels[n2] for n2 in G2.pred[v]}
+        )
+
+        if set(u_labels_predecessors.keys()) != set(v_labels_predecessors.keys()):
+            return True
+
+    u_labels_successors = nx.utils.groups({n1: G1_labels[n1] for n1 in G1[u]})
+    v_labels_successors = nx.utils.groups({n2: G2_labels[n2] for n2 in G2[v]})
+
+    # if the neighbors of u do not have the same labels as those of v, the pair is not feasible
+    if set(u_labels_successors.keys()) != set(v_labels_successors.keys()):
+        return True
+
+    for label, G1_nbh in u_labels_successors.items():
+        G2_nbh = v_labels_successors[label]
+
+        if G1.is_multigraph():
+            # Check that the sorted edge multiplicities between u and G1_nbh match those between v and G2_nbh
+            u_nbrs_edges = sorted(G1.number_of_edges(u, x) for x in G1_nbh)
+            v_nbrs_edges = sorted(G2.number_of_edges(v, x) for x in G2_nbh)
+            if any(
+                u_nbr_edges != v_nbr_edges
+                for u_nbr_edges, v_nbr_edges in zip(u_nbrs_edges, v_nbrs_edges)
+            ):
+                return True
+
+        if len(T1.intersection(G1_nbh)) != len(T2.intersection(G2_nbh)):
+            return True
+        if len(T1_tilde.intersection(G1_nbh)) != len(T2_tilde.intersection(G2_nbh)):
+            return True
+        if G1.is_directed() and len(T1_in.intersection(G1_nbh)) != len(
+            T2_in.intersection(G2_nbh)
+        ):
+            return True
+
+    if not G1.is_directed():
+        return False
+
+    for label, G1_pred in u_labels_predecessors.items():
+        G2_pred = v_labels_predecessors[label]
+
+        if G1.is_multigraph():
+            # Check that the sorted edge multiplicities between u and G1_pred match those between v and G2_pred
+            u_pred_edges = sorted(G1.number_of_edges(u, x) for x in G1_pred)
+            v_pred_edges = sorted(G2.number_of_edges(v, x) for x in G2_pred)
+            if any(
+                u_nbr_edges != v_nbr_edges
+                for u_nbr_edges, v_nbr_edges in zip(u_pred_edges, v_pred_edges)
+            ):
+                return True
+
+        if len(T1.intersection(G1_pred)) != len(T2.intersection(G2_pred)):
+            return True
+        if len(T1_tilde.intersection(G1_pred)) != len(T2_tilde.intersection(G2_pred)):
+            return True
+        if len(T1_in.intersection(G1_pred)) != len(T2_in.intersection(G2_pred)):
+            return True
+
+    return False
+
+
+def _consistent_PT(u, v, graph_params, state_params):
+    """Checks the consistency of extending the mapping using the current node pair.
+
+    Parameters
+    ----------
+    u, v: Graph node
+        The two candidate nodes being examined.
+
+    graph_params: namedtuple
+        Contains all the Graph-related parameters:
+
+        G1,G2: NetworkX Graph or MultiGraph instances.
+            The two graphs to check for isomorphism or monomorphism
+
+        G1_labels,G2_labels: dict
+            The label of every node in G1 and G2 respectively
+
+    state_params: namedtuple
+        Contains all the State-related parameters:
+
+        mapping: dict
+            The mapping as extended so far. Maps nodes of G1 to nodes of G2
+
+        reverse_mapping: dict
+            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
+
+        T1, T2: set
+            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
+            neighbors of nodes that are.
+
+        T1_tilde, T2_tilde: set
+            Ti_tilde contains all the nodes from Gi that are neither in the mapping nor in Ti
+
+    Returns
+    -------
+    True if the pair passes all the consistency checks successfully. False otherwise.
+    """
+    G1, G2 = graph_params.G1, graph_params.G2
+    mapping, reverse_mapping = state_params.mapping, state_params.reverse_mapping
+
+    for neighbor in G1[u]:
+        if neighbor in mapping:
+            if G1.number_of_edges(u, neighbor) != G2.number_of_edges(
+                v, mapping[neighbor]
+            ):
+                return False
+
+    for neighbor in G2[v]:
+        if neighbor in reverse_mapping:
+            if G1.number_of_edges(u, reverse_mapping[neighbor]) != G2.number_of_edges(
+                v, neighbor
+            ):
+                return False
+
+    if not G1.is_directed():
+        return True
+
+    for predecessor in G1.pred[u]:
+        if predecessor in mapping:
+            if G1.number_of_edges(predecessor, u) != G2.number_of_edges(
+                mapping[predecessor], v
+            ):
+                return False
+
+    for predecessor in G2.pred[v]:
+        if predecessor in reverse_mapping:
+            if G1.number_of_edges(
+                reverse_mapping[predecessor], u
+            ) != G2.number_of_edges(predecessor, v):
+                return False
+
+    return True
+
+
+def _update_Tinout(new_node1, new_node2, graph_params, state_params):
+    """Updates the Ti/Ti_out (i=1,2) when a new node pair u-v is added to the mapping.
+
+    Notes
+    -----
+    This function should be called right after the feasibility checks have passed and node1 has been mapped to node2.
+    Its purpose is to avoid brute-force recomputation of Ti/Ti_tilde by iterating over all nodes of the graph and
+    checking which ones satisfy the necessary conditions. Instead, at every step of the algorithm we focus
+    exclusively on the two nodes that are being added to the mapping, incrementally updating Ti/Ti_tilde.
+
+    Parameters
+    ----------
+    new_node1, new_node2: Graph node
+        The two new nodes, added to the mapping.
+
+    graph_params: namedtuple
+        Contains all the Graph-related parameters:
+
+        G1,G2: NetworkX Graph or MultiGraph instances.
+            The two graphs to check for isomorphism or monomorphism
+
+        G1_labels,G2_labels: dict
+            The label of every node in G1 and G2 respectively
+
+    state_params: namedtuple
+        Contains all the State-related parameters:
+
+        mapping: dict
+            The mapping as extended so far. Maps nodes of G1 to nodes of G2
+
+        reverse_mapping: dict
+            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
+
+        T1, T2: set
+            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
+            neighbors of nodes that are.
+
+        T1_tilde, T2_tilde: set
+            Ti_tilde contains all the nodes from Gi that are neither in the mapping nor in Ti
+    """
+    G1, G2, _, _, _, _, _ = graph_params
+    (
+        mapping,
+        reverse_mapping,
+        T1,
+        T1_in,
+        T1_tilde,
+        T1_tilde_in,
+        T2,
+        T2_in,
+        T2_tilde,
+        T2_tilde_in,
+    ) = state_params
+
+    uncovered_successors_G1 = {succ for succ in G1[new_node1] if succ not in mapping}
+    uncovered_successors_G2 = {
+        succ for succ in G2[new_node2] if succ not in reverse_mapping
+    }
+
+    # Add the uncovered neighbors of node1 and node2 in T1 and T2 respectively
+    T1.update(uncovered_successors_G1)
+    T2.update(uncovered_successors_G2)
+    T1.discard(new_node1)
+    T2.discard(new_node2)
+
+    T1_tilde.difference_update(uncovered_successors_G1)
+    T2_tilde.difference_update(uncovered_successors_G2)
+    T1_tilde.discard(new_node1)
+    T2_tilde.discard(new_node2)
+
+    if not G1.is_directed():
+        return
+
+    uncovered_predecessors_G1 = {
+        pred for pred in G1.pred[new_node1] if pred not in mapping
+    }
+    uncovered_predecessors_G2 = {
+        pred for pred in G2.pred[new_node2] if pred not in reverse_mapping
+    }
+
+    T1_in.update(uncovered_predecessors_G1)
+    T2_in.update(uncovered_predecessors_G2)
+    T1_in.discard(new_node1)
+    T2_in.discard(new_node2)
+
+    T1_tilde.difference_update(uncovered_predecessors_G1)
+    T2_tilde.difference_update(uncovered_predecessors_G2)
+    T1_tilde.discard(new_node1)
+    T2_tilde.discard(new_node2)
+
+
+def _restore_Tinout(popped_node1, popped_node2, graph_params, state_params):
+    """Restores the previous version of Ti/Ti_out when a node pair is deleted from the mapping.
+
+    Parameters
+    ----------
+    popped_node1, popped_node2: Graph node
+        The two nodes deleted from the mapping.
+
+    graph_params: namedtuple
+        Contains all the Graph-related parameters:
+
+        G1,G2: NetworkX Graph or MultiGraph instances.
+            The two graphs to check for isomorphism or monomorphism
+
+        G1_labels,G2_labels: dict
+            The label of every node in G1 and G2 respectively
+
+    state_params: namedtuple
+        Contains all the State-related parameters:
+
+        mapping: dict
+            The mapping as extended so far. Maps nodes of G1 to nodes of G2
+
+        reverse_mapping: dict
+            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
+
+        T1, T2: set
+            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
+            neighbors of nodes that are.
+
+        T1_tilde, T2_tilde: set
+            Ti_tilde contains all the nodes from Gi that are neither in the mapping nor in Ti
+    """
+    # If the node we want to remove from the mapping has at least one covered neighbor, add it to T1.
+    G1, G2, _, _, _, _, _ = graph_params
+    (
+        mapping,
+        reverse_mapping,
+        T1,
+        T1_in,
+        T1_tilde,
+        T1_tilde_in,
+        T2,
+        T2_in,
+        T2_tilde,
+        T2_tilde_in,
+    ) = state_params
+
+    is_added = False
+    for neighbor in G1[popped_node1]:
+        if neighbor in mapping:
+            # if a neighbor of the excluded node1 is in the mapping, keep node1 in T1
+            is_added = True
+            T1.add(popped_node1)
+        else:
+            # check whether this neighbor has another covered neighbor; only if not, move it from T1 to T1_tilde
+            if any(nbr in mapping for nbr in G1[neighbor]):
+                continue
+            T1.discard(neighbor)
+            T1_tilde.add(neighbor)
+
+    # Case where the node is present in neither the mapping nor T1. By definition, it belongs to T1_tilde
+    if not is_added:
+        T1_tilde.add(popped_node1)
+
+    is_added = False
+    for neighbor in G2[popped_node2]:
+        if neighbor in reverse_mapping:
+            is_added = True
+            T2.add(popped_node2)
+        else:
+            if any(nbr in reverse_mapping for nbr in G2[neighbor]):
+                continue
+            T2.discard(neighbor)
+            T2_tilde.add(neighbor)
+
+    if not is_added:
+        T2_tilde.add(popped_node2)
+
+
+def _restore_Tinout_Di(popped_node1, popped_node2, graph_params, state_params):
+    # If the node we want to remove from the mapping has at least one covered neighbor, add it back to T1/T1_in.
+    G1, G2, _, _, _, _, _ = graph_params
+    (
+        mapping,
+        reverse_mapping,
+        T1,
+        T1_in,
+        T1_tilde,
+        T1_tilde_in,
+        T2,
+        T2_in,
+        T2_tilde,
+        T2_tilde_in,
+    ) = state_params
+
+    is_added = False
+    for successor in G1[popped_node1]:
+        if successor in mapping:
+            # if a successor of the excluded node1 is in the mapping, node1 belongs to T1_in
+            is_added = True
+            T1_in.add(popped_node1)
+        else:
+            # check whether this successor still has a covered predecessor/successor; if not, drop it from T1/T1_in
+            if not any(pred in mapping for pred in G1.pred[successor]):
+                T1.discard(successor)
+
+            if not any(succ in mapping for succ in G1[successor]):
+                T1_in.discard(successor)
+
+            if successor not in T1:
+                if successor not in T1_in:
+                    T1_tilde.add(successor)
+
+    for predecessor in G1.pred[popped_node1]:
+        if predecessor in mapping:
+            # if a predecessor of the excluded node1 is in the mapping, node1 belongs to T1
+            is_added = True
+            T1.add(popped_node1)
+        else:
+            # check whether this predecessor still has a covered predecessor/successor; if not, drop it from T1/T1_in
+            if not any(pred in mapping for pred in G1.pred[predecessor]):
+                T1.discard(predecessor)
+
+            if not any(succ in mapping for succ in G1[predecessor]):
+                T1_in.discard(predecessor)
+
+            if not (predecessor in T1 or predecessor in T1_in):
+                T1_tilde.add(predecessor)
+
+    # Case where the node is present in neither the mapping nor T1. By definition, it belongs to T1_tilde
+    if not is_added:
+        T1_tilde.add(popped_node1)
+
+    is_added = False
+    for successor in G2[popped_node2]:
+        if successor in reverse_mapping:
+            is_added = True
+            T2_in.add(popped_node2)
+        else:
+            if not any(pred in reverse_mapping for pred in G2.pred[successor]):
+                T2.discard(successor)
+
+            if not any(succ in reverse_mapping for succ in G2[successor]):
+                T2_in.discard(successor)
+
+            if successor not in T2:
+                if successor not in T2_in:
+                    T2_tilde.add(successor)
+
+    for predecessor in G2.pred[popped_node2]:
+        if predecessor in reverse_mapping:
+            # if a predecessor of the excluded node2 is in the mapping, node2 belongs to T2
+            is_added = True
+            T2.add(popped_node2)
+        else:
+            # check whether this predecessor still has a covered predecessor/successor; if not, drop it from T2/T2_in
+            if not any(pred in reverse_mapping for pred in G2.pred[predecessor]):
+                T2.discard(predecessor)
+
+            if not any(succ in reverse_mapping for succ in G2[predecessor]):
+                T2_in.discard(predecessor)
+
+            if not (predecessor in T2 or predecessor in T2_in):
+                T2_tilde.add(predecessor)
+
+    if not is_added:
+        T2_tilde.add(popped_node2)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py
new file mode 100644
index 00000000..6fcf8a15
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py
@@ -0,0 +1,192 @@
+"""
+Module to simplify the specification of user-defined equality functions for
+node and edge attributes during isomorphism checks.
+
+During the construction of an isomorphism, the algorithm considers two
+candidate nodes n1 in G1 and n2 in G2.  The graphs G1 and G2 are then
+compared with respect to properties involving n1 and n2, and if the checks
+succeed, then the candidate pair is accepted. NetworkX
+provides a simple mechanism for users to extend the comparisons to include
+node and edge attributes.
+
+Node attributes are handled by the node_match keyword. When considering
+n1 and n2, the algorithm passes their node attribute dictionaries to
+node_match, and if it returns False, then n1 and n2 cannot be
+considered to be isomorphic.
+
+Edge attributes are handled by the edge_match keyword. When considering
+n1 and n2, the algorithm must verify that outgoing edges from n1 are
+commensurate with the outgoing edges for n2. If the graph is directed,
+then a similar check is also performed for incoming edges.
+
+Focusing only on outgoing edges, we consider pairs of nodes (n1, v1) from
+G1 and (n2, v2) from G2. For graphs and digraphs, there is only one edge
+between (n1, v1) and only one edge between (n2, v2). Those edge attribute
+dictionaries are passed to edge_match, and if it returns False, then
+n1 and n2 cannot be considered isomorphic. For multigraphs and
+multidigraphs, there can be multiple edges between (n1, v1) and also
+multiple edges between (n2, v2).  Now, there must exist an isomorphism
+from "all the edges between (n1, v1)" to "all the edges between (n2, v2)".
+So, all of the edge attribute dictionaries are passed to edge_match, and
+it must determine if there is an isomorphism between the two sets of edges.
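+
+A minimal usage sketch (illustrative only; the "color" attribute name is
+arbitrary, and any attribute comparison would work):
+
+>>> import networkx as nx
+>>> from networkx.algorithms import isomorphism
+>>> G1, G2 = nx.path_graph(3), nx.path_graph(3)
+>>> nx.set_node_attributes(G1, {0: "a", 1: "b", 2: "a"}, "color")
+>>> nx.set_node_attributes(G2, {0: "a", 1: "b", 2: "a"}, "color")
+>>> nm = lambda d1, d2: d1.get("color") == d2.get("color")
+>>> gm = isomorphism.GraphMatcher(G1, G2, node_match=nm)
+>>> gm.is_isomorphic()
+True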
+"""
+
+from . import isomorphvf2 as vf2
+
+__all__ = ["GraphMatcher", "DiGraphMatcher", "MultiGraphMatcher", "MultiDiGraphMatcher"]
+
+
+def _semantic_feasibility(self, G1_node, G2_node):
+    """Returns True if mapping G1_node to G2_node is semantically feasible."""
+    # Make sure the nodes match
+    if self.node_match is not None:
+        nm = self.node_match(self.G1.nodes[G1_node], self.G2.nodes[G2_node])
+        if not nm:
+            return False
+
+    # Make sure the edges match
+    if self.edge_match is not None:
+        # Cached lookups
+        G1nbrs = self.G1_adj[G1_node]
+        G2nbrs = self.G2_adj[G2_node]
+        core_1 = self.core_1
+        edge_match = self.edge_match
+
+        for neighbor in G1nbrs:
+            # G1_node is not in core_1, so self-loops (the R_self case) must be handled separately
+            if neighbor == G1_node:
+                if G2_node in G2nbrs and not edge_match(
+                    G1nbrs[G1_node], G2nbrs[G2_node]
+                ):
+                    return False
+            elif neighbor in core_1:
+                G2_nbr = core_1[neighbor]
+                if G2_nbr in G2nbrs and not edge_match(
+                    G1nbrs[neighbor], G2nbrs[G2_nbr]
+                ):
+                    return False
+        # syntactic check has already verified that neighbors are symmetric
+
+    return True
+
+
+class GraphMatcher(vf2.GraphMatcher):
+    """VF2 isomorphism checker for undirected graphs."""
+
+    def __init__(self, G1, G2, node_match=None, edge_match=None):
+        """Initialize graph matcher.
+
+        Parameters
+        ----------
+        G1, G2: graph
+            The graphs to be tested.
+
+        node_match: callable
+            A function that returns True iff node n1 in G1 and n2 in G2
+            should be considered equal during the isomorphism test. The
+            function will be called like::
+
+               node_match(G1.nodes[n1], G2.nodes[n2])
+
+            That is, the function will receive the node attribute dictionaries
+            of the nodes under consideration. If None, then no attributes are
+            considered when testing for an isomorphism.
+
+        edge_match: callable
+            A function that returns True iff the edge attribute dictionary for
+            the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should be
+            considered equal during the isomorphism test. The function will be
+            called like::
+
+               edge_match(G1[u1][v1], G2[u2][v2])
+
+            That is, the function will receive the edge attribute dictionaries
+            of the edges under consideration. If None, then no attributes are
+            considered when testing for an isomorphism.
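+
+            As an illustrative sketch (the "weight" attribute is arbitrary), an
+            edge matcher could be written as::
+
+               edge_match=lambda e1, e2: e1.get("weight") == e2.get("weight")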
+
+        """
+        vf2.GraphMatcher.__init__(self, G1, G2)
+
+        self.node_match = node_match
+        self.edge_match = edge_match
+
+        # These will be modified during checks to minimize code repeat.
+        self.G1_adj = self.G1.adj
+        self.G2_adj = self.G2.adj
+
+    semantic_feasibility = _semantic_feasibility
+
+
+class DiGraphMatcher(vf2.DiGraphMatcher):
+    """VF2 isomorphism checker for directed graphs."""
+
+    def __init__(self, G1, G2, node_match=None, edge_match=None):
+        """Initialize graph matcher.
+
+        Parameters
+        ----------
+        G1, G2 : graph
+            The graphs to be tested.
+
+        node_match : callable
+            A function that returns True iff node n1 in G1 and n2 in G2
+            should be considered equal during the isomorphism test. The
+            function will be called like::
+
+               node_match(G1.nodes[n1], G2.nodes[n2])
+
+            That is, the function will receive the node attribute dictionaries
+            of the nodes under consideration. If None, then no attributes are
+            considered when testing for an isomorphism.
+
+        edge_match : callable
+            A function that returns True iff the edge attribute dictionary for
+            the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should be
+            considered equal during the isomorphism test. The function will be
+            called like::
+
+               edge_match(G1[u1][v1], G2[u2][v2])
+
+            That is, the function will receive the edge attribute dictionaries
+            of the edges under consideration. If None, then no attributes are
+            considered when testing for an isomorphism.
+
+        """
+        vf2.DiGraphMatcher.__init__(self, G1, G2)
+
+        self.node_match = node_match
+        self.edge_match = edge_match
+
+        # These will be modified during checks to minimize code repeat.
+        self.G1_adj = self.G1.adj
+        self.G2_adj = self.G2.adj
+
+    def semantic_feasibility(self, G1_node, G2_node):
+        """Returns True if mapping G1_node to G2_node is semantically feasible."""
+
+        # Test node_match and also test edge_match on successors
+        feasible = _semantic_feasibility(self, G1_node, G2_node)
+        if not feasible:
+            return False
+
+        # Test edge_match on predecessors
+        self.G1_adj = self.G1.pred
+        self.G2_adj = self.G2.pred
+        feasible = _semantic_feasibility(self, G1_node, G2_node)
+        self.G1_adj = self.G1.adj
+        self.G2_adj = self.G2.adj
+
+        return feasible
+
+
+# The "semantics" of edge_match are different for multi(di)graphs, but
+# the implementation is the same.  So, technically we do not need to
+# provide "multi" versions, but we do so to match NetworkX's base classes.
+
+
+class MultiGraphMatcher(GraphMatcher):
+    """VF2 isomorphism checker for undirected multigraphs."""
+
+
+class MultiDiGraphMatcher(DiGraphMatcher):
+    """VF2 isomorphism checker for directed multigraphs."""