path: root/.venv/lib/python3.12/site-packages/networkx/tests
author    S. Solomon Darnell  2025-03-28 21:52:21 -0500
committer S. Solomon Darnell  2025-03-28 21:52:21 -0500
commit    4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
tree      ee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/networkx/tests
parent    cc961e04ba734dd72309fb548a2f97d67d578813 (diff)
two versions of R2R are here (HEAD, master)
Diffstat (limited to '.venv/lib/python3.12/site-packages/networkx/tests')
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/tests/__init__.py                       0
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/tests/test_all_random_functions.py    250
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/tests/test_convert.py                 321
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/tests/test_convert_numpy.py           532
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/tests/test_convert_pandas.py          349
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/tests/test_convert_scipy.py           282
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/tests/test_exceptions.py               40
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/tests/test_import.py                   11
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/tests/test_lazy_imports.py             97
-rw-r--r--  .venv/lib/python3.12/site-packages/networkx/tests/test_relabel.py                 347
10 files changed, 2229 insertions, 0 deletions
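
The modules listed above are the upstream NetworkX test suite vendored into .venv by this commit. As an illustrative sketch only (not part of the diff), the vendored tests can be collected straight from that directory with pytest; the `slow` marker used in test_all_random_functions.py can be deselected:

import pytest

# Hypothetical invocation: run the vendored tests, skipping anything
# marked "slow" (the marker used by test_all_random_functions.py below).
pytest.main([".venv/lib/python3.12/site-packages/networkx/tests", "-m", "not slow"])
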
diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/test_all_random_functions.py b/.venv/lib/python3.12/site-packages/networkx/tests/test_all_random_functions.py
new file mode 100644
index 00000000..5e458150
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/tests/test_all_random_functions.py
@@ -0,0 +1,250 @@
+import pytest
+
+np = pytest.importorskip("numpy")
+import random
+
+import networkx as nx
+from networkx.algorithms import approximation as approx
+from networkx.algorithms import threshold
+
+progress = 0
+
+# store the random numbers after setting a global seed
+np.random.seed(42)
+np_rv = np.random.rand()
+random.seed(42)
+py_rv = random.random()
+
+
+def t(f, *args, **kwds):
+ """call one function and check if global RNG changed"""
+ global progress
+ progress += 1
+ print(progress, ",", end="")
+
+ f(*args, **kwds)
+
+ after_np_rv = np.random.rand()
+ # if np_rv != after_np_rv:
+ # print(np_rv, after_np_rv, "don't match np!")
+ assert np_rv == after_np_rv
+ np.random.seed(42)
+
+ after_py_rv = random.random()
+ # if py_rv != after_py_rv:
+ # print(py_rv, after_py_rv, "don't match py!")
+ assert py_rv == after_py_rv
+ random.seed(42)
+
+
+def run_all_random_functions(seed):
+ n = 20
+ m = 10
+ k = l = 2
+ s = v = 10
+ p = q = p1 = p2 = p_in = p_out = 0.4
+ alpha = radius = theta = 0.75
+ sizes = (20, 20, 10)
+ colors = [1, 2, 3]
+ G = nx.barbell_graph(12, 20)
+ H = nx.cycle_graph(3)
+ H.add_weighted_edges_from((u, v, 0.2) for u, v in H.edges)
+ deg_sequence = [3, 2, 1, 3, 2, 1, 3, 2, 1, 2, 1, 2, 1]
+ in_degree_sequence = w = sequence = aseq = bseq = deg_sequence
+
+ # print("starting...")
+ t(nx.maximal_independent_set, G, seed=seed)
+ t(nx.rich_club_coefficient, G, seed=seed, normalized=False)
+ t(nx.random_reference, G, seed=seed)
+ t(nx.lattice_reference, G, seed=seed)
+ t(nx.sigma, G, 1, 2, seed=seed)
+ t(nx.omega, G, 1, 2, seed=seed)
+ # print("out of smallworld.py")
+ t(nx.double_edge_swap, G, seed=seed)
+ # print("starting connected_double_edge_swap")
+ t(nx.connected_double_edge_swap, nx.complete_graph(9), seed=seed)
+ # print("ending connected_double_edge_swap")
+ t(nx.random_layout, G, seed=seed)
+ t(nx.fruchterman_reingold_layout, G, seed=seed)
+ t(nx.algebraic_connectivity, G, seed=seed)
+ t(nx.fiedler_vector, G, seed=seed)
+ t(nx.spectral_ordering, G, seed=seed)
+ # print('starting average_clustering')
+ t(approx.average_clustering, G, seed=seed)
+ t(approx.simulated_annealing_tsp, H, "greedy", source=1, seed=seed)
+ t(approx.threshold_accepting_tsp, H, "greedy", source=1, seed=seed)
+ t(
+ approx.traveling_salesman_problem,
+ H,
+ method=lambda G, weight: approx.simulated_annealing_tsp(
+ G, "greedy", weight, seed=seed
+ ),
+ )
+ t(
+ approx.traveling_salesman_problem,
+ H,
+ method=lambda G, weight: approx.threshold_accepting_tsp(
+ G, "greedy", weight, seed=seed
+ ),
+ )
+ t(nx.betweenness_centrality, G, seed=seed)
+ t(nx.edge_betweenness_centrality, G, seed=seed)
+ t(nx.approximate_current_flow_betweenness_centrality, G, seed=seed)
+ # print("kernighan")
+ t(nx.algorithms.community.kernighan_lin_bisection, G, seed=seed)
+ # nx.algorithms.community.asyn_lpa_communities(G, seed=seed)
+ t(nx.algorithms.tree.greedy_branching, G, seed=seed)
+ # print('done with graph argument functions')
+
+ t(nx.spectral_graph_forge, G, alpha, seed=seed)
+ t(nx.algorithms.community.asyn_fluidc, G, k, max_iter=1, seed=seed)
+ t(
+ nx.algorithms.connectivity.edge_augmentation.greedy_k_edge_augmentation,
+ G,
+ k,
+ seed=seed,
+ )
+ t(nx.algorithms.coloring.strategy_random_sequential, G, colors, seed=seed)
+
+ cs = ["d", "i", "i", "d", "d", "i"]
+ t(threshold.swap_d, cs, seed=seed)
+ t(nx.configuration_model, deg_sequence, seed=seed)
+ t(
+ nx.directed_configuration_model,
+ in_degree_sequence,
+ in_degree_sequence,
+ seed=seed,
+ )
+ t(nx.expected_degree_graph, w, seed=seed)
+ t(nx.random_degree_sequence_graph, sequence, seed=seed)
+ joint_degrees = {
+ 1: {4: 1},
+ 2: {2: 2, 3: 2, 4: 2},
+ 3: {2: 2, 4: 1},
+ 4: {1: 1, 2: 2, 3: 1},
+ }
+ t(nx.joint_degree_graph, joint_degrees, seed=seed)
+ joint_degree_sequence = [
+ (1, 0),
+ (1, 0),
+ (1, 0),
+ (2, 0),
+ (1, 0),
+ (2, 1),
+ (0, 1),
+ (0, 1),
+ ]
+ t(nx.random_clustered_graph, joint_degree_sequence, seed=seed)
+ constructor = [(3, 3, 0.5), (10, 10, 0.7)]
+ t(nx.random_shell_graph, constructor, seed=seed)
+ t(nx.random_triad, G.to_directed(), seed=seed)
+ mapping = {1: 0.4, 2: 0.3, 3: 0.3}
+ t(nx.utils.random_weighted_sample, mapping, k, seed=seed)
+ t(nx.utils.weighted_choice, mapping, seed=seed)
+ t(nx.algorithms.bipartite.configuration_model, aseq, bseq, seed=seed)
+ t(nx.algorithms.bipartite.preferential_attachment_graph, aseq, p, seed=seed)
+
+ def kernel_integral(u, w, z):
+ return z - w
+
+ t(nx.random_kernel_graph, n, kernel_integral, seed=seed)
+
+ sizes = [75, 75, 300]
+ probs = [[0.25, 0.05, 0.02], [0.05, 0.35, 0.07], [0.02, 0.07, 0.40]]
+ t(nx.stochastic_block_model, sizes, probs, seed=seed)
+ t(nx.random_partition_graph, sizes, p_in, p_out, seed=seed)
+
+ # print("starting generator functions")
+ t(threshold.random_threshold_sequence, n, p, seed=seed)
+ t(nx.tournament.random_tournament, n, seed=seed)
+ t(nx.relaxed_caveman_graph, l, k, p, seed=seed)
+ t(nx.planted_partition_graph, l, k, p_in, p_out, seed=seed)
+ t(nx.gaussian_random_partition_graph, n, s, v, p_in, p_out, seed=seed)
+ t(nx.gn_graph, n, seed=seed)
+ t(nx.gnr_graph, n, p, seed=seed)
+ t(nx.gnc_graph, n, seed=seed)
+ t(nx.scale_free_graph, n, seed=seed)
+ t(nx.directed.random_uniform_k_out_graph, n, k, seed=seed)
+ t(nx.random_k_out_graph, n, k, alpha, seed=seed)
+ N = 1000
+ t(nx.partial_duplication_graph, N, n, p, q, seed=seed)
+ t(nx.duplication_divergence_graph, n, p, seed=seed)
+ t(nx.random_geometric_graph, n, radius, seed=seed)
+ t(nx.soft_random_geometric_graph, n, radius, seed=seed)
+ t(nx.geographical_threshold_graph, n, theta, seed=seed)
+ t(nx.waxman_graph, n, seed=seed)
+ t(nx.navigable_small_world_graph, n, seed=seed)
+ t(nx.thresholded_random_geometric_graph, n, radius, theta, seed=seed)
+ t(nx.uniform_random_intersection_graph, n, m, p, seed=seed)
+ t(nx.k_random_intersection_graph, n, m, k, seed=seed)
+
+ t(nx.general_random_intersection_graph, n, 2, [0.1, 0.5], seed=seed)
+ t(nx.fast_gnp_random_graph, n, p, seed=seed)
+ t(nx.gnp_random_graph, n, p, seed=seed)
+ t(nx.dense_gnm_random_graph, n, m, seed=seed)
+ t(nx.gnm_random_graph, n, m, seed=seed)
+ t(nx.newman_watts_strogatz_graph, n, k, p, seed=seed)
+ t(nx.watts_strogatz_graph, n, k, p, seed=seed)
+ t(nx.connected_watts_strogatz_graph, n, k, p, seed=seed)
+ t(nx.random_regular_graph, 3, n, seed=seed)
+ t(nx.barabasi_albert_graph, n, m, seed=seed)
+ t(nx.extended_barabasi_albert_graph, n, m, p, q, seed=seed)
+ t(nx.powerlaw_cluster_graph, n, m, p, seed=seed)
+ t(nx.random_lobster, n, p1, p2, seed=seed)
+ t(nx.random_powerlaw_tree, n, seed=seed, tries=5000)
+ t(nx.random_powerlaw_tree_sequence, 10, seed=seed, tries=5000)
+ t(nx.random_labeled_tree, n, seed=seed)
+ t(nx.utils.powerlaw_sequence, n, seed=seed)
+ t(nx.utils.zipf_rv, 2.3, seed=seed)
+ cdist = [0.2, 0.4, 0.5, 0.7, 0.9, 1.0]
+ t(nx.utils.discrete_sequence, n, cdistribution=cdist, seed=seed)
+ t(nx.algorithms.bipartite.random_graph, n, m, p, seed=seed)
+ t(nx.algorithms.bipartite.gnmk_random_graph, n, m, k, seed=seed)
+ LFR = nx.generators.LFR_benchmark_graph
+ t(
+ LFR,
+ 25,
+ 3,
+ 1.5,
+ 0.1,
+ average_degree=3,
+ min_community=10,
+ seed=seed,
+ max_community=20,
+ )
+ t(nx.random_internet_as_graph, n, seed=seed)
+ # print("done")
+
+
+# choose to test an integer seed, or whether a single RNG can be used everywhere
+# np_rng = np.random.RandomState(14)
+# seed = np_rng
+# seed = 14
+
+
+@pytest.mark.slow
+# print("NetworkX Version:", nx.__version__)
+def test_rng_interface():
+ global progress
+
+ # try different kinds of seeds
+ for seed in [14, np.random.RandomState(14)]:
+ np.random.seed(42)
+ random.seed(42)
+ run_all_random_functions(seed)
+ progress = 0
+
+ # check that both global RNGs are unaffected
+ after_np_rv = np.random.rand()
+ # if np_rv != after_np_rv:
+ # print(np_rv, after_np_rv, "don't match np!")
+ assert np_rv == after_np_rv
+ after_py_rv = random.random()
+ # if py_rv != after_py_rv:
+ # print(py_rv, after_py_rv, "don't match py!")
+ assert py_rv == after_py_rv
+
+
+# print("\nDone testing seed:", seed)
+
+# test_rng_interface()
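
test_all_random_functions.py above exercises NetworkX's `seed` interface: every randomized function should accept either an integer or a numpy RandomState and must leave the global RNGs untouched. A minimal sketch of that contract, using only public NetworkX API and not taken from the diff:

import random

import numpy as np
import networkx as nx

np.random.seed(42)
random.seed(42)
before = (np.random.rand(), random.random())  # reference draws from the global RNGs

np.random.seed(42)
random.seed(42)
nx.gnp_random_graph(10, 0.4, seed=14)                         # integer seed
nx.gnp_random_graph(10, 0.4, seed=np.random.RandomState(14))  # RNG instance as seed

after = (np.random.rand(), random.random())
assert before == after  # the seeded calls did not consume the global RNG state
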
diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/test_convert.py b/.venv/lib/python3.12/site-packages/networkx/tests/test_convert.py
new file mode 100644
index 00000000..44bed943
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/tests/test_convert.py
@@ -0,0 +1,321 @@
+import pytest
+
+import networkx as nx
+from networkx.convert import (
+ from_dict_of_dicts,
+ from_dict_of_lists,
+ to_dict_of_dicts,
+ to_dict_of_lists,
+ to_networkx_graph,
+)
+from networkx.generators.classic import barbell_graph, cycle_graph
+from networkx.utils import edges_equal, graphs_equal, nodes_equal
+
+
+class TestConvert:
+ def edgelists_equal(self, e1, e2):
+ return sorted(sorted(e) for e in e1) == sorted(sorted(e) for e in e2)
+
+ def test_simple_graphs(self):
+ for dest, source in [
+ (to_dict_of_dicts, from_dict_of_dicts),
+ (to_dict_of_lists, from_dict_of_lists),
+ ]:
+ G = barbell_graph(10, 3)
+ G.graph = {}
+ dod = dest(G)
+
+ # Dict of [dicts, lists]
+ GG = source(dod)
+ assert graphs_equal(G, GG)
+ GW = to_networkx_graph(dod)
+ assert graphs_equal(G, GW)
+ GI = nx.Graph(dod)
+ assert graphs_equal(G, GI)
+
+ # With nodelist keyword
+ P4 = nx.path_graph(4)
+ P3 = nx.path_graph(3)
+ P4.graph = {}
+ P3.graph = {}
+ dod = dest(P4, nodelist=[0, 1, 2])
+ Gdod = nx.Graph(dod)
+ assert graphs_equal(Gdod, P3)
+
+ def test_exceptions(self):
+ # NX graph
+ class G:
+ adj = None
+
+ pytest.raises(nx.NetworkXError, to_networkx_graph, G)
+
+ # pygraphviz agraph
+ class G:
+ is_strict = None
+
+ pytest.raises(nx.NetworkXError, to_networkx_graph, G)
+
+ # Dict of [dicts, lists]
+ G = {"a": 0}
+ pytest.raises(TypeError, to_networkx_graph, G)
+
+ # list or generator of edges
+ class G:
+ next = None
+
+ pytest.raises(nx.NetworkXError, to_networkx_graph, G)
+
+ # no match
+ pytest.raises(nx.NetworkXError, to_networkx_graph, "a")
+
+ def test_digraphs(self):
+ for dest, source in [
+ (to_dict_of_dicts, from_dict_of_dicts),
+ (to_dict_of_lists, from_dict_of_lists),
+ ]:
+ G = cycle_graph(10)
+
+ # Dict of [dicts, lists]
+ dod = dest(G)
+ GG = source(dod)
+ assert nodes_equal(sorted(G.nodes()), sorted(GG.nodes()))
+ assert edges_equal(sorted(G.edges()), sorted(GG.edges()))
+ GW = to_networkx_graph(dod)
+ assert nodes_equal(sorted(G.nodes()), sorted(GW.nodes()))
+ assert edges_equal(sorted(G.edges()), sorted(GW.edges()))
+ GI = nx.Graph(dod)
+ assert nodes_equal(sorted(G.nodes()), sorted(GI.nodes()))
+ assert edges_equal(sorted(G.edges()), sorted(GI.edges()))
+
+ G = cycle_graph(10, create_using=nx.DiGraph)
+ dod = dest(G)
+ GG = source(dod, create_using=nx.DiGraph)
+ assert sorted(G.nodes()) == sorted(GG.nodes())
+ assert sorted(G.edges()) == sorted(GG.edges())
+ GW = to_networkx_graph(dod, create_using=nx.DiGraph)
+ assert sorted(G.nodes()) == sorted(GW.nodes())
+ assert sorted(G.edges()) == sorted(GW.edges())
+ GI = nx.DiGraph(dod)
+ assert sorted(G.nodes()) == sorted(GI.nodes())
+ assert sorted(G.edges()) == sorted(GI.edges())
+
+ def test_graph(self):
+ g = nx.cycle_graph(10)
+ G = nx.Graph()
+ G.add_nodes_from(g)
+ G.add_weighted_edges_from((u, v, u) for u, v in g.edges())
+
+ # Dict of dicts
+ dod = to_dict_of_dicts(G)
+ GG = from_dict_of_dicts(dod, create_using=nx.Graph)
+ assert nodes_equal(sorted(G.nodes()), sorted(GG.nodes()))
+ assert edges_equal(sorted(G.edges()), sorted(GG.edges()))
+ GW = to_networkx_graph(dod, create_using=nx.Graph)
+ assert nodes_equal(sorted(G.nodes()), sorted(GW.nodes()))
+ assert edges_equal(sorted(G.edges()), sorted(GW.edges()))
+ GI = nx.Graph(dod)
+ assert sorted(G.nodes()) == sorted(GI.nodes())
+ assert sorted(G.edges()) == sorted(GI.edges())
+
+ # Dict of lists
+ dol = to_dict_of_lists(G)
+ GG = from_dict_of_lists(dol, create_using=nx.Graph)
+ # dict of lists discards edge data, so compare against empty edge-data dicts
+ enone = [(u, v, {}) for (u, v, d) in G.edges(data=True)]
+ assert nodes_equal(sorted(G.nodes()), sorted(GG.nodes()))
+ assert edges_equal(enone, sorted(GG.edges(data=True)))
+ GW = to_networkx_graph(dol, create_using=nx.Graph)
+ assert nodes_equal(sorted(G.nodes()), sorted(GW.nodes()))
+ assert edges_equal(enone, sorted(GW.edges(data=True)))
+ GI = nx.Graph(dol)
+ assert nodes_equal(sorted(G.nodes()), sorted(GI.nodes()))
+ assert edges_equal(enone, sorted(GI.edges(data=True)))
+
+ def test_with_multiedges_self_loops(self):
+ G = cycle_graph(10)
+ XG = nx.Graph()
+ XG.add_nodes_from(G)
+ XG.add_weighted_edges_from((u, v, u) for u, v in G.edges())
+ XGM = nx.MultiGraph()
+ XGM.add_nodes_from(G)
+ XGM.add_weighted_edges_from((u, v, u) for u, v in G.edges())
+ XGM.add_edge(0, 1, weight=2) # multiedge
+ XGS = nx.Graph()
+ XGS.add_nodes_from(G)
+ XGS.add_weighted_edges_from((u, v, u) for u, v in G.edges())
+ XGS.add_edge(0, 0, weight=100) # self loop
+
+ # Dict of dicts
+ # with self loops, OK
+ dod = to_dict_of_dicts(XGS)
+ GG = from_dict_of_dicts(dod, create_using=nx.Graph)
+ assert nodes_equal(XGS.nodes(), GG.nodes())
+ assert edges_equal(XGS.edges(), GG.edges())
+ GW = to_networkx_graph(dod, create_using=nx.Graph)
+ assert nodes_equal(XGS.nodes(), GW.nodes())
+ assert edges_equal(XGS.edges(), GW.edges())
+ GI = nx.Graph(dod)
+ assert nodes_equal(XGS.nodes(), GI.nodes())
+ assert edges_equal(XGS.edges(), GI.edges())
+
+ # Dict of lists
+ # with self loops, OK
+ dol = to_dict_of_lists(XGS)
+ GG = from_dict_of_lists(dol, create_using=nx.Graph)
+ # dict of lists discards edge data, so compare against empty edge-data dicts
+ enone = [(u, v, {}) for (u, v, d) in XGS.edges(data=True)]
+ assert nodes_equal(sorted(XGS.nodes()), sorted(GG.nodes()))
+ assert edges_equal(enone, sorted(GG.edges(data=True)))
+ GW = to_networkx_graph(dol, create_using=nx.Graph)
+ assert nodes_equal(sorted(XGS.nodes()), sorted(GW.nodes()))
+ assert edges_equal(enone, sorted(GW.edges(data=True)))
+ GI = nx.Graph(dol)
+ assert nodes_equal(sorted(XGS.nodes()), sorted(GI.nodes()))
+ assert edges_equal(enone, sorted(GI.edges(data=True)))
+
+ # Dict of dicts
+ # with multiedges, OK
+ dod = to_dict_of_dicts(XGM)
+ GG = from_dict_of_dicts(dod, create_using=nx.MultiGraph, multigraph_input=True)
+ assert nodes_equal(sorted(XGM.nodes()), sorted(GG.nodes()))
+ assert edges_equal(sorted(XGM.edges()), sorted(GG.edges()))
+ GW = to_networkx_graph(dod, create_using=nx.MultiGraph, multigraph_input=True)
+ assert nodes_equal(sorted(XGM.nodes()), sorted(GW.nodes()))
+ assert edges_equal(sorted(XGM.edges()), sorted(GW.edges()))
+ GI = nx.MultiGraph(dod)
+ assert nodes_equal(sorted(XGM.nodes()), sorted(GI.nodes()))
+ assert sorted(XGM.edges()) == sorted(GI.edges())
+ GE = from_dict_of_dicts(dod, create_using=nx.MultiGraph, multigraph_input=False)
+ assert nodes_equal(sorted(XGM.nodes()), sorted(GE.nodes()))
+ assert sorted(XGM.edges()) != sorted(GE.edges())
+ GI = nx.MultiGraph(XGM)
+ assert nodes_equal(sorted(XGM.nodes()), sorted(GI.nodes()))
+ assert edges_equal(sorted(XGM.edges()), sorted(GI.edges()))
+ GM = nx.MultiGraph(G)
+ assert nodes_equal(sorted(GM.nodes()), sorted(G.nodes()))
+ assert edges_equal(sorted(GM.edges()), sorted(G.edges()))
+
+ # Dict of lists
+ # with multiedges, OK, but better to write as a DiGraph, else you'll
+ # get doubled edges
+ dol = to_dict_of_lists(G)
+ GG = from_dict_of_lists(dol, create_using=nx.MultiGraph)
+ assert nodes_equal(sorted(G.nodes()), sorted(GG.nodes()))
+ assert edges_equal(sorted(G.edges()), sorted(GG.edges()))
+ GW = to_networkx_graph(dol, create_using=nx.MultiGraph)
+ assert nodes_equal(sorted(G.nodes()), sorted(GW.nodes()))
+ assert edges_equal(sorted(G.edges()), sorted(GW.edges()))
+ GI = nx.MultiGraph(dol)
+ assert nodes_equal(sorted(G.nodes()), sorted(GI.nodes()))
+ assert edges_equal(sorted(G.edges()), sorted(GI.edges()))
+
+ def test_edgelists(self):
+ P = nx.path_graph(4)
+ e = [(0, 1), (1, 2), (2, 3)]
+ G = nx.Graph(e)
+ assert nodes_equal(sorted(G.nodes()), sorted(P.nodes()))
+ assert edges_equal(sorted(G.edges()), sorted(P.edges()))
+ assert edges_equal(sorted(G.edges(data=True)), sorted(P.edges(data=True)))
+
+ e = [(0, 1, {}), (1, 2, {}), (2, 3, {})]
+ G = nx.Graph(e)
+ assert nodes_equal(sorted(G.nodes()), sorted(P.nodes()))
+ assert edges_equal(sorted(G.edges()), sorted(P.edges()))
+ assert edges_equal(sorted(G.edges(data=True)), sorted(P.edges(data=True)))
+
+ e = ((n, n + 1) for n in range(3))
+ G = nx.Graph(e)
+ assert nodes_equal(sorted(G.nodes()), sorted(P.nodes()))
+ assert edges_equal(sorted(G.edges()), sorted(P.edges()))
+ assert edges_equal(sorted(G.edges(data=True)), sorted(P.edges(data=True)))
+
+ def test_directed_to_undirected(self):
+ edges1 = [(0, 1), (1, 2), (2, 0)]
+ edges2 = [(0, 1), (1, 2), (0, 2)]
+ assert self.edgelists_equal(nx.Graph(nx.DiGraph(edges1)).edges(), edges1)
+ assert self.edgelists_equal(nx.Graph(nx.DiGraph(edges2)).edges(), edges1)
+ assert self.edgelists_equal(nx.MultiGraph(nx.DiGraph(edges1)).edges(), edges1)
+ assert self.edgelists_equal(nx.MultiGraph(nx.DiGraph(edges2)).edges(), edges1)
+
+ assert self.edgelists_equal(
+ nx.MultiGraph(nx.MultiDiGraph(edges1)).edges(), edges1
+ )
+ assert self.edgelists_equal(
+ nx.MultiGraph(nx.MultiDiGraph(edges2)).edges(), edges1
+ )
+
+ assert self.edgelists_equal(nx.Graph(nx.MultiDiGraph(edges1)).edges(), edges1)
+ assert self.edgelists_equal(nx.Graph(nx.MultiDiGraph(edges2)).edges(), edges1)
+
+ def test_attribute_dict_integrity(self):
+ # we must not replace dict-like graph data structures with dicts
+ G = nx.Graph()
+ G.add_nodes_from("abc")
+ H = to_networkx_graph(G, create_using=nx.Graph)
+ assert list(H.nodes) == list(G.nodes)
+ H = nx.DiGraph(G)
+ assert list(H.nodes) == list(G.nodes)
+
+ def test_to_edgelist(self):
+ G = nx.Graph([(1, 1)])
+ elist = nx.to_edgelist(G, nodelist=list(G))
+ assert edges_equal(G.edges(data=True), elist)
+
+ def test_custom_node_attr_dict_safekeeping(self):
+ class custom_dict(dict):
+ pass
+
+ class Custom(nx.Graph):
+ node_attr_dict_factory = custom_dict
+
+ g = nx.Graph()
+ g.add_node(1, weight=1)
+
+ h = Custom(g)
+ assert isinstance(g._node[1], dict)
+ assert isinstance(h._node[1], custom_dict)
+
+ # this raises an exception
+ # h._node.update((n, dd.copy()) for n, dd in g.nodes.items())
+ # assert isinstance(h._node[1], custom_dict)
+
+
+@pytest.mark.parametrize(
+ "edgelist",
+ (
+ # Graph with no edge data
+ [(0, 1), (1, 2)],
+ # Graph with edge data
+ [(0, 1, {"weight": 1.0}), (1, 2, {"weight": 2.0})],
+ ),
+)
+def test_to_dict_of_dicts_with_edgedata_param(edgelist):
+ G = nx.Graph()
+ G.add_edges_from(edgelist)
+ # Innermost dict value == edge_data when edge_data is not None.
+ # When G already has edge data, it is overwritten by edge_data.
+ expected = {0: {1: 10}, 1: {0: 10, 2: 10}, 2: {1: 10}}
+ assert nx.to_dict_of_dicts(G, edge_data=10) == expected
+
+
+def test_to_dict_of_dicts_with_edgedata_and_nodelist():
+ G = nx.path_graph(5)
+ nodelist = [2, 3, 4]
+ expected = {2: {3: 10}, 3: {2: 10, 4: 10}, 4: {3: 10}}
+ assert nx.to_dict_of_dicts(G, nodelist=nodelist, edge_data=10) == expected
+
+
+def test_to_dict_of_dicts_with_edgedata_multigraph():
+ """Multi edge data overwritten when edge_data != None"""
+ G = nx.MultiGraph()
+ G.add_edge(0, 1, key="a")
+ G.add_edge(0, 1, key="b")
+ # Multi edge data lost when edge_data is not None
+ expected = {0: {1: 10}, 1: {0: 10}}
+ assert nx.to_dict_of_dicts(G, edge_data=10) == expected
+
+
+def test_to_networkx_graph_non_edgelist():
+ invalid_edgelist = [1, 2, 3]
+ with pytest.raises(nx.NetworkXError, match="Input is not a valid edge list"):
+ nx.to_networkx_graph(invalid_edgelist)
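
test_convert.py above exercises the dict-of-dicts and dict-of-lists converters. A small round-trip sketch for context (illustrative, not taken from the diff):

import networkx as nx
from networkx.utils import graphs_equal

G = nx.path_graph(3)
dod = nx.to_dict_of_dicts(G)   # {0: {1: {}}, 1: {0: {}, 2: {}}, 2: {1: {}}}
assert graphs_equal(nx.from_dict_of_dicts(dod), G)

dol = nx.to_dict_of_lists(G)   # {0: [1], 1: [0, 2], 2: [1]}; edge data is dropped
assert sorted(nx.from_dict_of_lists(dol).edges()) == sorted(G.edges())
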
diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/test_convert_numpy.py b/.venv/lib/python3.12/site-packages/networkx/tests/test_convert_numpy.py
new file mode 100644
index 00000000..1c39afde
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/tests/test_convert_numpy.py
@@ -0,0 +1,532 @@
+import itertools
+
+import pytest
+
+np = pytest.importorskip("numpy")
+npt = pytest.importorskip("numpy.testing")
+
+import networkx as nx
+from networkx.generators.classic import barbell_graph, cycle_graph, path_graph
+from networkx.utils import graphs_equal
+
+
+class TestConvertNumpyArray:
+ def setup_method(self):
+ self.G1 = barbell_graph(10, 3)
+ self.G2 = cycle_graph(10, create_using=nx.DiGraph)
+ self.G3 = self.create_weighted(nx.Graph())
+ self.G4 = self.create_weighted(nx.DiGraph())
+
+ def create_weighted(self, G):
+ g = cycle_graph(4)
+ G.add_nodes_from(g)
+ G.add_weighted_edges_from((u, v, 10 + u) for u, v in g.edges())
+ return G
+
+ def assert_equal(self, G1, G2):
+ assert sorted(G1.nodes()) == sorted(G2.nodes())
+ assert sorted(G1.edges()) == sorted(G2.edges())
+
+ def identity_conversion(self, G, A, create_using):
+ assert A.sum() > 0
+ GG = nx.from_numpy_array(A, create_using=create_using)
+ self.assert_equal(G, GG)
+ GW = nx.to_networkx_graph(A, create_using=create_using)
+ self.assert_equal(G, GW)
+ GI = nx.empty_graph(0, create_using).__class__(A)
+ self.assert_equal(G, GI)
+
+ def test_shape(self):
+ "Conversion from non-square array."
+ A = np.array([[1, 2, 3], [4, 5, 6]])
+ pytest.raises(nx.NetworkXError, nx.from_numpy_array, A)
+
+ def test_identity_graph_array(self):
+ "Conversion from graph to array to graph."
+ A = nx.to_numpy_array(self.G1)
+ self.identity_conversion(self.G1, A, nx.Graph())
+
+ def test_identity_digraph_array(self):
+ """Conversion from digraph to array to digraph."""
+ A = nx.to_numpy_array(self.G2)
+ self.identity_conversion(self.G2, A, nx.DiGraph())
+
+ def test_identity_weighted_graph_array(self):
+ """Conversion from weighted graph to array to weighted graph."""
+ A = nx.to_numpy_array(self.G3)
+ self.identity_conversion(self.G3, A, nx.Graph())
+
+ def test_identity_weighted_digraph_array(self):
+ """Conversion from weighted digraph to array to weighted digraph."""
+ A = nx.to_numpy_array(self.G4)
+ self.identity_conversion(self.G4, A, nx.DiGraph())
+
+ def test_nodelist(self):
+ """Conversion from graph to array to graph with nodelist."""
+ P4 = path_graph(4)
+ P3 = path_graph(3)
+ nodelist = list(P3)
+ A = nx.to_numpy_array(P4, nodelist=nodelist)
+ GA = nx.Graph(A)
+ self.assert_equal(GA, P3)
+
+ # Make nodelist ambiguous by containing duplicates.
+ nodelist += [nodelist[0]]
+ pytest.raises(nx.NetworkXError, nx.to_numpy_array, P3, nodelist=nodelist)
+
+ # Make nodelist invalid by including nonexistent nodes
+ nodelist = [-1, 0, 1]
+ with pytest.raises(
+ nx.NetworkXError,
+ match=f"Nodes {nodelist - P3.nodes} in nodelist is not in G",
+ ):
+ nx.to_numpy_array(P3, nodelist=nodelist)
+
+ def test_weight_keyword(self):
+ WP4 = nx.Graph()
+ WP4.add_edges_from((n, n + 1, {"weight": 0.5, "other": 0.3}) for n in range(3))
+ P4 = path_graph(4)
+ A = nx.to_numpy_array(P4)
+ np.testing.assert_equal(A, nx.to_numpy_array(WP4, weight=None))
+ np.testing.assert_equal(0.5 * A, nx.to_numpy_array(WP4))
+ np.testing.assert_equal(0.3 * A, nx.to_numpy_array(WP4, weight="other"))
+
+ def test_from_numpy_array_type(self):
+ A = np.array([[1]])
+ G = nx.from_numpy_array(A)
+ assert type(G[0][0]["weight"]) == int
+
+ A = np.array([[1]]).astype(float)
+ G = nx.from_numpy_array(A)
+ assert type(G[0][0]["weight"]) == float
+
+ A = np.array([[1]]).astype(str)
+ G = nx.from_numpy_array(A)
+ assert type(G[0][0]["weight"]) == str
+
+ A = np.array([[1]]).astype(bool)
+ G = nx.from_numpy_array(A)
+ assert type(G[0][0]["weight"]) == bool
+
+ A = np.array([[1]]).astype(complex)
+ G = nx.from_numpy_array(A)
+ assert type(G[0][0]["weight"]) == complex
+
+ A = np.array([[1]]).astype(object)
+ pytest.raises(TypeError, nx.from_numpy_array, A)
+
+ A = np.array([[[1, 1, 1], [1, 1, 1]], [[1, 1, 1], [1, 1, 1]]])
+ with pytest.raises(
+ nx.NetworkXError, match=f"Input array must be 2D, not {A.ndim}"
+ ):
+ g = nx.from_numpy_array(A)
+
+ def test_from_numpy_array_dtype(self):
+ dt = [("weight", float), ("cost", int)]
+ A = np.array([[(1.0, 2)]], dtype=dt)
+ G = nx.from_numpy_array(A)
+ assert type(G[0][0]["weight"]) == float
+ assert type(G[0][0]["cost"]) == int
+ assert G[0][0]["cost"] == 2
+ assert G[0][0]["weight"] == 1.0
+
+ def test_from_numpy_array_parallel_edges(self):
+ """Tests that the :func:`networkx.from_numpy_array` function
+ interprets integer weights as the number of parallel edges when
+ creating a multigraph.
+
+ """
+ A = np.array([[1, 1], [1, 2]])
+ # First, with a simple graph, each integer entry in the adjacency
+ # matrix is interpreted as the weight of a single edge in the graph.
+ expected = nx.DiGraph()
+ edges = [(0, 0), (0, 1), (1, 0)]
+ expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges])
+ expected.add_edge(1, 1, weight=2)
+ actual = nx.from_numpy_array(A, parallel_edges=True, create_using=nx.DiGraph)
+ assert graphs_equal(actual, expected)
+ actual = nx.from_numpy_array(A, parallel_edges=False, create_using=nx.DiGraph)
+ assert graphs_equal(actual, expected)
+ # Now each integer entry in the adjacency matrix is interpreted as the
+ # number of parallel edges in the graph if the appropriate keyword
+ # argument is specified.
+ edges = [(0, 0), (0, 1), (1, 0), (1, 1), (1, 1)]
+ expected = nx.MultiDiGraph()
+ expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges])
+ actual = nx.from_numpy_array(
+ A, parallel_edges=True, create_using=nx.MultiDiGraph
+ )
+ assert graphs_equal(actual, expected)
+ expected = nx.MultiDiGraph()
+ expected.add_edges_from(set(edges), weight=1)
+ # The sole self-loop (edge 0) on vertex 1 should have weight 2.
+ expected[1][1][0]["weight"] = 2
+ actual = nx.from_numpy_array(
+ A, parallel_edges=False, create_using=nx.MultiDiGraph
+ )
+ assert graphs_equal(actual, expected)
+
+ @pytest.mark.parametrize(
+ "dt",
+ (
+ None, # default
+ int, # integer dtype
+ np.dtype(
+ [("weight", "f8"), ("color", "i1")]
+ ), # Structured dtype with named fields
+ ),
+ )
+ def test_from_numpy_array_no_edge_attr(self, dt):
+ A = np.array([[0, 1], [1, 0]], dtype=dt)
+ G = nx.from_numpy_array(A, edge_attr=None)
+ assert "weight" not in G.edges[0, 1]
+ assert len(G.edges[0, 1]) == 0
+
+ def test_from_numpy_array_multiedge_no_edge_attr(self):
+ A = np.array([[0, 2], [2, 0]])
+ G = nx.from_numpy_array(A, create_using=nx.MultiDiGraph, edge_attr=None)
+ assert all("weight" not in e for _, e in G[0][1].items())
+ assert len(G[0][1][0]) == 0
+
+ def test_from_numpy_array_custom_edge_attr(self):
+ A = np.array([[0, 2], [3, 0]])
+ G = nx.from_numpy_array(A, edge_attr="cost")
+ assert "weight" not in G.edges[0, 1]
+ assert G.edges[0, 1]["cost"] == 3
+
+ def test_symmetric(self):
+ """Tests that a symmetric array has edges added only once to an
+ undirected multigraph when using :func:`networkx.from_numpy_array`.
+
+ """
+ A = np.array([[0, 1], [1, 0]])
+ G = nx.from_numpy_array(A, create_using=nx.MultiGraph)
+ expected = nx.MultiGraph()
+ expected.add_edge(0, 1, weight=1)
+ assert graphs_equal(G, expected)
+
+ def test_dtype_int_graph(self):
+ """Test that setting dtype int actually gives an integer array.
+
+ For more information, see GitHub pull request #1363.
+
+ """
+ G = nx.complete_graph(3)
+ A = nx.to_numpy_array(G, dtype=int)
+ assert A.dtype == int
+
+ def test_dtype_int_multigraph(self):
+ """Test that setting dtype int actually gives an integer array.
+
+ For more information, see GitHub pull request #1363.
+
+ """
+ G = nx.MultiGraph(nx.complete_graph(3))
+ A = nx.to_numpy_array(G, dtype=int)
+ assert A.dtype == int
+
+
+@pytest.fixture
+def multigraph_test_graph():
+ G = nx.MultiGraph()
+ G.add_edge(1, 2, weight=7)
+ G.add_edge(1, 2, weight=70)
+ return G
+
+
+@pytest.mark.parametrize(("operator", "expected"), ((sum, 77), (min, 7), (max, 70)))
+def test_numpy_multigraph(multigraph_test_graph, operator, expected):
+ A = nx.to_numpy_array(multigraph_test_graph, multigraph_weight=operator)
+ assert A[1, 0] == expected
+
+
+def test_to_numpy_array_multigraph_nodelist(multigraph_test_graph):
+ G = multigraph_test_graph
+ G.add_edge(0, 1, weight=3)
+ A = nx.to_numpy_array(G, nodelist=[1, 2])
+ assert A.shape == (2, 2)
+ assert A[1, 0] == 77
+
+
+@pytest.mark.parametrize(
+ "G, expected",
+ [
+ (nx.Graph(), np.array([[0, 1 + 2j], [1 + 2j, 0]], dtype=complex)),
+ (nx.DiGraph(), np.array([[0, 1 + 2j], [0, 0]], dtype=complex)),
+ ],
+)
+def test_to_numpy_array_complex_weights(G, expected):
+ G.add_edge(0, 1, weight=1 + 2j)
+ A = nx.to_numpy_array(G, dtype=complex)
+ npt.assert_array_equal(A, expected)
+
+
+def test_to_numpy_array_arbitrary_weights():
+ G = nx.DiGraph()
+ w = 922337203685477580102 # Out of range for int64
+ G.add_edge(0, 1, weight=w)  # value not representable by int64
+ A = nx.to_numpy_array(G, dtype=object)
+ expected = np.array([[0, w], [0, 0]], dtype=object)
+ npt.assert_array_equal(A, expected)
+
+ # Undirected
+ A = nx.to_numpy_array(G.to_undirected(), dtype=object)
+ expected = np.array([[0, w], [w, 0]], dtype=object)
+ npt.assert_array_equal(A, expected)
+
+
+@pytest.mark.parametrize(
+ "func, expected",
+ ((min, -1), (max, 10), (sum, 11), (np.mean, 11 / 3), (np.median, 2)),
+)
+def test_to_numpy_array_multiweight_reduction(func, expected):
+ """Test various functions for reducing multiedge weights."""
+ G = nx.MultiDiGraph()
+ weights = [-1, 2, 10.0]
+ for w in weights:
+ G.add_edge(0, 1, weight=w)
+ A = nx.to_numpy_array(G, multigraph_weight=func, dtype=float)
+ assert np.allclose(A, [[0, expected], [0, 0]])
+
+ # Undirected case
+ A = nx.to_numpy_array(G.to_undirected(), multigraph_weight=func, dtype=float)
+ assert np.allclose(A, [[0, expected], [expected, 0]])
+
+
+@pytest.mark.parametrize(
+ ("G, expected"),
+ [
+ (nx.Graph(), [[(0, 0), (10, 5)], [(10, 5), (0, 0)]]),
+ (nx.DiGraph(), [[(0, 0), (10, 5)], [(0, 0), (0, 0)]]),
+ ],
+)
+def test_to_numpy_array_structured_dtype_attrs_from_fields(G, expected):
+ """When `dtype` is structured (i.e. has names) and `weight` is None, use
+ the named fields of the dtype to look up edge attributes."""
+ G.add_edge(0, 1, weight=10, cost=5.0)
+ dtype = np.dtype([("weight", int), ("cost", int)])
+ A = nx.to_numpy_array(G, dtype=dtype, weight=None)
+ expected = np.asarray(expected, dtype=dtype)
+ npt.assert_array_equal(A, expected)
+
+
+def test_to_numpy_array_structured_dtype_single_attr_default():
+ G = nx.path_graph(3)
+ dtype = np.dtype([("weight", float)]) # A single named field
+ A = nx.to_numpy_array(G, dtype=dtype, weight=None)
+ expected = np.array([[0, 1, 0], [1, 0, 1], [0, 1, 0]], dtype=float)
+ npt.assert_array_equal(A["weight"], expected)
+
+
+@pytest.mark.parametrize(
+ ("field_name", "expected_attr_val"),
+ [
+ ("weight", 1),
+ ("cost", 3),
+ ],
+)
+def test_to_numpy_array_structured_dtype_single_attr(field_name, expected_attr_val):
+ G = nx.Graph()
+ G.add_edge(0, 1, cost=3)
+ dtype = np.dtype([(field_name, float)])
+ A = nx.to_numpy_array(G, dtype=dtype, weight=None)
+ expected = np.array([[0, expected_attr_val], [expected_attr_val, 0]], dtype=float)
+ npt.assert_array_equal(A[field_name], expected)
+
+
+@pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph))
+@pytest.mark.parametrize(
+ "edge",
+ [
+ (0, 1), # No edge attributes
+ (0, 1, {"weight": 10}), # One edge attr
+ (0, 1, {"weight": 5, "flow": -4}), # Multiple but not all edge attrs
+ (0, 1, {"weight": 2.0, "cost": 10, "flow": -45}), # All attrs
+ ],
+)
+def test_to_numpy_array_structured_dtype_multiple_fields(graph_type, edge):
+ G = graph_type([edge])
+ dtype = np.dtype([("weight", float), ("cost", float), ("flow", float)])
+ A = nx.to_numpy_array(G, dtype=dtype, weight=None)
+ for attr in dtype.names:
+ expected = nx.to_numpy_array(G, dtype=float, weight=attr)
+ npt.assert_array_equal(A[attr], expected)
+
+
+@pytest.mark.parametrize("G", (nx.Graph(), nx.DiGraph()))
+def test_to_numpy_array_structured_dtype_scalar_nonedge(G):
+ G.add_edge(0, 1, weight=10)
+ dtype = np.dtype([("weight", float), ("cost", float)])
+ A = nx.to_numpy_array(G, dtype=dtype, weight=None, nonedge=np.nan)
+ for attr in dtype.names:
+ expected = nx.to_numpy_array(G, dtype=float, weight=attr, nonedge=np.nan)
+ npt.assert_array_equal(A[attr], expected)
+
+
+@pytest.mark.parametrize("G", (nx.Graph(), nx.DiGraph()))
+def test_to_numpy_array_structured_dtype_nonedge_ary(G):
+ """Similar to the scalar case, except has a different non-edge value for
+ each named field."""
+ G.add_edge(0, 1, weight=10)
+ dtype = np.dtype([("weight", float), ("cost", float)])
+ nonedges = np.array([(0, np.inf)], dtype=dtype)
+ A = nx.to_numpy_array(G, dtype=dtype, weight=None, nonedge=nonedges)
+ for attr in dtype.names:
+ nonedge = nonedges[attr]
+ expected = nx.to_numpy_array(G, dtype=float, weight=attr, nonedge=nonedge)
+ npt.assert_array_equal(A[attr], expected)
+
+
+def test_to_numpy_array_structured_dtype_with_weight_raises():
+ """Using both a structured dtype (with named fields) and specifying a `weight`
+ parameter is ambiguous."""
+ G = nx.path_graph(3)
+ dtype = np.dtype([("weight", int), ("cost", int)])
+ exception_msg = "Specifying `weight` not supported for structured dtypes"
+ with pytest.raises(ValueError, match=exception_msg):
+ nx.to_numpy_array(G, dtype=dtype) # Default is weight="weight"
+ with pytest.raises(ValueError, match=exception_msg):
+ nx.to_numpy_array(G, dtype=dtype, weight="cost")
+
+
+@pytest.mark.parametrize("graph_type", (nx.MultiGraph, nx.MultiDiGraph))
+def test_to_numpy_array_structured_multigraph_raises(graph_type):
+ G = nx.path_graph(3, create_using=graph_type)
+ dtype = np.dtype([("weight", int), ("cost", int)])
+ with pytest.raises(nx.NetworkXError, match="Structured arrays are not supported"):
+ nx.to_numpy_array(G, dtype=dtype, weight=None)
+
+
+def test_from_numpy_array_nodelist_bad_size():
+ """An exception is raised when `len(nodelist) != A.shape[0]`."""
+ n = 5 # Number of nodes
+ A = np.diag(np.ones(n - 1), k=1) # Adj. matrix for P_n
+ expected = nx.path_graph(n)
+
+ assert graphs_equal(nx.from_numpy_array(A, edge_attr=None), expected)
+ nodes = list(range(n))
+ assert graphs_equal(
+ nx.from_numpy_array(A, edge_attr=None, nodelist=nodes), expected
+ )
+
+ # Too many node labels
+ nodes = list(range(n + 1))
+ with pytest.raises(ValueError, match="nodelist must have the same length as A"):
+ nx.from_numpy_array(A, nodelist=nodes)
+
+ # Too few node labels
+ nodes = list(range(n - 1))
+ with pytest.raises(ValueError, match="nodelist must have the same length as A"):
+ nx.from_numpy_array(A, nodelist=nodes)
+
+
+@pytest.mark.parametrize(
+ "nodes",
+ (
+ [4, 3, 2, 1, 0],
+ [9, 7, 1, 2, 8],
+ ["a", "b", "c", "d", "e"],
+ [(0, 0), (1, 1), (2, 3), (0, 2), (3, 1)],
+ ["A", 2, 7, "spam", (1, 3)],
+ ),
+)
+def test_from_numpy_array_nodelist(nodes):
+ A = np.diag(np.ones(4), k=1)
+ # Without edge attributes
+ expected = nx.relabel_nodes(
+ nx.path_graph(5), mapping=dict(enumerate(nodes)), copy=True
+ )
+ G = nx.from_numpy_array(A, edge_attr=None, nodelist=nodes)
+ assert graphs_equal(G, expected)
+
+ # With edge attributes
+ nx.set_edge_attributes(expected, 1.0, name="weight")
+ G = nx.from_numpy_array(A, nodelist=nodes)
+ assert graphs_equal(G, expected)
+
+
+@pytest.mark.parametrize(
+ "nodes",
+ (
+ [4, 3, 2, 1, 0],
+ [9, 7, 1, 2, 8],
+ ["a", "b", "c", "d", "e"],
+ [(0, 0), (1, 1), (2, 3), (0, 2), (3, 1)],
+ ["A", 2, 7, "spam", (1, 3)],
+ ),
+)
+def test_from_numpy_array_nodelist_directed(nodes):
+ A = np.diag(np.ones(4), k=1)
+ # Without edge attributes
+ H = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4)])
+ expected = nx.relabel_nodes(H, mapping=dict(enumerate(nodes)), copy=True)
+ G = nx.from_numpy_array(A, create_using=nx.DiGraph, edge_attr=None, nodelist=nodes)
+ assert graphs_equal(G, expected)
+
+ # With edge attributes
+ nx.set_edge_attributes(expected, 1.0, name="weight")
+ G = nx.from_numpy_array(A, create_using=nx.DiGraph, nodelist=nodes)
+ assert graphs_equal(G, expected)
+
+
+@pytest.mark.parametrize(
+ "nodes",
+ (
+ [4, 3, 2, 1, 0],
+ [9, 7, 1, 2, 8],
+ ["a", "b", "c", "d", "e"],
+ [(0, 0), (1, 1), (2, 3), (0, 2), (3, 1)],
+ ["A", 2, 7, "spam", (1, 3)],
+ ),
+)
+def test_from_numpy_array_nodelist_multigraph(nodes):
+ A = np.array(
+ [
+ [0, 1, 0, 0, 0],
+ [1, 0, 2, 0, 0],
+ [0, 2, 0, 3, 0],
+ [0, 0, 3, 0, 4],
+ [0, 0, 0, 4, 0],
+ ]
+ )
+
+ H = nx.MultiGraph()
+ for i, edge in enumerate(((0, 1), (1, 2), (2, 3), (3, 4))):
+ H.add_edges_from(itertools.repeat(edge, i + 1))
+ expected = nx.relabel_nodes(H, mapping=dict(enumerate(nodes)), copy=True)
+
+ G = nx.from_numpy_array(
+ A,
+ parallel_edges=True,
+ create_using=nx.MultiGraph,
+ edge_attr=None,
+ nodelist=nodes,
+ )
+ assert graphs_equal(G, expected)
+
+
+@pytest.mark.parametrize(
+ "nodes",
+ (
+ [4, 3, 2, 1, 0],
+ [9, 7, 1, 2, 8],
+ ["a", "b", "c", "d", "e"],
+ [(0, 0), (1, 1), (2, 3), (0, 2), (3, 1)],
+ ["A", 2, 7, "spam", (1, 3)],
+ ),
+)
+@pytest.mark.parametrize("graph", (nx.complete_graph, nx.cycle_graph, nx.wheel_graph))
+def test_from_numpy_array_nodelist_roundtrip(graph, nodes):
+ G = graph(5)
+ A = nx.to_numpy_array(G)
+ expected = nx.relabel_nodes(G, mapping=dict(enumerate(nodes)), copy=True)
+ H = nx.from_numpy_array(A, edge_attr=None, nodelist=nodes)
+ assert graphs_equal(H, expected)
+
+ # With an isolated node
+ G = graph(4)
+ G.add_node("foo")
+ A = nx.to_numpy_array(G)
+ expected = nx.relabel_nodes(G, mapping=dict(zip(G.nodes, nodes)), copy=True)
+ H = nx.from_numpy_array(A, edge_attr=None, nodelist=nodes)
+ assert graphs_equal(H, expected)
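
test_convert_numpy.py above covers array round trips, dtype handling, and the `parallel_edges`, `edge_attr`, and `nodelist` keywords. A basic round-trip sketch for context (illustrative, not taken from the diff):

import networkx as nx
from networkx.utils import graphs_equal

G = nx.cycle_graph(4)
A = nx.to_numpy_array(G, dtype=int)               # 4x4 adjacency matrix
H = nx.from_numpy_array(A)                        # each edge gains weight=1 from A
H_plain = nx.from_numpy_array(A, edge_attr=None)  # same edges, no weight attribute
assert graphs_equal(H_plain, G)
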
diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/test_convert_pandas.py b/.venv/lib/python3.12/site-packages/networkx/tests/test_convert_pandas.py
new file mode 100644
index 00000000..8c3f02a0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/tests/test_convert_pandas.py
@@ -0,0 +1,349 @@
+import pytest
+
+import networkx as nx
+from networkx.utils import edges_equal, graphs_equal, nodes_equal
+
+np = pytest.importorskip("numpy")
+pd = pytest.importorskip("pandas")
+
+
+class TestConvertPandas:
+ def setup_method(self):
+ self.rng = np.random.RandomState(seed=5)
+ ints = self.rng.randint(1, 11, size=(3, 2))
+ a = ["A", "B", "C"]
+ b = ["D", "A", "E"]
+ df = pd.DataFrame(ints, columns=["weight", "cost"])
+ df[0] = a # Column label 0 (int)
+ df["b"] = b # Column label 'b' (str)
+ self.df = df
+
+ mdf = pd.DataFrame([[4, 16, "A", "D"]], columns=["weight", "cost", 0, "b"])
+ self.mdf = pd.concat([df, mdf])
+
+ def test_exceptions(self):
+ G = pd.DataFrame(["a"]) # adj
+ pytest.raises(nx.NetworkXError, nx.to_networkx_graph, G)
+ G = pd.DataFrame(["a", 0.0]) # elist
+ pytest.raises(nx.NetworkXError, nx.to_networkx_graph, G)
+ df = pd.DataFrame([[1, 1], [1, 0]], dtype=int, index=[1, 2], columns=["a", "b"])
+ pytest.raises(nx.NetworkXError, nx.from_pandas_adjacency, df)
+
+ def test_from_edgelist_all_attr(self):
+ Gtrue = nx.Graph(
+ [
+ ("E", "C", {"cost": 9, "weight": 10}),
+ ("B", "A", {"cost": 1, "weight": 7}),
+ ("A", "D", {"cost": 7, "weight": 4}),
+ ]
+ )
+ G = nx.from_pandas_edgelist(self.df, 0, "b", True)
+ assert graphs_equal(G, Gtrue)
+ # MultiGraph
+ MGtrue = nx.MultiGraph(Gtrue)
+ MGtrue.add_edge("A", "D", cost=16, weight=4)
+ MG = nx.from_pandas_edgelist(self.mdf, 0, "b", True, nx.MultiGraph())
+ assert graphs_equal(MG, MGtrue)
+
+ def test_from_edgelist_multi_attr(self):
+ Gtrue = nx.Graph(
+ [
+ ("E", "C", {"cost": 9, "weight": 10}),
+ ("B", "A", {"cost": 1, "weight": 7}),
+ ("A", "D", {"cost": 7, "weight": 4}),
+ ]
+ )
+ G = nx.from_pandas_edgelist(self.df, 0, "b", ["weight", "cost"])
+ assert graphs_equal(G, Gtrue)
+
+ def test_from_edgelist_multi_attr_incl_target(self):
+ Gtrue = nx.Graph(
+ [
+ ("E", "C", {0: "C", "b": "E", "weight": 10}),
+ ("B", "A", {0: "B", "b": "A", "weight": 7}),
+ ("A", "D", {0: "A", "b": "D", "weight": 4}),
+ ]
+ )
+ G = nx.from_pandas_edgelist(self.df, 0, "b", [0, "b", "weight"])
+ assert graphs_equal(G, Gtrue)
+
+ def test_from_edgelist_multidigraph_and_edge_attr(self):
+ # example from issue #2374
+ edges = [
+ ("X1", "X4", {"Co": "zA", "Mi": 0, "St": "X1"}),
+ ("X1", "X4", {"Co": "zB", "Mi": 54, "St": "X2"}),
+ ("X1", "X4", {"Co": "zB", "Mi": 49, "St": "X3"}),
+ ("X1", "X4", {"Co": "zB", "Mi": 44, "St": "X4"}),
+ ("Y1", "Y3", {"Co": "zC", "Mi": 0, "St": "Y1"}),
+ ("Y1", "Y3", {"Co": "zC", "Mi": 34, "St": "Y2"}),
+ ("Y1", "Y3", {"Co": "zC", "Mi": 29, "St": "X2"}),
+ ("Y1", "Y3", {"Co": "zC", "Mi": 24, "St": "Y3"}),
+ ("Z1", "Z3", {"Co": "zD", "Mi": 0, "St": "Z1"}),
+ ("Z1", "Z3", {"Co": "zD", "Mi": 14, "St": "X3"}),
+ ]
+ Gtrue = nx.MultiDiGraph(edges)
+ data = {
+ "O": ["X1", "X1", "X1", "X1", "Y1", "Y1", "Y1", "Y1", "Z1", "Z1"],
+ "D": ["X4", "X4", "X4", "X4", "Y3", "Y3", "Y3", "Y3", "Z3", "Z3"],
+ "St": ["X1", "X2", "X3", "X4", "Y1", "Y2", "X2", "Y3", "Z1", "X3"],
+ "Co": ["zA", "zB", "zB", "zB", "zC", "zC", "zC", "zC", "zD", "zD"],
+ "Mi": [0, 54, 49, 44, 0, 34, 29, 24, 0, 14],
+ }
+ df = pd.DataFrame.from_dict(data)
+ G1 = nx.from_pandas_edgelist(
+ df, source="O", target="D", edge_attr=True, create_using=nx.MultiDiGraph
+ )
+ G2 = nx.from_pandas_edgelist(
+ df,
+ source="O",
+ target="D",
+ edge_attr=["St", "Co", "Mi"],
+ create_using=nx.MultiDiGraph,
+ )
+ assert graphs_equal(G1, Gtrue)
+ assert graphs_equal(G2, Gtrue)
+
+ def test_from_edgelist_one_attr(self):
+ Gtrue = nx.Graph(
+ [
+ ("E", "C", {"weight": 10}),
+ ("B", "A", {"weight": 7}),
+ ("A", "D", {"weight": 4}),
+ ]
+ )
+ G = nx.from_pandas_edgelist(self.df, 0, "b", "weight")
+ assert graphs_equal(G, Gtrue)
+
+ def test_from_edgelist_int_attr_name(self):
+ # note: this also tests that edge_attr can be `source`
+ Gtrue = nx.Graph(
+ [("E", "C", {0: "C"}), ("B", "A", {0: "B"}), ("A", "D", {0: "A"})]
+ )
+ G = nx.from_pandas_edgelist(self.df, 0, "b", 0)
+ assert graphs_equal(G, Gtrue)
+
+ def test_from_edgelist_invalid_attr(self):
+ pytest.raises(
+ nx.NetworkXError, nx.from_pandas_edgelist, self.df, 0, "b", "misspell"
+ )
+ pytest.raises(nx.NetworkXError, nx.from_pandas_edgelist, self.df, 0, "b", 1)
+ # see Issue #3562
+ edgeframe = pd.DataFrame([[0, 1], [1, 2], [2, 0]], columns=["s", "t"])
+ pytest.raises(
+ nx.NetworkXError, nx.from_pandas_edgelist, edgeframe, "s", "t", True
+ )
+ pytest.raises(
+ nx.NetworkXError, nx.from_pandas_edgelist, edgeframe, "s", "t", "weight"
+ )
+ pytest.raises(
+ nx.NetworkXError,
+ nx.from_pandas_edgelist,
+ edgeframe,
+ "s",
+ "t",
+ ["weight", "size"],
+ )
+
+ def test_from_edgelist_no_attr(self):
+ Gtrue = nx.Graph([("E", "C", {}), ("B", "A", {}), ("A", "D", {})])
+ G = nx.from_pandas_edgelist(self.df, 0, "b")
+ assert graphs_equal(G, Gtrue)
+
+ def test_from_edgelist(self):
+ # Pandas DataFrame
+ G = nx.cycle_graph(10)
+ G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges))
+
+ edgelist = nx.to_edgelist(G)
+ source = [s for s, t, d in edgelist]
+ target = [t for s, t, d in edgelist]
+ weight = [d["weight"] for s, t, d in edgelist]
+ edges = pd.DataFrame({"source": source, "target": target, "weight": weight})
+
+ GG = nx.from_pandas_edgelist(edges, edge_attr="weight")
+ assert nodes_equal(G.nodes(), GG.nodes())
+ assert edges_equal(G.edges(), GG.edges())
+ GW = nx.to_networkx_graph(edges, create_using=nx.Graph)
+ assert nodes_equal(G.nodes(), GW.nodes())
+ assert edges_equal(G.edges(), GW.edges())
+
+ def test_to_edgelist_default_source_or_target_col_exists(self):
+ G = nx.path_graph(10)
+ G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges))
+ nx.set_edge_attributes(G, 0, name="source")
+ pytest.raises(nx.NetworkXError, nx.to_pandas_edgelist, G)
+
+ # drop source column to test an exception raised for the target column
+ for u, v, d in G.edges(data=True):
+ d.pop("source", None)
+
+ nx.set_edge_attributes(G, 0, name="target")
+ pytest.raises(nx.NetworkXError, nx.to_pandas_edgelist, G)
+
+ def test_to_edgelist_custom_source_or_target_col_exists(self):
+ G = nx.path_graph(10)
+ G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges))
+ nx.set_edge_attributes(G, 0, name="source_col_name")
+ pytest.raises(
+ nx.NetworkXError, nx.to_pandas_edgelist, G, source="source_col_name"
+ )
+
+ # drop source column to test an exception raised for the target column
+ for u, v, d in G.edges(data=True):
+ d.pop("source_col_name", None)
+
+ nx.set_edge_attributes(G, 0, name="target_col_name")
+ pytest.raises(
+ nx.NetworkXError, nx.to_pandas_edgelist, G, target="target_col_name"
+ )
+
+ def test_to_edgelist_edge_key_col_exists(self):
+ G = nx.path_graph(10, create_using=nx.MultiGraph)
+ G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges()))
+ nx.set_edge_attributes(G, 0, name="edge_key_name")
+ pytest.raises(
+ nx.NetworkXError, nx.to_pandas_edgelist, G, edge_key="edge_key_name"
+ )
+
+ def test_from_adjacency(self):
+ nodelist = [1, 2]
+ dftrue = pd.DataFrame(
+ [[1, 1], [1, 0]], dtype=int, index=nodelist, columns=nodelist
+ )
+ G = nx.Graph([(1, 1), (1, 2)])
+ df = nx.to_pandas_adjacency(G, dtype=int)
+ pd.testing.assert_frame_equal(df, dftrue)
+
+ @pytest.mark.parametrize("graph", [nx.Graph, nx.MultiGraph])
+ def test_roundtrip(self, graph):
+ # edgelist
+ Gtrue = graph([(1, 1), (1, 2)])
+ df = nx.to_pandas_edgelist(Gtrue)
+ G = nx.from_pandas_edgelist(df, create_using=graph)
+ assert graphs_equal(Gtrue, G)
+ # adjacency
+ adj = {1: {1: {"weight": 1}, 2: {"weight": 1}}, 2: {1: {"weight": 1}}}
+ Gtrue = graph(adj)
+ df = nx.to_pandas_adjacency(Gtrue, dtype=int)
+ G = nx.from_pandas_adjacency(df, create_using=graph)
+ assert graphs_equal(Gtrue, G)
+
+ def test_from_adjacency_named(self):
+ # example from issue #3105
+ data = {
+ "A": {"A": 0, "B": 0, "C": 0},
+ "B": {"A": 1, "B": 0, "C": 0},
+ "C": {"A": 0, "B": 1, "C": 0},
+ }
+ dftrue = pd.DataFrame(data, dtype=np.intp)
+ df = dftrue[["A", "C", "B"]]
+ G = nx.from_pandas_adjacency(df, create_using=nx.DiGraph())
+ df = nx.to_pandas_adjacency(G, dtype=np.intp)
+ pd.testing.assert_frame_equal(df, dftrue)
+
+ @pytest.mark.parametrize("edge_attr", [["attr2", "attr3"], True])
+ def test_edgekey_with_multigraph(self, edge_attr):
+ df = pd.DataFrame(
+ {
+ "source": {"A": "N1", "B": "N2", "C": "N1", "D": "N1"},
+ "target": {"A": "N2", "B": "N3", "C": "N1", "D": "N2"},
+ "attr1": {"A": "F1", "B": "F2", "C": "F3", "D": "F4"},
+ "attr2": {"A": 1, "B": 0, "C": 0, "D": 0},
+ "attr3": {"A": 0, "B": 1, "C": 0, "D": 1},
+ }
+ )
+ Gtrue = nx.MultiGraph(
+ [
+ ("N1", "N2", "F1", {"attr2": 1, "attr3": 0}),
+ ("N2", "N3", "F2", {"attr2": 0, "attr3": 1}),
+ ("N1", "N1", "F3", {"attr2": 0, "attr3": 0}),
+ ("N1", "N2", "F4", {"attr2": 0, "attr3": 1}),
+ ]
+ )
+ # example from issue #4065
+ G = nx.from_pandas_edgelist(
+ df,
+ source="source",
+ target="target",
+ edge_attr=edge_attr,
+ edge_key="attr1",
+ create_using=nx.MultiGraph(),
+ )
+ assert graphs_equal(G, Gtrue)
+
+ df_roundtrip = nx.to_pandas_edgelist(G, edge_key="attr1")
+ df_roundtrip = df_roundtrip.sort_values("attr1")
+ df_roundtrip.index = ["A", "B", "C", "D"]
+ pd.testing.assert_frame_equal(
+ df, df_roundtrip[["source", "target", "attr1", "attr2", "attr3"]]
+ )
+
+ def test_edgekey_with_normal_graph_no_action(self):
+ Gtrue = nx.Graph(
+ [
+ ("E", "C", {"cost": 9, "weight": 10}),
+ ("B", "A", {"cost": 1, "weight": 7}),
+ ("A", "D", {"cost": 7, "weight": 4}),
+ ]
+ )
+ G = nx.from_pandas_edgelist(self.df, 0, "b", True, edge_key="weight")
+ assert graphs_equal(G, Gtrue)
+
+ def test_nonexisting_edgekey_raises(self):
+ with pytest.raises(nx.exception.NetworkXError):
+ nx.from_pandas_edgelist(
+ self.df,
+ source="source",
+ target="target",
+ edge_key="Not_real",
+ edge_attr=True,
+ create_using=nx.MultiGraph(),
+ )
+
+ def test_multigraph_with_edgekey_no_edgeattrs(self):
+ Gtrue = nx.MultiGraph()
+ Gtrue.add_edge(0, 1, key=0)
+ Gtrue.add_edge(0, 1, key=3)
+ df = nx.to_pandas_edgelist(Gtrue, edge_key="key")
+ expected = pd.DataFrame({"source": [0, 0], "target": [1, 1], "key": [0, 3]})
+ pd.testing.assert_frame_equal(expected, df)
+ G = nx.from_pandas_edgelist(df, edge_key="key", create_using=nx.MultiGraph)
+ assert graphs_equal(Gtrue, G)
+
+
+def test_to_pandas_adjacency_with_nodelist():
+ G = nx.complete_graph(5)
+ nodelist = [1, 4]
+ expected = pd.DataFrame(
+ [[0, 1], [1, 0]], dtype=int, index=nodelist, columns=nodelist
+ )
+ pd.testing.assert_frame_equal(
+ expected, nx.to_pandas_adjacency(G, nodelist, dtype=int)
+ )
+
+
+def test_to_pandas_edgelist_with_nodelist():
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (1, 2), (1, 3)], weight=2.0)
+ G.add_edge(0, 5, weight=100)
+ df = nx.to_pandas_edgelist(G, nodelist=[1, 2])
+ assert 0 not in df["source"].to_numpy()
+ assert 100 not in df["weight"].to_numpy()
+
+
+def test_from_pandas_adjacency_with_index_collisions():
+ """See gh-7407"""
+ df = pd.DataFrame(
+ [
+ [0, 1, 0, 0],
+ [0, 0, 1, 0],
+ [0, 0, 0, 1],
+ [0, 0, 0, 0],
+ ],
+ index=[1010001, 2, 1, 1010002],
+ columns=[1010001, 2, 1, 1010002],
+ )
+ G = nx.from_pandas_adjacency(df, create_using=nx.DiGraph)
+ expected = nx.DiGraph([(1010001, 2), (2, 1), (1, 1010002)])
+ assert nodes_equal(G.nodes, expected.nodes)
+ assert edges_equal(G.edges, expected.edges)
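
test_convert_pandas.py above covers DataFrame edge lists and adjacency frames. A small round-trip sketch for context (illustrative, not taken from the diff; the column names are the from_pandas_edgelist defaults):

import pandas as pd

import networkx as nx

df = pd.DataFrame({"source": ["A", "B"], "target": ["B", "C"], "weight": [3, 5]})
G = nx.from_pandas_edgelist(df, edge_attr="weight")  # default source/target columns
assert G["A"]["B"]["weight"] == 3

df_back = nx.to_pandas_edgelist(G)  # columns: source, target, weight
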
diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/test_convert_scipy.py b/.venv/lib/python3.12/site-packages/networkx/tests/test_convert_scipy.py
new file mode 100644
index 00000000..aa513b85
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/tests/test_convert_scipy.py
@@ -0,0 +1,282 @@
+import pytest
+
+np = pytest.importorskip("numpy")
+sp = pytest.importorskip("scipy")
+
+import networkx as nx
+from networkx.generators.classic import barbell_graph, cycle_graph, path_graph
+from networkx.utils import graphs_equal
+
+
+class TestConvertScipy:
+ def setup_method(self):
+ self.G1 = barbell_graph(10, 3)
+ self.G2 = cycle_graph(10, create_using=nx.DiGraph)
+
+ self.G3 = self.create_weighted(nx.Graph())
+ self.G4 = self.create_weighted(nx.DiGraph())
+
+ def test_exceptions(self):
+ class G:
+ format = None
+
+ pytest.raises(nx.NetworkXError, nx.to_networkx_graph, G)
+
+ def create_weighted(self, G):
+ g = cycle_graph(4)
+ e = list(g.edges())
+ source = [u for u, v in e]
+ dest = [v for u, v in e]
+ weight = [s + 10 for s in source]
+ ex = zip(source, dest, weight)
+ G.add_weighted_edges_from(ex)
+ return G
+
+ def identity_conversion(self, G, A, create_using):
+ GG = nx.from_scipy_sparse_array(A, create_using=create_using)
+ assert nx.is_isomorphic(G, GG)
+
+ GW = nx.to_networkx_graph(A, create_using=create_using)
+ assert nx.is_isomorphic(G, GW)
+
+ GI = nx.empty_graph(0, create_using).__class__(A)
+ assert nx.is_isomorphic(G, GI)
+
+ ACSR = A.tocsr()
+ GI = nx.empty_graph(0, create_using).__class__(ACSR)
+ assert nx.is_isomorphic(G, GI)
+
+ ACOO = A.tocoo()
+ GI = nx.empty_graph(0, create_using).__class__(ACOO)
+ assert nx.is_isomorphic(G, GI)
+
+ ACSC = A.tocsc()
+ GI = nx.empty_graph(0, create_using).__class__(ACSC)
+ assert nx.is_isomorphic(G, GI)
+
+ AD = A.todense()
+ GI = nx.empty_graph(0, create_using).__class__(AD)
+ assert nx.is_isomorphic(G, GI)
+
+ AA = A.toarray()
+ GI = nx.empty_graph(0, create_using).__class__(AA)
+ assert nx.is_isomorphic(G, GI)
+
+ def test_shape(self):
+ "Conversion from non-square sparse array."
+ A = sp.sparse.lil_array([[1, 2, 3], [4, 5, 6]])
+ pytest.raises(nx.NetworkXError, nx.from_scipy_sparse_array, A)
+
+ def test_identity_graph_matrix(self):
+ "Conversion from graph to sparse matrix to graph."
+ A = nx.to_scipy_sparse_array(self.G1)
+ self.identity_conversion(self.G1, A, nx.Graph())
+
+ def test_identity_digraph_matrix(self):
+ "Conversion from digraph to sparse matrix to digraph."
+ A = nx.to_scipy_sparse_array(self.G2)
+ self.identity_conversion(self.G2, A, nx.DiGraph())
+
+ def test_identity_weighted_graph_matrix(self):
+ """Conversion from weighted graph to sparse matrix to weighted graph."""
+ A = nx.to_scipy_sparse_array(self.G3)
+ self.identity_conversion(self.G3, A, nx.Graph())
+
+ def test_identity_weighted_digraph_matrix(self):
+ """Conversion from weighted digraph to sparse matrix to weighted digraph."""
+ A = nx.to_scipy_sparse_array(self.G4)
+ self.identity_conversion(self.G4, A, nx.DiGraph())
+
+ def test_nodelist(self):
+ """Conversion from graph to sparse matrix to graph with nodelist."""
+ P4 = path_graph(4)
+ P3 = path_graph(3)
+ nodelist = list(P3.nodes())
+ A = nx.to_scipy_sparse_array(P4, nodelist=nodelist)
+ GA = nx.Graph(A)
+ assert nx.is_isomorphic(GA, P3)
+
+ pytest.raises(nx.NetworkXError, nx.to_scipy_sparse_array, P3, nodelist=[])
+ # Test nodelist duplicates.
+ long_nl = nodelist + [0]
+ pytest.raises(nx.NetworkXError, nx.to_scipy_sparse_array, P3, nodelist=long_nl)
+
+ # Test nodelist contains non-nodes
+ non_nl = [-1, 0, 1, 2]
+ pytest.raises(nx.NetworkXError, nx.to_scipy_sparse_array, P3, nodelist=non_nl)
+
+ def test_weight_keyword(self):
+ WP4 = nx.Graph()
+ WP4.add_edges_from((n, n + 1, {"weight": 0.5, "other": 0.3}) for n in range(3))
+ P4 = path_graph(4)
+ A = nx.to_scipy_sparse_array(P4)
+ np.testing.assert_equal(
+ A.todense(), nx.to_scipy_sparse_array(WP4, weight=None).todense()
+ )
+ np.testing.assert_equal(
+ 0.5 * A.todense(), nx.to_scipy_sparse_array(WP4).todense()
+ )
+ np.testing.assert_equal(
+ 0.3 * A.todense(), nx.to_scipy_sparse_array(WP4, weight="other").todense()
+ )
+
+ def test_format_keyword(self):
+ WP4 = nx.Graph()
+ WP4.add_edges_from((n, n + 1, {"weight": 0.5, "other": 0.3}) for n in range(3))
+ P4 = path_graph(4)
+ A = nx.to_scipy_sparse_array(P4, format="csr")
+ np.testing.assert_equal(
+ A.todense(), nx.to_scipy_sparse_array(WP4, weight=None).todense()
+ )
+
+ A = nx.to_scipy_sparse_array(P4, format="csc")
+ np.testing.assert_equal(
+ A.todense(), nx.to_scipy_sparse_array(WP4, weight=None).todense()
+ )
+
+ A = nx.to_scipy_sparse_array(P4, format="coo")
+ np.testing.assert_equal(
+ A.todense(), nx.to_scipy_sparse_array(WP4, weight=None).todense()
+ )
+
+ A = nx.to_scipy_sparse_array(P4, format="bsr")
+ np.testing.assert_equal(
+ A.todense(), nx.to_scipy_sparse_array(WP4, weight=None).todense()
+ )
+
+ A = nx.to_scipy_sparse_array(P4, format="lil")
+ np.testing.assert_equal(
+ A.todense(), nx.to_scipy_sparse_array(WP4, weight=None).todense()
+ )
+
+ A = nx.to_scipy_sparse_array(P4, format="dia")
+ np.testing.assert_equal(
+ A.todense(), nx.to_scipy_sparse_array(WP4, weight=None).todense()
+ )
+
+ A = nx.to_scipy_sparse_array(P4, format="dok")
+ np.testing.assert_equal(
+ A.todense(), nx.to_scipy_sparse_array(WP4, weight=None).todense()
+ )
+
+ def test_format_keyword_raise(self):
+ with pytest.raises(nx.NetworkXError):
+ WP4 = nx.Graph()
+ WP4.add_edges_from(
+ (n, n + 1, {"weight": 0.5, "other": 0.3}) for n in range(3)
+ )
+ P4 = path_graph(4)
+ nx.to_scipy_sparse_array(P4, format="any_other")
+
+ def test_null_raise(self):
+ with pytest.raises(nx.NetworkXError):
+ nx.to_scipy_sparse_array(nx.Graph())
+
+ def test_empty(self):
+ G = nx.Graph()
+ G.add_node(1)
+ M = nx.to_scipy_sparse_array(G)
+ np.testing.assert_equal(M.toarray(), np.array([[0]]))
+
+ def test_ordering(self):
+ G = nx.DiGraph()
+ G.add_edge(1, 2)
+ G.add_edge(2, 3)
+ G.add_edge(3, 1)
+ M = nx.to_scipy_sparse_array(G, nodelist=[3, 2, 1])
+ np.testing.assert_equal(
+ M.toarray(), np.array([[0, 0, 1], [1, 0, 0], [0, 1, 0]])
+ )
+
+ def test_selfloop_graph(self):
+ G = nx.Graph([(1, 1)])
+ M = nx.to_scipy_sparse_array(G)
+ np.testing.assert_equal(M.toarray(), np.array([[1]]))
+
+ G.add_edges_from([(2, 3), (3, 4)])
+ M = nx.to_scipy_sparse_array(G, nodelist=[2, 3, 4])
+ np.testing.assert_equal(
+ M.toarray(), np.array([[0, 1, 0], [1, 0, 1], [0, 1, 0]])
+ )
+
+ def test_selfloop_digraph(self):
+ G = nx.DiGraph([(1, 1)])
+ M = nx.to_scipy_sparse_array(G)
+ np.testing.assert_equal(M.toarray(), np.array([[1]]))
+
+ G.add_edges_from([(2, 3), (3, 4)])
+ M = nx.to_scipy_sparse_array(G, nodelist=[2, 3, 4])
+ np.testing.assert_equal(
+ M.toarray(), np.array([[0, 1, 0], [0, 0, 1], [0, 0, 0]])
+ )
+
+ def test_from_scipy_sparse_array_parallel_edges(self):
+ """Tests that the :func:`networkx.from_scipy_sparse_array` function
+ interprets integer weights as the number of parallel edges when
+ creating a multigraph.
+
+ """
+ A = sp.sparse.csr_array([[1, 1], [1, 2]])
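+ # the entry A[1, 1] == 2 is the interesting case: a single edge of
+ # weight 2 for a simple graph, or two parallel self-loops for a
+ # multigraph when parallel_edges=True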
+ # First, with a simple graph, each integer entry in the adjacency
+ # matrix is interpreted as the weight of a single edge in the graph.
+ expected = nx.DiGraph()
+ edges = [(0, 0), (0, 1), (1, 0)]
+ expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges])
+ expected.add_edge(1, 1, weight=2)
+ actual = nx.from_scipy_sparse_array(
+ A, parallel_edges=True, create_using=nx.DiGraph
+ )
+ assert graphs_equal(actual, expected)
+ actual = nx.from_scipy_sparse_array(
+ A, parallel_edges=False, create_using=nx.DiGraph
+ )
+ assert graphs_equal(actual, expected)
+ # Now each integer entry in the adjacency matrix is interpreted as the
+ # number of parallel edges in the graph if the appropriate keyword
+ # argument is specified.
+ edges = [(0, 0), (0, 1), (1, 0), (1, 1), (1, 1)]
+ expected = nx.MultiDiGraph()
+ expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges])
+ actual = nx.from_scipy_sparse_array(
+ A, parallel_edges=True, create_using=nx.MultiDiGraph
+ )
+ assert graphs_equal(actual, expected)
+ expected = nx.MultiDiGraph()
+ expected.add_edges_from(set(edges), weight=1)
+ # The sole self-loop (key 0) on vertex 1 should have weight 2.
+ expected[1][1][0]["weight"] = 2
+ actual = nx.from_scipy_sparse_array(
+ A, parallel_edges=False, create_using=nx.MultiDiGraph
+ )
+ assert graphs_equal(actual, expected)
+
+ def test_symmetric(self):
+ """Tests that a symmetric matrix has edges added only once to an
+ undirected multigraph when using
+ :func:`networkx.from_scipy_sparse_array`.
+
+ """
+ A = sp.sparse.csr_array([[0, 1], [1, 0]])
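+ # the symmetric nonzeros at (0, 1) and (1, 0) must collapse into a
+ # single undirected edge, not two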
+ G = nx.from_scipy_sparse_array(A, create_using=nx.MultiGraph)
+ expected = nx.MultiGraph()
+ expected.add_edge(0, 1, weight=1)
+ assert graphs_equal(G, expected)
+
+
+@pytest.mark.parametrize("sparse_format", ("csr", "csc", "dok"))
+def test_from_scipy_sparse_array_formats(sparse_format):
+ """Test all formats supported by _generate_weighted_edges."""
+ # trinode complete graph with non-uniform edge weights
+ expected = nx.Graph()
+ expected.add_edges_from(
+ [
+ (0, 1, {"weight": 3}),
+ (0, 2, {"weight": 2}),
+ (1, 0, {"weight": 3}),
+ (1, 2, {"weight": 1}),
+ (2, 0, {"weight": 2}),
+ (2, 1, {"weight": 1}),
+ ]
+ )
+ A = sp.sparse.coo_array([[0, 3, 2], [3, 0, 1], [2, 1, 0]]).asformat(sparse_format)
+ assert graphs_equal(expected, nx.from_scipy_sparse_array(A))
diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/test_exceptions.py b/.venv/lib/python3.12/site-packages/networkx/tests/test_exceptions.py
new file mode 100644
index 00000000..cf59983c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/tests/test_exceptions.py
@@ -0,0 +1,40 @@
+import pytest
+
+import networkx as nx
+
+# smoke tests for exceptions
+
+
+def test_raises_networkxexception():
+ with pytest.raises(nx.NetworkXException):
+ raise nx.NetworkXException
+
+
+def test_raises_networkxerr():
+ with pytest.raises(nx.NetworkXError):
+ raise nx.NetworkXError
+
+
+def test_raises_networkx_pointless_concept():
+ with pytest.raises(nx.NetworkXPointlessConcept):
+ raise nx.NetworkXPointlessConcept
+
+
+def test_raises_networkxalgorithmerr():
+ with pytest.raises(nx.NetworkXAlgorithmError):
+ raise nx.NetworkXAlgorithmError
+
+
+def test_raises_networkx_unfeasible():
+ with pytest.raises(nx.NetworkXUnfeasible):
+ raise nx.NetworkXUnfeasible
+
+
+def test_raises_networkx_no_path():
+ with pytest.raises(nx.NetworkXNoPath):
+ raise nx.NetworkXNoPath
+
+
+def test_raises_networkx_unbounded():
+ with pytest.raises(nx.NetworkXUnbounded):
+ raise nx.NetworkXUnbounded
diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/test_import.py b/.venv/lib/python3.12/site-packages/networkx/tests/test_import.py
new file mode 100644
index 00000000..32aafdf2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/tests/test_import.py
@@ -0,0 +1,11 @@
+import pytest
+
+
+def test_namespace_alias():
+ with pytest.raises(ImportError):
+ from networkx import nx
+
+
+def test_namespace_nesting():
+ with pytest.raises(ImportError):
+ from networkx import networkx
diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/test_lazy_imports.py b/.venv/lib/python3.12/site-packages/networkx/tests/test_lazy_imports.py
new file mode 100644
index 00000000..9b7f1b1d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/tests/test_lazy_imports.py
@@ -0,0 +1,97 @@
+import importlib
+import sys
+import types
+
+import pytest
+
+import networkx.lazy_imports as lazy
+
+
+def test_lazy_import_basics():
+ math = lazy._lazy_import("math")
+ anything_not_real = lazy._lazy_import("anything_not_real")
+
+ # Now test that accessing attributes does what it should
+ assert math.sin(math.pi) == pytest.approx(0, 1e-6)
+ # poor man's pytest.raises for testing errors on attribute access
+ try:
+ anything_not_real.pi
+ assert False # Should not get here
+ except ModuleNotFoundError:
+ pass
+ assert isinstance(anything_not_real, lazy.DelayedImportErrorModule)
+ # see if it changes for second access
+ try:
+ anything_not_real.pi
+ assert False # Should not get here
+ except ModuleNotFoundError:
+ pass
+
+
+def test_lazy_import_impact_on_sys_modules():
+ math = lazy._lazy_import("math")
+ anything_not_real = lazy._lazy_import("anything_not_real")
+
+ assert type(math) == types.ModuleType
+ assert "math" in sys.modules
+ assert type(anything_not_real) == lazy.DelayedImportErrorModule
+ assert "anything_not_real" not in sys.modules
+
+ # only do this if numpy is installed
+ np_test = pytest.importorskip("numpy")
+ np = lazy._lazy_import("numpy")
+ assert type(np) == types.ModuleType
+ assert "numpy" in sys.modules
+
+ np.pi # trigger load of numpy
+
+ assert type(np) == types.ModuleType
+ assert "numpy" in sys.modules
+
+
+def test_lazy_import_nonbuiltins():
+ sp = lazy._lazy_import("scipy")
+ np = lazy._lazy_import("numpy")
+ if isinstance(sp, lazy.DelayedImportErrorModule):
+ try:
+ sp.special.erf
+ assert False
+ except ModuleNotFoundError:
+ pass
+ elif isinstance(np, lazy.DelayedImportErrorModule):
+ try:
+ np.sin(np.pi)
+ assert False
+ except ModuleNotFoundError:
+ pass
+ else:
+ assert sp.special.erf(np.pi) == pytest.approx(1, 1e-4)
+
+
+def test_lazy_attach():
+ name = "mymod"
+ submods = ["mysubmodule", "anothersubmodule"]
+ myall = {"not_real_submod": ["some_var_or_func"]}
+
+ locls = {
+ "attach": lazy.attach,
+ "name": name,
+ "submods": submods,
+ "myall": myall,
+ }
+ s = "__getattr__, __lazy_dir__, __all__ = attach(name, submods, myall)"
+
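+ # running the assignment through exec stands in for executing it at
+ # module top level, which is where attach() is normally used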
+ exec(s, {}, locls)
+ expected = {
+ "attach": lazy.attach,
+ "name": name,
+ "submods": submods,
+ "myall": myall,
+ "__getattr__": None,
+ "__lazy_dir__": None,
+ "__all__": None,
+ }
+ assert locls.keys() == expected.keys()
+ for k, v in expected.items():
+ if v is not None:
+ assert locls[k] == v
diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/test_relabel.py b/.venv/lib/python3.12/site-packages/networkx/tests/test_relabel.py
new file mode 100644
index 00000000..0ebf4d3e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/tests/test_relabel.py
@@ -0,0 +1,347 @@
+import pytest
+
+import networkx as nx
+from networkx.generators.classic import empty_graph
+from networkx.utils import edges_equal, nodes_equal
+
+
+class TestRelabel:
+ def test_convert_node_labels_to_integers(self):
+ # test that empty graph converts fine for all options
+ G = empty_graph()
+ H = nx.convert_node_labels_to_integers(G, 100)
+ assert list(H.nodes()) == []
+ assert list(H.edges()) == []
+
+ for opt in ["default", "sorted", "increasing degree", "decreasing degree"]:
+ G = empty_graph()
+ H = nx.convert_node_labels_to_integers(G, 100, ordering=opt)
+ assert list(H.nodes()) == []
+ assert list(H.edges()) == []
+
+ G = empty_graph()
+ G.add_edges_from([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")])
+ H = nx.convert_node_labels_to_integers(G)
+ degH = (d for n, d in H.degree())
+ degG = (d for n, d in G.degree())
+ assert sorted(degH) == sorted(degG)
+
+ H = nx.convert_node_labels_to_integers(G, 1000)
+ degH = (d for n, d in H.degree())
+ degG = (d for n, d in G.degree())
+ assert sorted(degH) == sorted(degG)
+ assert nodes_equal(H.nodes(), [1000, 1001, 1002, 1003])
+
+ H = nx.convert_node_labels_to_integers(G, ordering="increasing degree")
+ degH = (d for n, d in H.degree())
+ degG = (d for n, d in G.degree())
+ assert sorted(degH) == sorted(degG)
+ assert H.degree(0) == 1
+ assert H.degree(1) == 2
+ assert H.degree(2) == 2
+ assert H.degree(3) == 3
+
+ H = nx.convert_node_labels_to_integers(G, ordering="decreasing degree")
+ degH = (d for n, d in H.degree())
+ degG = (d for n, d in G.degree())
+ assert sorted(degH) == sorted(degG)
+ assert H.degree(0) == 3
+ assert H.degree(1) == 2
+ assert H.degree(2) == 2
+ assert H.degree(3) == 1
+
+ H = nx.convert_node_labels_to_integers(
+ G, ordering="increasing degree", label_attribute="label"
+ )
+ degH = (d for n, d in H.degree())
+ degG = (d for n, d in G.degree())
+ assert sorted(degH) == sorted(degG)
+ assert H.degree(0) == 1
+ assert H.degree(1) == 2
+ assert H.degree(2) == 2
+ assert H.degree(3) == 3
+
+ # check mapping
+ assert H.nodes[3]["label"] == "C"
+ assert H.nodes[0]["label"] == "D"
+ assert H.nodes[1]["label"] == "A" or H.nodes[2]["label"] == "A"
+ assert H.nodes[1]["label"] == "B" or H.nodes[2]["label"] == "B"
+
+ def test_convert_to_integers2(self):
+ G = empty_graph()
+ G.add_edges_from([("C", "D"), ("A", "B"), ("A", "C"), ("B", "C")])
+ H = nx.convert_node_labels_to_integers(G, ordering="sorted")
+ degH = (d for n, d in H.degree())
+ degG = (d for n, d in G.degree())
+ assert sorted(degH) == sorted(degG)
+
+ H = nx.convert_node_labels_to_integers(
+ G, ordering="sorted", label_attribute="label"
+ )
+ assert H.nodes[0]["label"] == "A"
+ assert H.nodes[1]["label"] == "B"
+ assert H.nodes[2]["label"] == "C"
+ assert H.nodes[3]["label"] == "D"
+
+ def test_convert_to_integers_raise(self):
+ with pytest.raises(nx.NetworkXError):
+ G = nx.Graph()
+ H = nx.convert_node_labels_to_integers(G, ordering="increasing age")
+
+ def test_relabel_nodes_copy(self):
+ G = nx.empty_graph()
+ G.add_edges_from([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")])
+ mapping = {"A": "aardvark", "B": "bear", "C": "cat", "D": "dog"}
+ H = nx.relabel_nodes(G, mapping)
+ assert nodes_equal(H.nodes(), ["aardvark", "bear", "cat", "dog"])
+
+ def test_relabel_nodes_function(self):
+ G = nx.empty_graph()
+ G.add_edges_from([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")])
+ # function mapping no longer encouraged but works
+
+ def mapping(n):
+ return ord(n)
+
+ H = nx.relabel_nodes(G, mapping)
+ assert nodes_equal(H.nodes(), [65, 66, 67, 68])
+
+ def test_relabel_nodes_callable_type(self):
+ G = nx.path_graph(4)
+ H = nx.relabel_nodes(G, str)
+ assert nodes_equal(H.nodes, ["0", "1", "2", "3"])
+
+ @pytest.mark.parametrize("non_mc", ("0123", ["0", "1", "2", "3"]))
+ def test_relabel_nodes_non_mapping_or_callable(self, non_mc):
+ """If `mapping` is neither a Callable or a Mapping, an exception
+ should be raised."""
+ G = nx.path_graph(4)
+ with pytest.raises(AttributeError):
+ nx.relabel_nodes(G, non_mc)
+
+ def test_relabel_nodes_graph(self):
+ G = nx.Graph([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")])
+ mapping = {"A": "aardvark", "B": "bear", "C": "cat", "D": "dog"}
+ H = nx.relabel_nodes(G, mapping)
+ assert nodes_equal(H.nodes(), ["aardvark", "bear", "cat", "dog"])
+
+ def test_relabel_nodes_orderedgraph(self):
+ G = nx.Graph()
+ G.add_nodes_from([1, 2, 3])
+ G.add_edges_from([(1, 3), (2, 3)])
+ mapping = {1: "a", 2: "b", 3: "c"}
+ H = nx.relabel_nodes(G, mapping)
+ assert list(H.nodes) == ["a", "b", "c"]
+
+ def test_relabel_nodes_digraph(self):
+ G = nx.DiGraph([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")])
+ mapping = {"A": "aardvark", "B": "bear", "C": "cat", "D": "dog"}
+ H = nx.relabel_nodes(G, mapping, copy=False)
+ assert nodes_equal(H.nodes(), ["aardvark", "bear", "cat", "dog"])
+
+ def test_relabel_nodes_multigraph(self):
+ G = nx.MultiGraph([("a", "b"), ("a", "b")])
+ mapping = {"a": "aardvark", "b": "bear"}
+ G = nx.relabel_nodes(G, mapping, copy=False)
+ assert nodes_equal(G.nodes(), ["aardvark", "bear"])
+ assert edges_equal(G.edges(), [("aardvark", "bear"), ("aardvark", "bear")])
+
+ def test_relabel_nodes_multidigraph(self):
+ G = nx.MultiDiGraph([("a", "b"), ("a", "b")])
+ mapping = {"a": "aardvark", "b": "bear"}
+ G = nx.relabel_nodes(G, mapping, copy=False)
+ assert nodes_equal(G.nodes(), ["aardvark", "bear"])
+ assert edges_equal(G.edges(), [("aardvark", "bear"), ("aardvark", "bear")])
+
+ def test_relabel_isolated_nodes_to_same(self):
+ G = nx.Graph()
+ G.add_nodes_from(range(4))
+ mapping = {1: 1}
+ H = nx.relabel_nodes(G, mapping, copy=False)
+ assert nodes_equal(H.nodes(), list(range(4)))
+
+ def test_relabel_nodes_missing(self):
+ G = nx.Graph([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")])
+ mapping = {0: "aardvark"}
+ # copy=True
+ H = nx.relabel_nodes(G, mapping, copy=True)
+ assert nodes_equal(H.nodes, G.nodes)
+ # copy=False
+ GG = G.copy()
+ nx.relabel_nodes(G, mapping, copy=False)
+ assert nodes_equal(G.nodes, GG.nodes)
+
+ def test_relabel_copy_name(self):
+ G = nx.Graph()
+ H = nx.relabel_nodes(G, {}, copy=True)
+ assert H.graph == G.graph
+ H = nx.relabel_nodes(G, {}, copy=False)
+ assert H.graph == G.graph
+ G.name = "first"
+ H = nx.relabel_nodes(G, {}, copy=True)
+ assert H.graph == G.graph
+ H = nx.relabel_nodes(G, {}, copy=False)
+ assert H.graph == G.graph
+
+ def test_relabel_toposort(self):
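+ # shifting every integer label by +/-1 in place means old and new labels
+ # overlap; relabel_nodes has to apply the mapping in an order where the
+ # shifted labels never collide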
+ K4 = nx.complete_graph(4)
+ G = nx.complete_graph(4)
+ G = nx.relabel_nodes(G, {i: i + 1 for i in range(4)}, copy=False)
+ assert nx.is_isomorphic(K4, G)
+ G = nx.complete_graph(4)
+ G = nx.relabel_nodes(G, {i: i - 1 for i in range(4)}, copy=False)
+ assert nx.is_isomorphic(K4, G)
+
+ def test_relabel_selfloop(self):
+ G = nx.DiGraph([(1, 1), (1, 2), (2, 3)])
+ G = nx.relabel_nodes(G, {1: "One", 2: "Two", 3: "Three"}, copy=False)
+ assert nodes_equal(G.nodes(), ["One", "Three", "Two"])
+ G = nx.MultiDiGraph([(1, 1), (1, 2), (2, 3)])
+ G = nx.relabel_nodes(G, {1: "One", 2: "Two", 3: "Three"}, copy=False)
+ assert nodes_equal(G.nodes(), ["One", "Three", "Two"])
+ G = nx.MultiDiGraph([(1, 1)])
+ G = nx.relabel_nodes(G, {1: 0}, copy=False)
+ assert nodes_equal(G.nodes(), [0])
+
+ def test_relabel_multidigraph_inout_merge_nodes(self):
+ for MG in (nx.MultiGraph, nx.MultiDiGraph):
+ for cc in (True, False):
+ G = MG([(0, 4), (1, 4), (4, 2), (4, 3)])
+ G[0][4][0]["value"] = "a"
+ G[1][4][0]["value"] = "b"
+ G[4][2][0]["value"] = "c"
+ G[4][3][0]["value"] = "d"
+ G.add_edge(0, 4, key="x", value="e")
+ G.add_edge(4, 3, key="x", value="f")
+ mapping = {0: 9, 1: 9, 2: 9, 3: 9}
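+ # nodes 0-3 all merge into node 9; directed graphs end up with 3 edges
+ # in each direction between 9 and 4, undirected graphs keep all 6 keyed
+ # edges between them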
+ H = nx.relabel_nodes(G, mapping, copy=cc)
+ # No ordering on keys enforced
+ assert {"value": "a"} in H[9][4].values()
+ assert {"value": "b"} in H[9][4].values()
+ assert {"value": "c"} in H[4][9].values()
+ assert len(H[4][9]) == (3 if G.is_directed() else 6)
+ assert {"value": "d"} in H[4][9].values()
+ assert {"value": "e"} in H[9][4].values()
+ assert {"value": "f"} in H[4][9].values()
+ assert len(H[9][4]) == (3 if G.is_directed() else 6)
+
+ def test_relabel_multigraph_merge_inplace(self):
+ G = nx.MultiGraph([(0, 1), (0, 2), (0, 3), (0, 1), (0, 2), (0, 3)])
+ G[0][1][0]["value"] = "a"
+ G[0][2][0]["value"] = "b"
+ G[0][3][0]["value"] = "c"
+ mapping = {1: 4, 2: 4, 3: 4}
+ nx.relabel_nodes(G, mapping, copy=False)
+ # No ordering on keys enforced
+ assert {"value": "a"} in G[0][4].values()
+ assert {"value": "b"} in G[0][4].values()
+ assert {"value": "c"} in G[0][4].values()
+
+ def test_relabel_multidigraph_merge_inplace(self):
+ G = nx.MultiDiGraph([(0, 1), (0, 2), (0, 3)])
+ G[0][1][0]["value"] = "a"
+ G[0][2][0]["value"] = "b"
+ G[0][3][0]["value"] = "c"
+ mapping = {1: 4, 2: 4, 3: 4}
+ nx.relabel_nodes(G, mapping, copy=False)
+ # No ordering on keys enforced
+ assert {"value": "a"} in G[0][4].values()
+ assert {"value": "b"} in G[0][4].values()
+ assert {"value": "c"} in G[0][4].values()
+
+ def test_relabel_multidigraph_inout_copy(self):
+ G = nx.MultiDiGraph([(0, 4), (1, 4), (4, 2), (4, 3)])
+ G[0][4][0]["value"] = "a"
+ G[1][4][0]["value"] = "b"
+ G[4][2][0]["value"] = "c"
+ G[4][3][0]["value"] = "d"
+ G.add_edge(0, 4, key="x", value="e")
+ G.add_edge(4, 3, key="x", value="f")
+ mapping = {0: 9, 1: 9, 2: 9, 3: 9}
+ H = nx.relabel_nodes(G, mapping, copy=True)
+ # No ordering on keys enforced
+ assert {"value": "a"} in H[9][4].values()
+ assert {"value": "b"} in H[9][4].values()
+ assert {"value": "c"} in H[4][9].values()
+ assert len(H[4][9]) == 3
+ assert {"value": "d"} in H[4][9].values()
+ assert {"value": "e"} in H[9][4].values()
+ assert {"value": "f"} in H[4][9].values()
+ assert len(H[9][4]) == 3
+
+ def test_relabel_multigraph_merge_copy(self):
+ G = nx.MultiGraph([(0, 1), (0, 2), (0, 3)])
+ G[0][1][0]["value"] = "a"
+ G[0][2][0]["value"] = "b"
+ G[0][3][0]["value"] = "c"
+ mapping = {1: 4, 2: 4, 3: 4}
+ H = nx.relabel_nodes(G, mapping, copy=True)
+ assert {"value": "a"} in H[0][4].values()
+ assert {"value": "b"} in H[0][4].values()
+ assert {"value": "c"} in H[0][4].values()
+
+ def test_relabel_multidigraph_merge_copy(self):
+ G = nx.MultiDiGraph([(0, 1), (0, 2), (0, 3)])
+ G[0][1][0]["value"] = "a"
+ G[0][2][0]["value"] = "b"
+ G[0][3][0]["value"] = "c"
+ mapping = {1: 4, 2: 4, 3: 4}
+ H = nx.relabel_nodes(G, mapping, copy=True)
+ assert {"value": "a"} in H[0][4].values()
+ assert {"value": "b"} in H[0][4].values()
+ assert {"value": "c"} in H[0][4].values()
+
+ def test_relabel_multigraph_nonnumeric_key(self):
+ for MG in (nx.MultiGraph, nx.MultiDiGraph):
+ for cc in (True, False):
+ G = MG()
+ G.add_edge(0, 1, key="I", value="a")
+ G.add_edge(0, 2, key="II", value="b")
+ G.add_edge(0, 3, key="II", value="c")
+ mapping = {1: 4, 2: 4, 3: 4}
+ G = nx.relabel_nodes(G, mapping, copy=cc)
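+ # merging 2 and 3 onto 4 maps two edges to key "II"; the asserts below
+ # expect the collision to be resolved with a fresh integer key 0
+ # alongside the original "I" and "II" keys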
+ assert {"value": "a"} in G[0][4].values()
+ assert {"value": "b"} in G[0][4].values()
+ assert {"value": "c"} in G[0][4].values()
+ assert 0 in G[0][4]
+ assert "I" in G[0][4]
+ assert "II" in G[0][4]
+
+ def test_relabel_circular(self):
+ G = nx.path_graph(3)
+ mapping = {0: 1, 1: 0}
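+ # a swap like 0 <-> 1 cannot be applied in place, so copy=False raises
+ # NetworkXUnfeasible while copy=True works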
+ H = nx.relabel_nodes(G, mapping, copy=True)
+ with pytest.raises(nx.NetworkXUnfeasible):
+ H = nx.relabel_nodes(G, mapping, copy=False)
+
+ def test_relabel_preserve_node_order_full_mapping_with_copy_true(self):
+ G = nx.path_graph(3)
+ original_order = list(G.nodes())
+ mapping = {2: "a", 1: "b", 0: "c"} # dictionary keys out of order on purpose
+ H = nx.relabel_nodes(G, mapping, copy=True)
+ new_order = list(H.nodes())
+ assert [mapping.get(i, i) for i in original_order] == new_order
+
+ def test_relabel_preserve_node_order_full_mapping_with_copy_false(self):
+ G = nx.path_graph(3)
+ original_order = list(G)
+ mapping = {2: "a", 1: "b", 0: "c"} # dictionary keys out of order on purpose
+ H = nx.relabel_nodes(G, mapping, copy=False)
+ new_order = list(H)
+ assert [mapping.get(i, i) for i in original_order] == new_order
+
+ def test_relabel_preserve_node_order_partial_mapping_with_copy_true(self):
+ G = nx.path_graph(3)
+ original_order = list(G)
+ mapping = {1: "a", 0: "b"} # partial mapping and keys out of order on purpose
+ H = nx.relabel_nodes(G, mapping, copy=True)
+ new_order = list(H)
+ assert [mapping.get(i, i) for i in original_order] == new_order
+
+ def test_relabel_preserve_node_order_partial_mapping_with_copy_false(self):
+ G = nx.path_graph(3)
+ original_order = list(G)
+ mapping = {1: "a", 0: "b"} # partial mapping and keys out of order on purpose
+ H = nx.relabel_nodes(G, mapping, copy=False)
+ new_order = list(H)
+ assert [mapping.get(i, i) for i in original_order] != new_order